AMBARI-21987. Store Zeppelin's interpreter.json in HDFS (Prabhjyot Singh via 
Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a90f3b36
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a90f3b36
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a90f3b36

Branch: refs/heads/trunk
Commit: a90f3b36c878e30b9eeeeecab678b9736f83254b
Parents: 87c4205
Author: Venkata Sairam <[email protected]>
Authored: Wed Sep 20 12:00:54 2017 +0530
Committer: Venkata Sairam <[email protected]>
Committed: Wed Sep 20 12:00:54 2017 +0530

----------------------------------------------------------------------
 .../0.6.0/configuration/zeppelin-config.xml     |   2 +-
 .../ZEPPELIN/0.6.0/package/scripts/master.py    |   2 +-
 .../0.7.0/configuration/zeppelin-config.xml     |   8 +-
 .../ZEPPELIN/0.7.0/package/scripts/master.py    |  67 +++-
 .../stacks/2.5/ZEPPELIN/test_zeppelin_060.py    | 339 +++++++++++++++++++
 .../stacks/2.5/ZEPPELIN/test_zeppelin_master.py | 339 -------------------
 .../test/python/stacks/2.5/configs/default.json |  54 +--
 .../test/python/stacks/2.5/configs/secured.json |  72 ++--
 .../2.6/ZEPPELIN/interpreter_json_generated.py  |  27 ++
 .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py    | 327 ++++++++++++++++++
 .../test/python/stacks/2.6/configs/default.json |  80 ++---
 11 files changed, 861 insertions(+), 456 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-config.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-config.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-config.xml
index bd6ad76..03ad5f7 100644
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-config.xml
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-config.xml
@@ -77,7 +77,7 @@
   </property>
   <property>
     <name>zeppelin.notebook.storage</name>
-    <value>org.apache.zeppelin.notebook.repo.HdfsNotebookRepo</value>
+    <value>org.apache.zeppelin.notebook.repo.VFSNotebookRepo</value>
     <description>notebook persistence layer implementation. If S3 is used, set 
this to
             org.apache.zeppelin.notebook.repo.S3NotebookRepo instead. If S3 is 
used to store the
             notebooks, it is necessary to use the following folder structure

http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/master.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/master.py
index ba73d10..0fdc27c 100644
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/master.py
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/master.py
@@ -242,7 +242,7 @@ class Master(Script):
              os.path.join(params.zeppelin_dir, "notebook")), sudo=True)
 
     if 'zeppelin.notebook.storage' in 
params.config['configurations']['zeppelin-config'] \
-        and 
params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] 
== 'org.apache.zeppelin.notebook.repo.HdfsNotebookRepo':
+        and 
params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] 
== 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
       self.check_and_copy_notebook_in_hdfs(params)
 
     if params.security_enabled:

http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/configuration/zeppelin-config.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/configuration/zeppelin-config.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/configuration/zeppelin-config.xml
index ca6b295..171b7eb 100644
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/configuration/zeppelin-config.xml
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/configuration/zeppelin-config.xml
@@ -77,7 +77,7 @@
   </property>
   <property>
     <name>zeppelin.notebook.storage</name>
-    <value>org.apache.zeppelin.notebook.repo.HdfsNotebookRepo</value>
+    <value>org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo</value>
     <description>notebook persistence layer implementation. If S3 is used, set 
this to
             org.apache.zeppelin.notebook.repo.S3NotebookRepo instead. If S3 is 
used to store the
             notebooks, it is necessary to use the following folder structure
@@ -86,6 +86,12 @@
     <on-ambari-upgrade add="false"/>
   </property>
   <property>
+    <name>zeppelin.config.fs.dir</name>
+    <value>conf</value>
+    <description>Location where interpreter.json should be 
installed</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
     <name>zeppelin.interpreter.dir</name>
     <value>interpreter</value>
     <description>Interpreter implementation base directory</description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index ba73d10..7d44d16 100644
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -23,6 +23,7 @@ import os
 
 from resource_management.core import shell, sudo
 from resource_management.core.logger import Logger
+from resource_management.core.exceptions import Fail
 from resource_management.core.resources import Directory
 from resource_management.core.resources.system import Execute, File
 from resource_management.core.source import InlineTemplate
@@ -242,7 +243,7 @@ class Master(Script):
              os.path.join(params.zeppelin_dir, "notebook")), sudo=True)
 
     if 'zeppelin.notebook.storage' in 
params.config['configurations']['zeppelin-config'] \
-        and 
params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] 
== 'org.apache.zeppelin.notebook.repo.HdfsNotebookRepo':
+        and 
params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] 
== 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
       self.check_and_copy_notebook_in_hdfs(params)
 
     if params.security_enabled:
@@ -294,23 +295,49 @@ class Master(Script):
 
     self.set_interpreter_settings(config_data)
 
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    Logger.info("Executing Stack Upgrade pre-restart")
+    import params
+    env.set_params(params)
+
+    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
format_stack_version(params.version)):
+      stack_select.select_packages(params.version)
+
+  def getZeppelinConfFS(self, params):
+    hdfs_interpreter_config = 
params.config['configurations']['zeppelin-config']['zeppelin.config.fs.dir'] + 
"/interpreter.json"
+
+    if not hdfs_interpreter_config.startswith("/"):
+      hdfs_interpreter_config = "/user/" + format("{zeppelin_user}") + "/" + 
hdfs_interpreter_config
+
+    return hdfs_interpreter_config
+
   def get_interpreter_settings(self):
     import params
     import json
 
     interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
+    if 'zeppelin.notebook.storage' in 
params.config['configurations']['zeppelin-config'] \
+      and 
params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] 
== 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
+
+      if 'zeppelin.config.fs.dir' in 
params.config['configurations']['zeppelin-config']:
+        try:
+          # copy from hdfs to /etc/zeppelin/conf/interpreter.json
+          params.HdfsResource(interpreter_config,
+                              type="file",
+                              action="download_on_execute",
+                              source=self.getZeppelinConfFS(params),
+                              group=params.zeppelin_group,
+                              user=params.zeppelin_user)
+        except Fail as fail:
+          if "doesn't exist" not in fail.args[0]:
+            print "Error getting interpreter.json from HDFS"
+            print fail.args
+            raise Fail
+
     config_content = sudo.read_file(interpreter_config)
     config_data = json.loads(config_content)
     return config_data
 
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    Logger.info("Executing Stack Upgrade pre-restart")
-    import params
-    env.set_params(params)
-
-    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
format_stack_version(params.version)):
-      stack_select.select_packages(params.version)
-
   def set_interpreter_settings(self, config_data):
     import params
     import json
@@ -319,8 +346,19 @@ class Master(Script):
     File(interpreter_config,
          group=params.zeppelin_group,
          owner=params.zeppelin_user,
-         content=json.dumps(config_data, indent=2)
-         )
+         content=json.dumps(config_data, indent=2))
+
+    if 'zeppelin.notebook.storage' in 
params.config['configurations']['zeppelin-config'] \
+      and 
params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] 
== 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
+
+      if 'zeppelin.config.fs.dir' in 
params.config['configurations']['zeppelin-config']:
+        params.HdfsResource(self.getZeppelinConfFS(params),
+                            type="file",
+                            action="create_on_execute",
+                            source=interpreter_config,
+                            group=params.zeppelin_group,
+                            user=params.zeppelin_user,
+                            replace_existing_files=True)
 
   def update_kerberos_properties(self):
     import params
@@ -499,8 +537,11 @@ class Master(Script):
     import params
 
     interpreter_json = interpreter_json_template.template
-    File(format("{params.conf_dir}/interpreter.json"), 
content=interpreter_json,
-         owner=params.zeppelin_user, group=params.zeppelin_group)
+    File(format("{params.conf_dir}/interpreter.json"),
+         content=interpreter_json,
+         owner=params.zeppelin_user,
+         group=params.zeppelin_group,
+         mode=0664)
 
   def get_zeppelin_spark_dependencies(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py 
b/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py
new file mode 100644
index 0000000..8df053c
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py
@@ -0,0 +1,339 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+from ambari_commons import OSCheck
+'''
+
+from stacks.utils.RMFTestCase import *
+from mock.mock import MagicMock, patch, call
+import time
+from resource_management.core import sudo
+import glob
+import interpreter_json_generated
+
[email protected](glob, "glob", new = MagicMock(return_value=["/tmp"]))
[email protected](sudo, "read_file", new = 
MagicMock(return_value=interpreter_json_generated.template))
+class TestZeppelin060(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "ZEPPELIN/0.6.0/package"
+  STACK_VERSION = "2.5"
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/var/log/zeppelin',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        create_parents = True,
+        mode = 0755,
+        cd_access = 'a',
+    )
+    self.assertResourceCalled('Directory', '/var/run/zeppelin',
+        owner = 'zeppelin',
+        create_parents = True,
+        group = 'zeppelin',
+        mode = 0755,
+        cd_access = 'a',
+    )
+    self.assertResourceCalled('Directory', '/usr/hdp/current/zeppelin-server',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        create_parents = True,
+        mode = 0755,
+        cd_access = 'a',
+    )
+    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/var/run/zeppelin'), sudo = True)
+    self.assertResourceCalled('XmlConfig', 'zeppelin-site.xml',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        conf_dir = '/etc/zeppelin/conf',
+        configurations = self.getConfig()['configurations']['zeppelin-config'],
+    )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/zeppelin-env.sh',
+        owner = 'zeppelin',
+        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-env']['zeppelin_env_content']),
+        group = 'zeppelin',
+    )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/shiro.ini',
+        owner = 'zeppelin',
+        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-shiro-ini']['shiro_ini_content']),
+        group = 'zeppelin',
+    )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/log4j.properties',
+        owner = u'zeppelin',
+        content = u'log4j.rootLogger = INFO, dailyfile',
+        group = u'zeppelin',
+    )
+    self.assertResourceCalled('Directory', 
'/etc/zeppelin/conf/external-dependency-conf',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        create_parents = True,
+        mode = 0755,
+        cd_access = 'a',
+    )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/var/log/zeppelin',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        create_parents = True,
+        mode = 0755,
+        cd_access = 'a',
+    )
+    self.assertResourceCalled('Directory', '/var/run/zeppelin',
+        owner = 'zeppelin',
+        create_parents = True,
+        group = 'zeppelin',
+        mode = 0755,
+        cd_access = 'a',
+    )
+    self.assertResourceCalled('Directory', '/usr/hdp/current/zeppelin-server',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        create_parents = True,
+        mode = 0755,
+        cd_access = 'a',
+    )
+    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/var/run/zeppelin'), sudo = True)
+    self.assertResourceCalled('XmlConfig', 'zeppelin-site.xml',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        conf_dir = '/etc/zeppelin/conf',
+        configurations = self.getConfig()['configurations']['zeppelin-config'],
+    )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/zeppelin-env.sh',
+        owner = 'zeppelin',
+        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-env']['zeppelin_env_content']),
+        group = 'zeppelin',
+    )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/shiro.ini',
+        owner = 'zeppelin',
+        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-shiro-ini']['shiro_ini_content']),
+        group = 'zeppelin',
+    )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/log4j.properties',
+        owner = u'zeppelin',
+        content = u'log4j.rootLogger = INFO, dailyfile',
+        group = u'zeppelin',
+    )
+    self.assertResourceCalled('Directory', 
'/etc/zeppelin/conf/external-dependency-conf',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        create_parents = True,
+        mode = 0755,
+        cd_access = 'a',
+    )
+
+  def test_configure_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
+                       classname = "Master",
+                       command = "configure",
+                       config_file = "default.json",
+                       stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
+                       classname = "Master",
+                       command = "configure",
+                       config_file = "secured.json",
+                       stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+    
+  def test_stop_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
+                       classname = "Master",
+                       command = "stop",
+                       config_file = "secured.json",
+                       stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assertResourceCalled('Directory', '/var/log/zeppelin',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        create_parents = True,
+        mode = 0755,
+        cd_access = 'a',
+    )
+    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/var/run/zeppelin'),
+        sudo = True,
+    )
+    self.assertResourceCalled('Execute', 
'/usr/hdp/current/zeppelin-server/bin/zeppelin-daemon.sh stop >> 
/var/log/zeppelin/zeppelin-setup.log',
+        user = 'zeppelin',
+    )
+    self.assertNoMoreResources()
+ 
+  def test_stop_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
+                       classname = "Master",
+                       command = "stop",
+                       config_file = "default.json",
+                       stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assertResourceCalled('Directory', '/var/log/zeppelin',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        create_parents = True,
+        mode = 0755,
+        cd_access = 'a',
+    )
+    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/var/run/zeppelin'),
+        sudo = True,
+    )
+    self.assertResourceCalled('Execute', 
'/usr/hdp/current/zeppelin-server/bin/zeppelin-daemon.sh stop >> 
/var/log/zeppelin/zeppelin-setup.log',
+        user = 'zeppelin',
+    )
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
+                       classname = "Master",
+                       command = "start",
+                       config_file = "default.json",
+                       stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/etc/zeppelin'),
+        sudo = True,
+    )
+
+  @patch('os.path.exists', return_value = True)
+  def test_start_secured(self, os_path_exists_mock):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
+                       classname = "Master",
+                       command = "start",
+                       config_file = "secured.json",
+                       stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/etc/zeppelin'),
+        sudo = True,
+    )
+    self.assertResourceCalled('Execute', ('chown', '-R', 'zeppelin:zeppelin', 
'/usr/hdp/current/zeppelin-server/notebook'),
+        sudo = True,
+    )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt 
/etc/security/keytabs/zeppelin.server.kerberos.keytab [email protected]; ',
+        user = 'zeppelin',
+    )
+    self.assertResourceCalled('HdfsResource', '/user/zeppelin',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        hdfs_resource_ignore_file = 
'/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        owner = 'zeppelin',
+        recursive_chown = True,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        recursive_chmod = True,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/zeppelin/test',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        hdfs_resource_ignore_file = 
'/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        owner = 'zeppelin',
+        recursive_chown = True,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        recursive_chmod = True,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/zeppelin',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        hdfs_resource_ignore_file = 
'/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        owner = 'zeppelin',
+        recursive_chown = True,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        recursive_chmod = True,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/zeppelin/tmp',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        source = '/tmp',
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        replace_existing_files = True,
+        hdfs_resource_ignore_file = 
'/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+        action = ['create_on_execute'],
+        mode = 0444,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        hdfs_resource_ignore_file = 
'/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
+        content=interpreter_json_generated.template_after_base,
+        owner = 'zeppelin',
+        group = 'zeppelin',
+    )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
+        
content=interpreter_json_generated.template_after_without_spark_and_livy,
+        owner = 'zeppelin',
+        group = 'zeppelin')
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
+                              
content=interpreter_json_generated.template_after_kerberos,
+                              owner = 'zeppelin',
+                              group = 'zeppelin')
+    self.assertResourceCalled('Execute', 
'/usr/hdp/current/zeppelin-server/bin/zeppelin-daemon.sh restart >> 
/var/log/zeppelin/zeppelin-setup.log',
+        user = 'zeppelin'
+    )
+    self.assertNoMoreResources()
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_master.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_master.py 
b/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_master.py
deleted file mode 100644
index 815b2c0..0000000
--- a/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_master.py
+++ /dev/null
@@ -1,339 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-from ambari_commons import OSCheck
-'''
-
-from stacks.utils.RMFTestCase import *
-from mock.mock import MagicMock, patch, call
-import time
-from resource_management.core import sudo
-import glob
-import interpreter_json_generated
-
[email protected](glob, "glob", new = MagicMock(return_value=["/tmp"]))
[email protected](sudo, "read_file", new = 
MagicMock(return_value=interpreter_json_generated.template))
-class TestZeppelinMaster(RMFTestCase):
-  COMMON_SERVICES_PACKAGE_DIR = "ZEPPELIN/0.6.0.2.5/package"
-  STACK_VERSION = "2.5"
-
-  def assert_configure_default(self):
-    self.assertResourceCalled('Directory', '/var/log/zeppelin',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        create_parents = True,
-        mode = 0755,
-        cd_access = 'a',
-    )
-    self.assertResourceCalled('Directory', '/var/run/zeppelin',
-        owner = 'zeppelin',
-        create_parents = True,
-        group = 'zeppelin',
-        mode = 0755,
-        cd_access = 'a',
-    )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/zeppelin-server',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        create_parents = True,
-        mode = 0755,
-        cd_access = 'a',
-    )
-    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/var/run/zeppelin'), sudo = True)
-    self.assertResourceCalled('XmlConfig', 'zeppelin-site.xml',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        conf_dir = '/etc/zeppelin/conf',
-        configurations = self.getConfig()['configurations']['zeppelin-config'],
-    )
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/zeppelin-env.sh',
-        owner = 'zeppelin',
-        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-env']['zeppelin_env_content']),
-        group = 'zeppelin',
-    )
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/shiro.ini',
-        owner = 'zeppelin',
-        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-shiro-ini']['shiro_ini_content']),
-        group = 'zeppelin',
-    )
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/log4j.properties',
-        owner = u'zeppelin',
-        content = u'log4j.rootLogger = INFO, dailyfile',
-        group = u'zeppelin',
-    )
-    self.assertResourceCalled('Directory', 
'/etc/zeppelin/conf/external-dependency-conf',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        create_parents = True,
-        mode = 0755,
-        cd_access = 'a',
-    )
-
-  def assert_configure_secured(self):
-    self.assertResourceCalled('Directory', '/var/log/zeppelin',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        create_parents = True,
-        mode = 0755,
-        cd_access = 'a',
-    )
-    self.assertResourceCalled('Directory', '/var/run/zeppelin',
-        owner = 'zeppelin',
-        create_parents = True,
-        group = 'zeppelin',
-        mode = 0755,
-        cd_access = 'a',
-    )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/zeppelin-server',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        create_parents = True,
-        mode = 0755,
-        cd_access = 'a',
-    )
-    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/var/run/zeppelin'), sudo = True)
-    self.assertResourceCalled('XmlConfig', 'zeppelin-site.xml',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        conf_dir = '/etc/zeppelin/conf',
-        configurations = self.getConfig()['configurations']['zeppelin-config'],
-    )
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/zeppelin-env.sh',
-        owner = 'zeppelin',
-        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-env']['zeppelin_env_content']),
-        group = 'zeppelin',
-    )
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/shiro.ini',
-        owner = 'zeppelin',
-        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-shiro-ini']['shiro_ini_content']),
-        group = 'zeppelin',
-    )
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/log4j.properties',
-        owner = u'zeppelin',
-        content = u'log4j.rootLogger = INFO, dailyfile',
-        group = u'zeppelin',
-    )
-    self.assertResourceCalled('Directory', 
'/etc/zeppelin/conf/external-dependency-conf',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        create_parents = True,
-        mode = 0755,
-        cd_access = 'a',
-    )
-
-  def test_configure_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
-                       classname = "Master",
-                       command = "configure",
-                       config_file = "default.json",
-                       stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    self.assert_configure_default()
-    self.assertNoMoreResources()
-
-  def test_configure_secured(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
-                       classname = "Master",
-                       command = "configure",
-                       config_file = "secured.json",
-                       stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    self.assert_configure_secured()
-    self.assertNoMoreResources()
-    
-  def test_stop_secured(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
-                       classname = "Master",
-                       command = "stop",
-                       config_file = "secured.json",
-                       stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    self.assertResourceCalled('Directory', '/var/log/zeppelin',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        create_parents = True,
-        mode = 0755,
-        cd_access = 'a',
-    )
-    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/var/run/zeppelin'),
-        sudo = True,
-    )
-    self.assertResourceCalled('Execute', 
'/usr/hdp/current/zeppelin-server/bin/zeppelin-daemon.sh stop >> 
/var/log/zeppelin/zeppelin-setup.log',
-        user = 'zeppelin',
-    )
-    self.assertNoMoreResources()
- 
-  def test_stop_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
-                       classname = "Master",
-                       command = "stop",
-                       config_file = "default.json",
-                       stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    self.assertResourceCalled('Directory', '/var/log/zeppelin',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        create_parents = True,
-        mode = 0755,
-        cd_access = 'a',
-    )
-    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/var/run/zeppelin'),
-        sudo = True,
-    )
-    self.assertResourceCalled('Execute', 
'/usr/hdp/current/zeppelin-server/bin/zeppelin-daemon.sh stop >> 
/var/log/zeppelin/zeppelin-setup.log',
-        user = 'zeppelin',
-    )
-    self.assertNoMoreResources()
-
-  def test_start_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
-                       classname = "Master",
-                       command = "start",
-                       config_file = "default.json",
-                       stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    self.assert_configure_default()
-    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/etc/zeppelin'),
-        sudo = True,
-    )
-
-  @patch('os.path.exists', return_value = True)
-  def test_start_secured(self, os_path_exists_mock):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
-                       classname = "Master",
-                       command = "start",
-                       config_file = "secured.json",
-                       stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    self.assert_configure_secured()
-    self.assertResourceCalled('Execute', ('chown', '-R', u'zeppelin:zeppelin', 
'/etc/zeppelin'),
-        sudo = True,
-    )
-    self.assertResourceCalled('Execute', ('chown', '-R', 'zeppelin:zeppelin', 
'/usr/hdp/current/zeppelin-server/notebook'),
-        sudo = True,
-    )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt 
/etc/security/keytabs/zeppelin.server.kerberos.keytab [email protected]; ',
-        user = 'zeppelin',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/zeppelin',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        hdfs_resource_ignore_file = 
'/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hdfs_site = {u'a': u'b'},
-        kinit_path_local = '/usr/bin/kinit',
-        principal_name = UnknownConfigurationMock(),
-        user = 'hdfs',
-        owner = 'zeppelin',
-        recursive_chown = True,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        recursive_chmod = True,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/zeppelin/test',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        hdfs_resource_ignore_file = 
'/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hdfs_site = {u'a': u'b'},
-        kinit_path_local = '/usr/bin/kinit',
-        principal_name = UnknownConfigurationMock(),
-        user = 'hdfs',
-        owner = 'zeppelin',
-        recursive_chown = True,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        recursive_chmod = True,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/zeppelin',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        hdfs_resource_ignore_file = 
'/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hdfs_site = {u'a': u'b'},
-        kinit_path_local = '/usr/bin/kinit',
-        principal_name = UnknownConfigurationMock(),
-        user = 'hdfs',
-        owner = 'zeppelin',
-        recursive_chown = True,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        recursive_chmod = True,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/zeppelin/tmp',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        source = '/tmp',
-        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        replace_existing_files = True,
-        hdfs_resource_ignore_file = 
'/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hdfs_site = {u'a': u'b'},
-        kinit_path_local = '/usr/bin/kinit',
-        principal_name = UnknownConfigurationMock(),
-        user = 'hdfs',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'file',
-        action = ['create_on_execute'],
-        mode = 0444,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        hdfs_resource_ignore_file = 
'/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hdfs_site = {u'a': u'b'},
-        kinit_path_local = '/usr/bin/kinit',
-        principal_name = UnknownConfigurationMock(),
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
-        content=interpreter_json_generated.template_after_base,
-        owner = 'zeppelin',
-        group = 'zeppelin',
-    )
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
-        
content=interpreter_json_generated.template_after_without_spark_and_livy,
-        owner = 'zeppelin',
-        group = 'zeppelin')
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
-                              
content=interpreter_json_generated.template_after_kerberos,
-                              owner = 'zeppelin',
-                              group = 'zeppelin')
-    self.assertResourceCalled('Execute', 
'/usr/hdp/current/zeppelin-server/bin/zeppelin-daemon.sh restart >> 
/var/log/zeppelin/zeppelin-setup.log',
-        user = 'zeppelin'
-    )
-    self.assertNoMoreResources()
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/test/python/stacks/2.5/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/default.json 
b/ambari-server/src/test/python/stacks/2.5/configs/default.json
index 36a7282..119da14 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/default.json
@@ -304,35 +304,35 @@
     "zeppelin-shiro-ini": {
       "shiro_ini_content": "\n[users]\n# List of users with their password 
allowed to access Zeppelin.\n# To use a different strategy (LDAP / Database / 
...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections\n#admin = 
password1\n#user1 = password2, role1, role2\n#user2 = password3, role3\n#user3 
= password4, role2\n\n# Sample LDAP configuration, for user Authentication, 
currently tested for single Realm\n[main]\n#ldapRealm = 
org.apache.shiro.realm.ldap.JndiLdapRealm\n#ldapRealm.userDnTemplate = 
uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com\n#ldapRealm.contextFactory.url
 = ldap://ldaphost:389\n#ldapRealm.contextFactory.authenticationMechanism = 
SIMPLE\n#sessionManager = 
org.apache.shiro.web.session.mgt.DefaultWebSessionManager\n#securityManager.sessionManager
 = $sessionManager\n# 86,400,000 milliseconds = 24 
hour\n#securityManager.sessionManager.globalSessionTimeout = 
86400000\nshiro.loginUrl = /api/login\n\n[urls]\n# anon means the acce
 ss is anonymous.\n# authcBasic means Basic Auth Security\n# To enfore 
security, comment the line below and uncomment the next one\n/api/version = 
anon\n/** = anon\n#/** = authc"
     },
-    "zeppelin-log4j-properties":{
+    "zeppelin-log4j-properties": {
       "log4j_properties_content": "log4j.rootLogger = INFO, dailyfile"
     },
-"zeppelin-config": {
-            "zeppelin.server.port": "9995", 
-            "zeppelin.ssl.truststore.password": "change me", 
-            "zeppelin.interpreters": 
"org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter",
 
-            "zeppelin.ssl.truststore.path": "conf/truststore", 
-            "zeppelin.notebook.dir": "notebook", 
-            "zeppelin.ssl.keystore.password": "change me", 
-            "zeppelin.ssl.keystore.path": "conf/keystore", 
-            "zeppelin.server.addr": "0.0.0.0", 
-            "zeppelin.ssl.client.auth": "false", 
-            "zeppelin.notebook.homescreen": " ", 
-            "zeppelin.interpreter.dir": "interpreter", 
-            "zeppelin.ssl.keystore.type": "JKS", 
-            "zeppelin.notebook.s3.user": "user", 
-            "zeppelin.ssl.key.manager.password": "change me", 
-            "zeppelin.anonymous.allowed": "true", 
-            "zeppelin.ssl.truststore.type": "JKS", 
-            "zeppelin.ssl": "false", 
-            "zeppelin.notebook.storage": 
"org.apache.zeppelin.notebook.repo.VFSNotebookRepo", 
-            "zeppelin.websocket.max.text.message.size": "1024000", 
-            "zeppelin.interpreter.connect.timeout": "30000", 
-            "zeppelin.notebook.s3.bucket": "zeppelin", 
-            "zeppelin.notebook.homescreen.hide": "false", 
-            "zeppelin.server.allowed.origins": "*",
-            "zeppelin.interpreter.config.upgrade": "true"
-        },
+    "zeppelin-config": {
+      "zeppelin.server.port": "9995",
+      "zeppelin.ssl.truststore.password": "change me",
+      "zeppelin.interpreters": 
"org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter",
+      "zeppelin.ssl.truststore.path": "conf/truststore",
+      "zeppelin.notebook.dir": "notebook",
+      "zeppelin.ssl.keystore.password": "change me",
+      "zeppelin.ssl.keystore.path": "conf/keystore",
+      "zeppelin.server.addr": "0.0.0.0",
+      "zeppelin.ssl.client.auth": "false",
+      "zeppelin.notebook.homescreen": " ",
+      "zeppelin.interpreter.dir": "interpreter",
+      "zeppelin.ssl.keystore.type": "JKS",
+      "zeppelin.notebook.s3.user": "user",
+      "zeppelin.ssl.key.manager.password": "change me",
+      "zeppelin.anonymous.allowed": "true",
+      "zeppelin.ssl.truststore.type": "JKS",
+      "zeppelin.ssl": "false",
+      "zeppelin.notebook.storage": 
"org.apache.zeppelin.notebook.repo.VFSNotebookRepo",
+      "zeppelin.websocket.max.text.message.size": "1024000",
+      "zeppelin.interpreter.connect.timeout": "30000",
+      "zeppelin.notebook.s3.bucket": "zeppelin",
+      "zeppelin.notebook.homescreen.hide": "false",
+      "zeppelin.server.allowed.origins": "*",
+      "zeppelin.interpreter.config.upgrade": "true"
+    },
     "zoo.cfg": {
       "clientPort": "2181"
     },

http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/test/python/stacks/2.5/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/secured.json 
b/ambari-server/src/test/python/stacks/2.5/configs/secured.json
index b092b22..f37799d 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/secured.json
@@ -225,47 +225,47 @@
     "zeppelin-shiro-ini": {
       "shiro_ini_content": "\n[users]\n# List of users with their password 
allowed to access Zeppelin.\n# To use a different strategy (LDAP / Database / 
...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections\n#admin = 
password1\n#user1 = password2, role1, role2\n#user2 = password3, role3\n#user3 
= password4, role2\n\n# Sample LDAP configuration, for user Authentication, 
currently tested for single Realm\n[main]\n#ldapRealm = 
org.apache.shiro.realm.ldap.JndiLdapRealm\n#ldapRealm.userDnTemplate = 
uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com\n#ldapRealm.contextFactory.url
 = ldap://ldaphost:389\n#ldapRealm.contextFactory.authenticationMechanism = 
SIMPLE\n#sessionManager = 
org.apache.shiro.web.session.mgt.DefaultWebSessionManager\n#securityManager.sessionManager
 = $sessionManager\n# 86,400,000 milliseconds = 24 
hour\n#securityManager.sessionManager.globalSessionTimeout = 
86400000\nshiro.loginUrl = /api/login\n\n[urls]\n# anon means the acce
 ss is anonymous.\n# authcBasic means Basic Auth Security\n# To enfore 
security, comment the line below and uncomment the next one\n/api/version = 
anon\n/** = anon\n#/** = authc"
     },
-    "zeppelin-log4j-properties":{
+    "zeppelin-log4j-properties": {
       "log4j_properties_content": "log4j.rootLogger = INFO, dailyfile"
     },
     "zeppelin-env": {
-        "zeppelin.server.kerberos.keytab": 
"/etc/security/keytabs/zeppelin.server.kerberos.keytab",
-        "zeppelin.spark.jar.dir": "/apps/zeppelin",
-        "zeppelin.executor.mem": "512m",
-        "zeppelin_pid_dir": "/var/run/zeppelin",
-        "zeppelin.executor.instances": "2",
-        "zeppelin.server.kerberos.principal": "[email protected]", 
-        "zeppelin_user": "zeppelin", 
-        "zeppelin_env_content": "\n# Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode\nexport 
MASTER=yarn-client\nexport SPARK_YARN_JAR={{spark_jar}}\n\n\n# Where log files 
are stored.  PWD by default.\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}\n\n# 
The pid files are stored. /tmp by default.\nexport 
ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}\n\n\nexport 
JAVA_HOME={{java64_home}}\n\n# Additional jvm options. for example, export 
ZEPPELIN_JAVA_OPTS=\"-Dspark.executor.memory=8g -Dspark.cores.max=16\"\nexport 
ZEPPELIN_JAVA_OPTS=\"-Dhdp.version={{full_stack_version}} 
-Dspark.executor.memory={{executor_mem}} 
-Dspark.executor.instances={{executor_instances}} 
-Dspark.yarn.queue={{spark_queue}}\"\n\n\n# Zeppelin jvm mem options Default 
-Xmx1024m -XX:MaxPermSize=512m\n# export ZEPPELIN_MEM\n\n# zeppelin interpreter 
process jvm mem options. Defualt = ZEPPELIN_MEM\n# export 
ZEPPELIN_INTP_MEM\n\n# zeppelin interpreter process jvm options. Default = 
ZEPPELIN_
 JAVA_OPTS\n# export ZEPPELIN_INTP_JAVA_OPTS\n\n# Where notebook saved\n# 
export ZEPPELIN_NOTEBOOK_DIR\n\n# Id of notebook to be displayed in homescreen. 
ex) 2A94M5J1Z\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN\n\n# hide homescreen 
notebook from list when this value set to \"true\". default \"false\"\n# export 
ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE\n\n# Bucket where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_S3_BUCKET\n\n# User in bucket where notebook saved. For 
example bucket/user/notebook/2A94M5J1Z/note.json\n# export 
ZEPPELIN_NOTEBOOK_S3_USER\n\n# A string representing this instance of zeppelin. 
$USER by default\n# export ZEPPELIN_IDENT_STRING\n\n# The scheduling priority 
for daemons. Defaults to 0.\n# export ZEPPELIN_NICENESS\n\n\n#### Spark 
interpreter configuration ####\n\n## Use provided spark installation ##\n## 
defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit\n##\n# (required) When it is defined, load it instead of Zeppelin 
embedded Spark libraries
 \nexport SPARK_HOME={{spark_home}}\n\n# (optional) extra options to pass to 
spark submit. eg) \"--driver-memory 512M --executor-memory 1G\".\n# export 
SPARK_SUBMIT_OPTIONS\n\n## Use embedded spark binaries ##\n## without 
SPARK_HOME defined, Zeppelin still able to run spark interpreter process using 
embedded spark binaries.\n## however, it is not encouraged when you can define 
SPARK_HOME\n##\n# Options read in YARN client mode\n# yarn-site.xml is located 
in configuration directory in HADOOP_CONF_DIR.\nexport 
HADOOP_CONF_DIR=/etc/hadoop/conf\n\n# Pyspark (supported with Spark 1.2.1 and 
above)\n# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI\n# path to 
the python command. must be the same path on the driver(Zeppelin) and all 
workers.\n# export PYSPARK_PYTHON\n\nexport 
PYTHONPATH=\"${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip\"\nexport
 SPARK_YARN_USER_ENV=\"PYTHONPATH=${PYTHONP
 ATH}\"\n\n## Spark interpreter options ##\n##\n# Use HiveContext instead of 
SQLContext if set true. true by default.\n# export 
ZEPPELIN_SPARK_USEHIVECONTEXT\n\n# Execute multiple SQL concurrently if set 
true. false by default.\n# export ZEPPELIN_SPARK_CONCURRENTSQL\n\n# Max number 
of SparkSQL result to display. 1000 by default.\n# export 
ZEPPELIN_SPARK_MAXRESULT", 
-        "zeppelin.kerberos.enabled": "true", 
-        "zeppelin_group": "zeppelin", 
-        "zeppelin_log_dir": "/var/log/zeppelin"
+      "zeppelin.server.kerberos.keytab": 
"/etc/security/keytabs/zeppelin.server.kerberos.keytab",
+      "zeppelin.spark.jar.dir": "/apps/zeppelin",
+      "zeppelin.executor.mem": "512m",
+      "zeppelin_pid_dir": "/var/run/zeppelin",
+      "zeppelin.executor.instances": "2",
+      "zeppelin.server.kerberos.principal": "[email protected]",
+      "zeppelin_user": "zeppelin",
+      "zeppelin_env_content": "\n# Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode\nexport 
MASTER=yarn-client\nexport SPARK_YARN_JAR={{spark_jar}}\n\n\n# Where log files 
are stored.  PWD by default.\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}\n\n# 
The pid files are stored. /tmp by default.\nexport 
ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}\n\n\nexport 
JAVA_HOME={{java64_home}}\n\n# Additional jvm options. for example, export 
ZEPPELIN_JAVA_OPTS=\"-Dspark.executor.memory=8g -Dspark.cores.max=16\"\nexport 
ZEPPELIN_JAVA_OPTS=\"-Dhdp.version={{full_stack_version}} 
-Dspark.executor.memory={{executor_mem}} 
-Dspark.executor.instances={{executor_instances}} 
-Dspark.yarn.queue={{spark_queue}}\"\n\n\n# Zeppelin jvm mem options Default 
-Xmx1024m -XX:MaxPermSize=512m\n# export ZEPPELIN_MEM\n\n# zeppelin interpreter 
process jvm mem options. Defualt = ZEPPELIN_MEM\n# export 
ZEPPELIN_INTP_MEM\n\n# zeppelin interpreter process jvm options. Default = 
ZEPPELIN_JA
 VA_OPTS\n# export ZEPPELIN_INTP_JAVA_OPTS\n\n# Where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_DIR\n\n# Id of notebook to be displayed in homescreen. ex) 
2A94M5J1Z\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN\n\n# hide homescreen notebook 
from list when this value set to \"true\". default \"false\"\n# export 
ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE\n\n# Bucket where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_S3_BUCKET\n\n# User in bucket where notebook saved. For 
example bucket/user/notebook/2A94M5J1Z/note.json\n# export 
ZEPPELIN_NOTEBOOK_S3_USER\n\n# A string representing this instance of zeppelin. 
$USER by default\n# export ZEPPELIN_IDENT_STRING\n\n# The scheduling priority 
for daemons. Defaults to 0.\n# export ZEPPELIN_NICENESS\n\n\n#### Spark 
interpreter configuration ####\n\n## Use provided spark installation ##\n## 
defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit\n##\n# (required) When it is defined, load it instead of Zeppelin 
embedded Spark libraries\n
 export SPARK_HOME={{spark_home}}\n\n# (optional) extra options to pass to 
spark submit. eg) \"--driver-memory 512M --executor-memory 1G\".\n# export 
SPARK_SUBMIT_OPTIONS\n\n## Use embedded spark binaries ##\n## without 
SPARK_HOME defined, Zeppelin still able to run spark interpreter process using 
embedded spark binaries.\n## however, it is not encouraged when you can define 
SPARK_HOME\n##\n# Options read in YARN client mode\n# yarn-site.xml is located 
in configuration directory in HADOOP_CONF_DIR.\nexport 
HADOOP_CONF_DIR=/etc/hadoop/conf\n\n# Pyspark (supported with Spark 1.2.1 and 
above)\n# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI\n# path to 
the python command. must be the same path on the driver(Zeppelin) and all 
workers.\n# export PYSPARK_PYTHON\n\nexport 
PYTHONPATH=\"${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip\"\nexport
 SPARK_YARN_USER_ENV=\"PYTHONPATH=${PYTHONPAT
 H}\"\n\n## Spark interpreter options ##\n##\n# Use HiveContext instead of 
SQLContext if set true. true by default.\n# export 
ZEPPELIN_SPARK_USEHIVECONTEXT\n\n# Execute multiple SQL concurrently if set 
true. false by default.\n# export ZEPPELIN_SPARK_CONCURRENTSQL\n\n# Max number 
of SparkSQL result to display. 1000 by default.\n# export 
ZEPPELIN_SPARK_MAXRESULT",
+      "zeppelin.kerberos.enabled": "true",
+      "zeppelin_group": "zeppelin",
+      "zeppelin_log_dir": "/var/log/zeppelin"
     },
     "zeppelin-config": {
-        "zeppelin.server.port": "9995", 
-        "zeppelin.ssl.truststore.password": "change me", 
-        "zeppelin.interpreters": 
"org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter",
 
-        "zeppelin.ssl.truststore.path": "conf/truststore", 
-        "zeppelin.notebook.dir": "notebook", 
-        "zeppelin.ssl.keystore.password": "change me", 
-        "zeppelin.ssl.keystore.path": "conf/keystore", 
-        "zeppelin.server.addr": "0.0.0.0", 
-        "zeppelin.ssl.client.auth": "false", 
-        "zeppelin.notebook.homescreen": " ", 
-        "zeppelin.interpreter.dir": "interpreter", 
-        "zeppelin.ssl.keystore.type": "JKS", 
-        "zeppelin.notebook.s3.user": "user", 
-        "zeppelin.ssl.key.manager.password": "change me", 
-        "zeppelin.anonymous.allowed": "true", 
-        "zeppelin.ssl.truststore.type": "JKS", 
-        "zeppelin.ssl": "false", 
-        "zeppelin.notebook.storage": 
"org.apache.zeppelin.notebook.repo.VFSNotebookRepo", 
-        "zeppelin.websocket.max.text.message.size": "1024000", 
-        "zeppelin.interpreter.connect.timeout": "30000", 
-        "zeppelin.notebook.s3.bucket": "zeppelin", 
-        "zeppelin.notebook.homescreen.hide": "false", 
-        "zeppelin.server.allowed.origins": "*",
-        "zeppelin.interpreter.config.upgrade": "true"
+      "zeppelin.server.port": "9995",
+      "zeppelin.ssl.truststore.password": "change me",
+      "zeppelin.interpreters": 
"org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter",
+      "zeppelin.ssl.truststore.path": "conf/truststore",
+      "zeppelin.notebook.dir": "notebook",
+      "zeppelin.ssl.keystore.password": "change me",
+      "zeppelin.ssl.keystore.path": "conf/keystore",
+      "zeppelin.server.addr": "0.0.0.0",
+      "zeppelin.ssl.client.auth": "false",
+      "zeppelin.notebook.homescreen": " ",
+      "zeppelin.interpreter.dir": "interpreter",
+      "zeppelin.ssl.keystore.type": "JKS",
+      "zeppelin.notebook.s3.user": "user",
+      "zeppelin.ssl.key.manager.password": "change me",
+      "zeppelin.anonymous.allowed": "true",
+      "zeppelin.ssl.truststore.type": "JKS",
+      "zeppelin.ssl": "false",
+      "zeppelin.notebook.storage": 
"org.apache.zeppelin.notebook.repo.VFSNotebookRepo",
+      "zeppelin.websocket.max.text.message.size": "1024000",
+      "zeppelin.interpreter.connect.timeout": "30000",
+      "zeppelin.notebook.s3.bucket": "zeppelin",
+      "zeppelin.notebook.homescreen.hide": "false",
+      "zeppelin.server.allowed.origins": "*",
+      "zeppelin.interpreter.config.upgrade": "true"
     },
     "spark-defaults": {
         "spark.yarn.scheduler.heartbeat.interval-ms": "5000", 

Reply via email to