Repository: ambari
Updated Branches:
  refs/heads/trunk 87c4205c2 -> a90f3b36c


http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/interpreter_json_generated.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/interpreter_json_generated.py
 
b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/interpreter_json_generated.py
new file mode 100644
index 0000000..1d2cf86
--- /dev/null
+++ 
b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/interpreter_json_generated.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+template = '\n{\n  "interpreterSettings": {\n    "2CKEKWY8Z": {\n      "id": 
"2CKEKWY8Z",\n      "name": "angular",\n      "group": "angular",\n      
"properties": {},\n      "status": "READY",\n      "interpreterGroup": [\n      
  {\n          "name": "angular",\n          "class": 
"org.apache.zeppelin.angular.AngularInterpreter",\n          
"defaultInterpreter": false,\n          "editor": {\n            
"editOnDblClick": true\n          }\n        }\n      ],\n      "dependencies": 
[],\n      "option": {\n        "remote": true,\n        "port": -1,\n        
"perNote": "shared",\n        "perUser": "shared",\n        
"isExistingProcess": false,\n        "setPermission": false,\n        "users": 
[],\n        "isUserImpersonate": false\n      }\n    },\n    "2CKX8WPU1": {\n  
    "id": "2CKX8WPU1",\n      "name": "spark",\n      "group": "spark",\n      
"properties": {\n        "spark.executor.memory": "512m",\n        "args": 
"",\n        "zeppelin.spark.printREPLOutput": "true",\n
         "spark.cores.max": "",\n        
"zeppelin.dep.additionalRemoteRepository": 
"spark-packages,http://dl.bintray.com/spark-packages/maven,false;",\n        
"zeppelin.spark.sql.stacktrace": "false",\n        
"zeppelin.spark.importImplicit": "true",\n        
"zeppelin.spark.concurrentSQL": "false",\n        
"zeppelin.spark.useHiveContext": "true",\n        "zeppelin.pyspark.python": 
"python",\n        "zeppelin.dep.localrepo": "local-repo",\n        
"zeppelin.R.knitr": "true",\n        "zeppelin.spark.maxResult": "1000",\n      
  "master": "yarn-client",\n        "spark.app.name": "Zeppelin",\n        
"zeppelin.R.image.width": "100%",\n        "zeppelin.R.render.options": 
"out.format \\u003d \\u0027html\\u0027, comment \\u003d NA, echo \\u003d FALSE, 
results \\u003d \\u0027asis\\u0027, message \\u003d F, warning \\u003d F",\n    
    "zeppelin.R.cmd": "R"\n      },\n      "status": "READY",\n      
"interpreterGroup": [\n        {\n          "name": "spark",\n          
"class": "org
 .apache.zeppelin.spark.SparkInterpreter",\n          "defaultInterpreter": 
true,\n          "editor": {\n            "language": "scala"\n          }\n    
    },\n        {\n          "name": "sql",\n          "class": 
"org.apache.zeppelin.spark.SparkSqlInterpreter",\n          
"defaultInterpreter": false,\n          "editor": {\n            "language": 
"sql"\n          }\n        },\n        {\n          "name": "dep",\n          
"class": "org.apache.zeppelin.spark.DepInterpreter",\n          
"defaultInterpreter": false,\n          "editor": {\n            "language": 
"scala"\n          }\n        },\n        {\n          "name": "pyspark",\n     
     "class": "org.apache.zeppelin.spark.PySparkInterpreter",\n          
"defaultInterpreter": false,\n          "editor": {\n            "language": 
"python"\n          }\n        },\n        {\n          "name": "r",\n          
"class": "org.apache.zeppelin.spark.SparkRInterpreter",\n          
"defaultInterpreter": false,\n          "edi
 tor": {\n            "language": "r"\n          }\n        }\n      ],\n      
"dependencies": [],\n      "option": {\n        "remote": true,\n        
"port": -1,\n        "perNote": "shared",\n        "perUser": "shared",\n       
 "isExistingProcess": false,\n        "setPermission": false,\n        "users": 
[],\n        "isUserImpersonate": false\n      }\n    },\n    "2CK8A9MEG": {\n  
    "id": "2CK8A9MEG",\n      "name": "jdbc",\n      "group": "jdbc",\n      
"properties": {\n        "default.password": "",\n        
"zeppelin.jdbc.auth.type": "",\n        "common.max_count": "1000",\n        
"zeppelin.jdbc.principal": "",\n        "default.user": "gpadmin",\n        
"default.url": "jdbc:postgresql://localhost:5432/",\n        "default.driver": 
"org.postgresql.Driver",\n        "zeppelin.jdbc.keytab.location": "",\n        
"zeppelin.jdbc.concurrent.use": "true",\n        
"zeppelin.jdbc.concurrent.max_connection": "10"\n      },\n      "status": 
"READY",\n      "interpreterGroup":
  [\n        {\n          "name": "sql",\n          "class": 
"org.apache.zeppelin.jdbc.JDBCInterpreter",\n          "defaultInterpreter": 
false,\n          "editor": {\n            "language": "sql",\n            
"editOnDblClick": false\n          }\n        }\n      ],\n      
"dependencies": [],\n      "option": {\n        "remote": true,\n        
"port": -1,\n        "perNote": "shared",\n        "perUser": "shared",\n       
 "isExistingProcess": false,\n        "setPermission": false,\n        "users": 
[],\n        "isUserImpersonate": false\n      }\n    },\n    "2CKX6DGQZ": {\n  
    "id": "2CKX6DGQZ",\n      "name": "livy",\n      "group": "livy",\n      
"properties": {\n        "zeppelin.livy.pull_status.interval.millis": "1000",\n 
       "livy.spark.executor.memory": "",\n        
"zeppelin.livy.session.create_timeout": "120",\n        
"zeppelin.livy.principal": "",\n        "zeppelin.livy.spark.sql.maxResult": 
"1000",\n        "zeppelin.livy.keytab": "",\n        "zeppelin.liv
 y.concurrentSQL": "false",\n        "zeppelin.livy.spark.sql.field.truncate": 
"true",\n        "livy.spark.executor.cores": "",\n        
"zeppelin.livy.displayAppInfo": "false",\n        "zeppelin.livy.url": 
"http://localhost:8998",\n        "livy.spark.dynamicAllocation.minExecutors": 
"",\n        "livy.spark.driver.cores": "",\n        
"livy.spark.jars.packages": "",\n        
"livy.spark.dynamicAllocation.enabled": "",\n        
"livy.spark.executor.instances": "",\n        
"livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "",\n        
"livy.spark.dynamicAllocation.maxExecutors": "",\n        
"livy.spark.dynamicAllocation.initialExecutors": "",\n        
"livy.spark.driver.memory": ""\n      },\n      "status": "READY",\n      
"interpreterGroup": [\n        {\n          "name": "spark",\n          
"class": "org.apache.zeppelin.livy.LivySparkInterpreter",\n          
"defaultInterpreter": true,\n          "editor": {\n            "language": 
"scala",\n            "editOnDblCli
 ck": false\n          }\n        },\n        {\n          "name": "sql",\n     
     "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter",\n          
"defaultInterpreter": false,\n          "editor": {\n            "language": 
"sql",\n            "editOnDblClick": false\n          }\n        },\n        
{\n          "name": "pyspark",\n          "class": 
"org.apache.zeppelin.livy.LivyPySparkInterpreter",\n          
"defaultInterpreter": false,\n          "editor": {\n            "language": 
"python",\n            "editOnDblClick": false\n          }\n        },\n       
 {\n          "name": "pyspark3",\n          "class": 
"org.apache.zeppelin.livy.LivyPySpark3Interpreter",\n          
"defaultInterpreter": false,\n          "editor": {\n            "language": 
"python",\n            "editOnDblClick": false\n          }\n        },\n       
 {\n          "name": "sparkr",\n          "class": 
"org.apache.zeppelin.livy.LivySparkRInterpreter",\n          
"defaultInterpreter": false,
 \n          "editor": {\n            "language": "r",\n            
"editOnDblClick": false\n          }\n        }\n      ],\n      
"dependencies": [],\n      "option": {\n        "remote": true,\n        
"port": -1,\n        "perNote": "shared",\n        "perUser": "scoped",\n       
 "isExistingProcess": false,\n        "setPermission": false,\n        "users": 
[],\n        "isUserImpersonate": false\n      }\n    },\n    "2CKAY1A8Y": {\n  
    "id": "2CKAY1A8Y",\n      "name": "md",\n      "group": "md",\n      
"properties": {\n        "markdown.parser.type": "pegdown"\n      },\n      
"status": "READY",\n      "interpreterGroup": [\n        {\n          "name": 
"md",\n          "class": "org.apache.zeppelin.markdown.Markdown",\n          
"defaultInterpreter": false,\n          "editor": {\n            "language": 
"markdown",\n            "editOnDblClick": true\n          }\n        }\n      
],\n      "dependencies": [],\n      "option": {\n        "remote": true,\n     
   "port": 
 -1,\n        "perNote": "shared",\n        "perUser": "shared",\n        
"isExistingProcess": false,\n        "setPermission": false,\n        "users": 
[],\n        "isUserImpersonate": false\n      }\n    },\n    "2CHS8UYQQ": {\n  
    "id": "2CHS8UYQQ",\n      "name": "sh",\n      "group": "sh",\n      
"properties": {\n        "zeppelin.shell.keytab.location": "",\n        
"shell.command.timeout.millisecs": "60000",\n        
"zeppelin.shell.principal": "",\n        "zeppelin.shell.auth.type": ""\n      
},\n      "status": "READY",\n      "interpreterGroup": [\n        {\n          
"name": "sh",\n          "class": 
"org.apache.zeppelin.shell.ShellInterpreter",\n          "defaultInterpreter": 
false,\n          "editor": {\n            "language": "sh",\n            
"editOnDblClick": false\n          }\n        }\n      ],\n      
"dependencies": [],\n      "option": {\n        "remote": true,\n        
"port": -1,\n        "perNote": "shared",\n        "perUser": "shared",\n       
 "i
 sExistingProcess": false,\n        "setPermission": false,\n        "users": 
[],\n        "isUserImpersonate": false\n      }\n    }\n  },\n  
"interpreterBindings": {},\n  "interpreterRepositories": [\n    {\n      "id": 
"central",\n      "type": "default",\n      "url": 
"http://repo1.maven.org/maven2/",\n      "releasePolicy": {\n        "enabled": 
true,\n        "updatePolicy": "daily",\n        "checksumPolicy": "warn"\n     
 },\n      "snapshotPolicy": {\n        "enabled": true,\n        
"updatePolicy": "daily",\n        "checksumPolicy": "warn"\n      },\n      
"mirroredRepositories": [],\n      "repositoryManager": false\n    },\n    {\n  
    "id": "local",\n      "type": "default",\n      "url": 
"file:///home/zeppelin/.m2/repository",\n      "releasePolicy": {\n        
"enabled": true,\n        "updatePolicy": "daily",\n        "checksumPolicy": 
"warn"\n      },\n      "snapshotPolicy": {\n        "enabled": true,\n        
"updatePolicy": "daily",\n        "checksumPolicy": 
 "warn"\n      },\n      "mirroredRepositories": [],\n      
"repositoryManager": false\n    }\n  ]\n}\n'
+
+template_after_base = '{\n  "interpreterSettings": {\n    "2CHS8UYQQ": {\n     
 "status": "READY", \n      "group": "sh", \n      "name": "sh", \n      "id": 
"2CHS8UYQQ", \n      "interpreterGroup": [\n        {\n          "editor": {\n  
          "editOnDblClick": false, \n            "language": "sh"\n          }, 
\n          "defaultInterpreter": false, \n          "name": "sh", \n          
"class": "org.apache.zeppelin.shell.ShellInterpreter"\n        }\n      ], \n   
   "dependencies": [], \n      "properties": {\n        
"shell.command.timeout.millisecs": "60000", \n        
"zeppelin.shell.auth.type": "", \n        "zeppelin.shell.keytab.location": "", 
\n        "zeppelin.shell.principal": ""\n      }, \n      "option": {\n        
"setPermission": false, \n        "remote": true, \n        "users": [], \n     
   "isExistingProcess": false, \n        "perUser": "shared", \n        
"isUserImpersonate": false, \n        "perNote": "shared", \n        "port": 
-1\n      }\n    }, \
 n    "2CKAY1A8Y": {\n      "status": "READY", \n      "group": "md", \n      
"name": "md", \n      "id": "2CKAY1A8Y", \n      "interpreterGroup": [\n        
{\n          "editor": {\n            "editOnDblClick": true, \n            
"language": "markdown"\n          }, \n          "defaultInterpreter": false, 
\n          "name": "md", \n          "class": 
"org.apache.zeppelin.markdown.Markdown"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        "markdown.parser.type": 
"pegdown"\n      }, \n      "option": {\n        "setPermission": false, \n     
   "remote": true, \n        "users": [], \n        "isExistingProcess": false, 
\n        "perUser": "shared", \n        "isUserImpersonate": false, \n        
"perNote": "shared", \n        "port": -1\n      }\n    }, \n    "2CKX8WPU1": 
{\n      "status": "READY", \n      "group": "spark", \n      "name": "spark", 
\n      "id": "2CKX8WPU1", \n      "interpreterGroup": [\n        {\n          
"editor": {\n     
        "language": "scala"\n          }, \n          "defaultInterpreter": 
true, \n          "name": "spark", \n          "class": 
"org.apache.zeppelin.spark.SparkInterpreter"\n        }, \n        {\n          
"editor": {\n            "language": "sql"\n          }, \n          
"defaultInterpreter": false, \n          "name": "sql", \n          "class": 
"org.apache.zeppelin.spark.SparkSqlInterpreter"\n        }, \n        {\n       
   "editor": {\n            "language": "scala"\n          }, \n          
"defaultInterpreter": false, \n          "name": "dep", \n          "class": 
"org.apache.zeppelin.spark.DepInterpreter"\n        }, \n        {\n          
"editor": {\n            "language": "python"\n          }, \n          
"defaultInterpreter": false, \n          "name": "pyspark", \n          
"class": "org.apache.zeppelin.spark.PySparkInterpreter"\n        }, \n        
{\n          "editor": {\n            "language": "r"\n          }, \n          
"defaultInterpreter": false, 
 \n          "name": "r", \n          "class": 
"org.apache.zeppelin.spark.SparkRInterpreter"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        
"zeppelin.dep.additionalRemoteRepository": 
"spark-packages,http://dl.bintray.com/spark-packages/maven,false;", \n        
"zeppelin.dep.localrepo": "local-repo", \n        
"zeppelin.spark.useHiveContext": "true", \n        
"zeppelin.spark.printREPLOutput": "true", \n        "zeppelin.R.image.width": 
"100%", \n        "zeppelin.spark.importImplicit": "true", \n        
"spark.app.name": "Zeppelin", \n        "args": "", \n        
"zeppelin.spark.sql.stacktrace": "false", \n        
"zeppelin.spark.concurrentSQL": "false", \n        "zeppelin.R.cmd": "R", \n    
    "master": "yarn-client", \n        "zeppelin.pyspark.python": "python", \n  
      "zeppelin.R.knitr": "true", \n        "zeppelin.R.render.options": 
"out.format = \'html\', comment = NA, echo = FALSE, results = \'asis\', message 
= F, warning = F", \n      
   "spark.executor.memory": "512m", \n        "zeppelin.spark.maxResult": 
"1000", \n        "spark.cores.max": ""\n      }, \n      "option": {\n        
"setPermission": false, \n        "remote": true, \n        "users": [], \n     
   "isExistingProcess": false, \n        "perUser": "shared", \n        
"isUserImpersonate": false, \n        "perNote": "shared", \n        "port": 
-1\n      }\n    }, \n    "2CK8A9MEG": {\n      "status": "READY", \n      
"group": "jdbc", \n      "name": "jdbc", \n      "id": "2CK8A9MEG", \n      
"interpreterGroup": [\n        {\n          "editor": {\n            
"editOnDblClick": false, \n            "language": "sql"\n          }, \n       
   "defaultInterpreter": false, \n          "name": "sql", \n          "class": 
"org.apache.zeppelin.jdbc.JDBCInterpreter"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        "common.max_count": 
"1000", \n        "zeppelin.jdbc.keytab.location": "", \n        
"zeppelin.jdbc.concurrent.
 max_connection": "10", \n        "default.user": "gpadmin", \n        
"zeppelin.jdbc.auth.type": "", \n        "default.url": 
"jdbc:postgresql://localhost:5432/", \n        "default.driver": 
"org.postgresql.Driver", \n        "zeppelin.jdbc.concurrent.use": "true", \n   
     "default.password": "", \n        "zeppelin.jdbc.principal": ""\n      }, 
\n      "option": {\n        "setPermission": false, \n        "remote": true, 
\n        "users": [], \n        "isExistingProcess": false, \n        
"perUser": "shared", \n        "isUserImpersonate": false, \n        "perNote": 
"shared", \n        "port": -1\n      }\n    }, \n    "2CKEKWY8Z": {\n      
"status": "READY", \n      "group": "angular", \n      "name": "angular", \n    
  "id": "2CKEKWY8Z", \n      "interpreterGroup": [\n        {\n          
"editor": {\n            "editOnDblClick": true\n          }, \n          
"defaultInterpreter": false, \n          "name": "angular", \n          
"class": "org.apache.zeppelin.angular.Angu
 larInterpreter"\n        }\n      ], \n      "dependencies": [], \n      
"properties": {}, \n      "option": {\n        "setPermission": false, \n       
 "remote": true, \n        "users": [], \n        "isExistingProcess": false, 
\n        "perUser": "shared", \n        "isUserImpersonate": false, \n        
"perNote": "shared", \n        "port": -1\n      }\n    }, \n    "2CKX6DGQZ": 
{\n      "status": "READY", \n      "group": "livy", \n      "name": "livy", \n 
     "id": "2CKX6DGQZ", \n      "interpreterGroup": [\n        {\n          
"editor": {\n            "editOnDblClick": false, \n            "language": 
"scala"\n          }, \n          "defaultInterpreter": true, \n          
"name": "spark", \n          "class": 
"org.apache.zeppelin.livy.LivySparkInterpreter"\n        }, \n        {\n       
   "editor": {\n            "editOnDblClick": false, \n            "language": 
"sql"\n          }, \n          "defaultInterpreter": false, \n          
"name": "sql", \n          "class
 ": "org.apache.zeppelin.livy.LivySparkSQLInterpreter"\n        }, \n        
{\n          "editor": {\n            "editOnDblClick": false, \n            
"language": "python"\n          }, \n          "defaultInterpreter": false, \n  
        "name": "pyspark", \n          "class": 
"org.apache.zeppelin.livy.LivyPySparkInterpreter"\n        }, \n        {\n     
     "editor": {\n            "editOnDblClick": false, \n            
"language": "python"\n          }, \n          "defaultInterpreter": false, \n  
        "name": "pyspark3", \n          "class": 
"org.apache.zeppelin.livy.LivyPySpark3Interpreter"\n        }, \n        {\n    
      "editor": {\n            "editOnDblClick": false, \n            
"language": "r"\n          }, \n          "defaultInterpreter": false, \n       
   "name": "sparkr", \n          "class": 
"org.apache.zeppelin.livy.LivySparkRInterpreter"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        
"livy.spark.dynamicAllocation.initi
 alExecutors": "", \n        "zeppelin.livy.keytab": "", \n        
"zeppelin.livy.spark.sql.maxResult": "1000", \n        
"livy.spark.executor.instances": "", \n        "livy.spark.driver.memory": "", 
\n        "livy.spark.executor.memory": "", \n        
"livy.spark.dynamicAllocation.enabled": "", \n        
"livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "", \n        
"livy.spark.driver.cores": "", \n        
"zeppelin.livy.session.create_timeout": "120", \n        
"zeppelin.livy.principal": "", \n        "livy.spark.jars.packages": "", \n     
   "livy.spark.dynamicAllocation.maxExecutors": "", \n        
"zeppelin.livy.concurrentSQL": "false", \n        
"zeppelin.livy.displayAppInfo": "false", \n        
"livy.spark.dynamicAllocation.minExecutors": "", \n        "zeppelin.livy.url": 
"http://localhost:8998", \n        "zeppelin.livy.spark.sql.field.truncate": 
"true", \n        "zeppelin.livy.pull_status.interval.millis": "1000", \n       
 "livy.spark.executor.cores": ""\n     
  }, \n      "option": {\n        "setPermission": false, \n        "remote": 
true, \n        "users": [], \n        "isExistingProcess": false, \n        
"perUser": "scoped", \n        "isUserImpersonate": false, \n        "perNote": 
"shared", \n        "port": -1\n      }\n    }\n  }, \n  "interpreterBindings": 
{}, \n  "interpreterRepositories": [\n    {\n      "releasePolicy": {\n        
"checksumPolicy": "warn", \n        "enabled": true, \n        "updatePolicy": 
"daily"\n      }, \n      "mirroredRepositories": [], \n      "snapshotPolicy": 
{\n        "checksumPolicy": "warn", \n        "enabled": true, \n        
"updatePolicy": "daily"\n      }, \n      "url": 
"http://repo1.maven.org/maven2/", \n      "repositoryManager": false, \n      
"type": "default", \n      "id": "central"\n    }, \n    {\n      
"releasePolicy": {\n        "checksumPolicy": "warn", \n        "enabled": 
true, \n        "updatePolicy": "daily"\n      }, \n      
"mirroredRepositories": [], \n      "snapshot
 Policy": {\n        "checksumPolicy": "warn", \n        "enabled": true, \n    
    "updatePolicy": "daily"\n      }, \n      "url": 
"file:///home/zeppelin/.m2/repository", \n      "repositoryManager": false, \n  
    "type": "default", \n      "id": "local"\n    }\n  ]\n}'
+
+template_after_without_spark_and_livy = '{\n  "interpreterSettings": {\n    
"2CHS8UYQQ": {\n      "status": "READY", \n      "group": "sh", \n      "name": 
"sh", \n      "id": "2CHS8UYQQ", \n      "interpreterGroup": [\n        {\n     
     "editor": {\n            "editOnDblClick": false, \n            
"language": "sh"\n          }, \n          "defaultInterpreter": false, \n      
    "name": "sh", \n          "class": 
"org.apache.zeppelin.shell.ShellInterpreter"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        
"shell.command.timeout.millisecs": "60000", \n        
"zeppelin.shell.auth.type": "", \n        "zeppelin.shell.keytab.location": "", 
\n        "zeppelin.shell.principal": ""\n      }, \n      "option": {\n        
"setPermission": false, \n        "remote": true, \n        "users": [], \n     
   "isExistingProcess": false, \n        "perUser": "shared", \n        
"isUserImpersonate": false, \n        "perNote": "shared", \n        "port": -1\
 n      }\n    }, \n    "2CKAY1A8Y": {\n      "status": "READY", \n      
"group": "md", \n      "name": "md", \n      "id": "2CKAY1A8Y", \n      
"interpreterGroup": [\n        {\n          "editor": {\n            
"editOnDblClick": true, \n            "language": "markdown"\n          }, \n   
       "defaultInterpreter": false, \n          "name": "md", \n          
"class": "org.apache.zeppelin.markdown.Markdown"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        "markdown.parser.type": 
"pegdown"\n      }, \n      "option": {\n        "setPermission": false, \n     
   "remote": true, \n        "users": [], \n        "isExistingProcess": false, 
\n        "perUser": "shared", \n        "isUserImpersonate": false, \n        
"perNote": "shared", \n        "port": -1\n      }\n    }, \n    "2CKX8WPU1": 
{\n      "status": "READY", \n      "group": "spark", \n      "name": "spark", 
\n      "id": "2CKX8WPU1", \n      "interpreterGroup": [\n        {\n          
 "editor": {\n            "language": "scala"\n          }, \n          
"defaultInterpreter": true, \n          "name": "spark", \n          "class": 
"org.apache.zeppelin.spark.SparkInterpreter"\n        }, \n        {\n          
"editor": {\n            "language": "sql"\n          }, \n          
"defaultInterpreter": false, \n          "name": "sql", \n          "class": 
"org.apache.zeppelin.spark.SparkSqlInterpreter"\n        }, \n        {\n       
   "editor": {\n            "language": "scala"\n          }, \n          
"defaultInterpreter": false, \n          "name": "dep", \n          "class": 
"org.apache.zeppelin.spark.DepInterpreter"\n        }, \n        {\n          
"editor": {\n            "language": "python"\n          }, \n          
"defaultInterpreter": false, \n          "name": "pyspark", \n          
"class": "org.apache.zeppelin.spark.PySparkInterpreter"\n        }, \n        
{\n          "editor": {\n            "language": "r"\n          }, \n          
"defaultInt
 erpreter": false, \n          "name": "r", \n          "class": 
"org.apache.zeppelin.spark.SparkRInterpreter"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        
"zeppelin.dep.additionalRemoteRepository": 
"spark-packages,http://dl.bintray.com/spark-packages/maven,false;", \n        
"zeppelin.dep.localrepo": "local-repo", \n        
"zeppelin.spark.useHiveContext": "true", \n        
"zeppelin.spark.printREPLOutput": "true", \n        "zeppelin.R.image.width": 
"100%", \n        "zeppelin.spark.importImplicit": "true", \n        
"spark.app.name": "Zeppelin", \n        "args": "", \n        
"zeppelin.spark.sql.stacktrace": "false", \n        
"zeppelin.spark.concurrentSQL": "false", \n        "SPARK_HOME": 
"/usr/hdp/current/spark-client/", \n        "zeppelin.R.cmd": "R", \n        
"master": "yarn-client", \n        "zeppelin.pyspark.python": "python", \n      
  "zeppelin.R.knitr": "true", \n        "zeppelin.R.render.options": 
"out.format = \'html\', comment
  = NA, echo = FALSE, results = \'asis\', message = F, warning = F", \n        
"spark.executor.memory": "512m", \n        "zeppelin.spark.maxResult": "1000", 
\n        "spark.cores.max": ""\n      }, \n      "option": {\n        
"setPermission": false, \n        "remote": true, \n        "users": [], \n     
   "isExistingProcess": false, \n        "perUser": "shared", \n        
"isUserImpersonate": false, \n        "perNote": "shared", \n        "port": 
-1\n      }\n    }, \n    "2CK8A9MEG": {\n      "status": "READY", \n      
"group": "jdbc", \n      "name": "jdbc", \n      "id": "2CK8A9MEG", \n      
"interpreterGroup": [\n        {\n          "editor": {\n            
"editOnDblClick": false, \n            "language": "sql"\n          }, \n       
   "defaultInterpreter": false, \n          "name": "sql", \n          "class": 
"org.apache.zeppelin.jdbc.JDBCInterpreter"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        "common.max_count": 
"1000", \n     
    "zeppelin.jdbc.keytab.location": "", \n        
"zeppelin.jdbc.concurrent.max_connection": "10", \n        "default.user": 
"gpadmin", \n        "zeppelin.jdbc.auth.type": "", \n        "default.url": 
"jdbc:postgresql://localhost:5432/", \n        "default.driver": 
"org.postgresql.Driver", \n        "zeppelin.jdbc.concurrent.use": "true", \n   
     "default.password": "", \n        "zeppelin.jdbc.principal": ""\n      }, 
\n      "option": {\n        "setPermission": false, \n        "remote": true, 
\n        "users": [], \n        "isExistingProcess": false, \n        
"perUser": "shared", \n        "isUserImpersonate": false, \n        "perNote": 
"shared", \n        "port": -1\n      }\n    }, \n    "2CKEKWY8Z": {\n      
"status": "READY", \n      "group": "angular", \n      "name": "angular", \n    
  "id": "2CKEKWY8Z", \n      "interpreterGroup": [\n        {\n          
"editor": {\n            "editOnDblClick": true\n          }, \n          
"defaultInterpreter": false, \n       
    "name": "angular", \n          "class": 
"org.apache.zeppelin.angular.AngularInterpreter"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {}, \n      "option": {\n        
"setPermission": false, \n        "remote": true, \n        "users": [], \n     
   "isExistingProcess": false, \n        "perUser": "shared", \n        
"isUserImpersonate": false, \n        "perNote": "shared", \n        "port": 
-1\n      }\n    }\n  }, \n  "interpreterBindings": {}, \n  
"interpreterRepositories": [\n    {\n      "releasePolicy": {\n        
"checksumPolicy": "warn", \n        "enabled": true, \n        "updatePolicy": 
"daily"\n      }, \n      "mirroredRepositories": [], \n      "snapshotPolicy": 
{\n        "checksumPolicy": "warn", \n        "enabled": true, \n        
"updatePolicy": "daily"\n      }, \n      "url": 
"http://repo1.maven.org/maven2/", \n      "repositoryManager": false, \n      
"type": "default", \n      "id": "central"\n    }, \n    {\n      
"releasePolicy"
 : {\n        "checksumPolicy": "warn", \n        "enabled": true, \n        
"updatePolicy": "daily"\n      }, \n      "mirroredRepositories": [], \n      
"snapshotPolicy": {\n        "checksumPolicy": "warn", \n        "enabled": 
true, \n        "updatePolicy": "daily"\n      }, \n      "url": 
"file:///home/zeppelin/.m2/repository", \n      "repositoryManager": false, \n  
    "type": "default", \n      "id": "local"\n    }\n  ]\n}'
+
+template_after_kerberos = '{\n  "interpreterSettings": {\n    "2CHS8UYQQ": {\n 
     "status": "READY", \n      "group": "sh", \n      "name": "sh", \n      
"id": "2CHS8UYQQ", \n      "interpreterGroup": [\n        {\n          
"editor": {\n            "editOnDblClick": false, \n            "language": 
"sh"\n          }, \n          "defaultInterpreter": false, \n          "name": 
"sh", \n          "class": "org.apache.zeppelin.shell.ShellInterpreter"\n       
 }\n      ], \n      "dependencies": [], \n      "properties": {\n        
"shell.command.timeout.millisecs": "60000", \n        
"zeppelin.shell.auth.type": "", \n        "zeppelin.shell.keytab.location": "", 
\n        "zeppelin.shell.principal": ""\n      }, \n      "option": {\n        
"setPermission": false, \n        "remote": true, \n        "users": [], \n     
   "isExistingProcess": false, \n        "perUser": "shared", \n        
"isUserImpersonate": false, \n        "perNote": "shared", \n        "port": 
-1\n      }\n    
 }, \n    "2CKAY1A8Y": {\n      "status": "READY", \n      "group": "md", \n    
  "name": "md", \n      "id": "2CKAY1A8Y", \n      "interpreterGroup": [\n      
  {\n          "editor": {\n            "editOnDblClick": true, \n            
"language": "markdown"\n          }, \n          "defaultInterpreter": false, 
\n          "name": "md", \n          "class": 
"org.apache.zeppelin.markdown.Markdown"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        "markdown.parser.type": 
"pegdown"\n      }, \n      "option": {\n        "setPermission": false, \n     
   "remote": true, \n        "users": [], \n        "isExistingProcess": false, 
\n        "perUser": "shared", \n        "isUserImpersonate": false, \n        
"perNote": "shared", \n        "port": -1\n      }\n    }, \n    "2CKX8WPU1": 
{\n      "status": "READY", \n      "group": "spark", \n      "name": "spark", 
\n      "id": "2CKX8WPU1", \n      "interpreterGroup": [\n        {\n          
"editor": {\n 
            "language": "scala"\n          }, \n          "defaultInterpreter": 
true, \n          "name": "spark", \n          "class": 
"org.apache.zeppelin.spark.SparkInterpreter"\n        }, \n        {\n          
"editor": {\n            "language": "sql"\n          }, \n          
"defaultInterpreter": false, \n          "name": "sql", \n          "class": 
"org.apache.zeppelin.spark.SparkSqlInterpreter"\n        }, \n        {\n       
   "editor": {\n            "language": "scala"\n          }, \n          
"defaultInterpreter": false, \n          "name": "dep", \n          "class": 
"org.apache.zeppelin.spark.DepInterpreter"\n        }, \n        {\n          
"editor": {\n            "language": "python"\n          }, \n          
"defaultInterpreter": false, \n          "name": "pyspark", \n          
"class": "org.apache.zeppelin.spark.PySparkInterpreter"\n        }, \n        
{\n          "editor": {\n            "language": "r"\n          }, \n          
"defaultInterpreter": fal
 se, \n          "name": "r", \n          "class": 
"org.apache.zeppelin.spark.SparkRInterpreter"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        
"zeppelin.dep.additionalRemoteRepository": 
"spark-packages,http://dl.bintray.com/spark-packages/maven,false;";, \n        
"zeppelin.dep.localrepo": "local-repo", \n        
"zeppelin.spark.useHiveContext": "true", \n        
"zeppelin.spark.printREPLOutput": "true", \n        "spark.yarn.principal": "", 
\n        "zeppelin.R.image.width": "100%", \n        
"zeppelin.spark.importImplicit": "true", \n        "spark.app.name": 
"Zeppelin", \n        "args": "", \n        "zeppelin.spark.sql.stacktrace": 
"false", \n        "zeppelin.spark.concurrentSQL": "false", \n        
"spark.yarn.keytab": "", \n        "zeppelin.R.cmd": "R", \n        "master": 
"yarn-client", \n        "zeppelin.pyspark.python": "python", \n        
"zeppelin.R.knitr": "true", \n        "zeppelin.R.render.options": "out.format 
= \'html\', commen
 t = NA, echo = FALSE, results = \'asis\', message = F, warning = F", \n        
"spark.executor.memory": "512m", \n        "zeppelin.spark.maxResult": "1000", 
\n        "spark.cores.max": ""\n      }, \n      "option": {\n        
"setPermission": false, \n        "remote": true, \n        "users": [], \n     
   "isExistingProcess": false, \n        "perUser": "shared", \n        
"isUserImpersonate": false, \n        "perNote": "shared", \n        "port": 
-1\n      }\n    }, \n    "2CK8A9MEG": {\n      "status": "READY", \n      
"group": "jdbc", \n      "name": "jdbc", \n      "id": "2CK8A9MEG", \n      
"interpreterGroup": [\n        {\n          "editor": {\n            
"editOnDblClick": false, \n            "language": "sql"\n          }, \n       
   "defaultInterpreter": false, \n          "name": "sql", \n          "class": 
"org.apache.zeppelin.jdbc.JDBCInterpreter"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {\n        "common.max_count": 
"1000", \n    
     "zeppelin.jdbc.keytab.location": "", \n        
"zeppelin.jdbc.concurrent.max_connection": "10", \n        "default.user": 
"gpadmin", \n        "zeppelin.jdbc.auth.type": "SIMPLE", \n        
"default.url": "jdbc:postgresql://localhost:5432/", \n        "default.driver": 
"org.postgresql.Driver", \n        "zeppelin.jdbc.concurrent.use": "true", \n   
     "default.password": "", \n        "zeppelin.jdbc.principal": ""\n      }, 
\n      "option": {\n        "setPermission": false, \n        "remote": true, 
\n        "users": [], \n        "isExistingProcess": false, \n        
"perUser": "shared", \n        "isUserImpersonate": false, \n        "perNote": 
"shared", \n        "port": -1\n      }\n    }, \n    "2CKEKWY8Z": {\n      
"status": "READY", \n      "group": "angular", \n      "name": "angular", \n    
  "id": "2CKEKWY8Z", \n      "interpreterGroup": [\n        {\n          
"editor": {\n            "editOnDblClick": true\n          }, \n          
"defaultInterpreter": false, \n
           "name": "angular", \n          "class": 
"org.apache.zeppelin.angular.AngularInterpreter"\n        }\n      ], \n      
"dependencies": [], \n      "properties": {}, \n      "option": {\n        
"setPermission": false, \n        "remote": true, \n        "users": [], \n     
   "isExistingProcess": false, \n        "perUser": "shared", \n        
"isUserImpersonate": false, \n        "perNote": "shared", \n        "port": 
-1\n      }\n    }, \n    "2CKX6DGQZ": {\n      "status": "READY", \n      
"group": "livy", \n      "name": "livy", \n      "id": "2CKX6DGQZ", \n      
"interpreterGroup": [\n        {\n          "editor": {\n            
"editOnDblClick": false, \n            "language": "scala"\n          }, \n     
     "defaultInterpreter": true, \n          "name": "spark", \n          
"class": "org.apache.zeppelin.livy.LivySparkInterpreter"\n        }, \n        
{\n          "editor": {\n            "editOnDblClick": false, \n            
"language": "sql"\n          }, \n 
          "defaultInterpreter": false, \n          "name": "sql", \n          
"class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter"\n        }, \n      
  {\n          "editor": {\n            "editOnDblClick": false, \n            
"language": "python"\n          }, \n          "defaultInterpreter": false, \n  
        "name": "pyspark", \n          "class": 
"org.apache.zeppelin.livy.LivyPySparkInterpreter"\n        }, \n        {\n     
     "editor": {\n            "editOnDblClick": false, \n            
"language": "python"\n          }, \n          "defaultInterpreter": false, \n  
        "name": "pyspark3", \n          "class": 
"org.apache.zeppelin.livy.LivyPySpark3Interpreter"\n        }, \n        {\n    
      "editor": {\n            "editOnDblClick": false, \n            
"language": "r"\n          }, \n          "defaultInterpreter": false, \n       
   "name": "sparkr", \n          "class": 
"org.apache.zeppelin.livy.LivySparkRInterpreter"\n        }\n      ], \n      
"depe
 ndencies": [], \n      "properties": {\n        
"livy.spark.dynamicAllocation.initialExecutors": "", \n        
"zeppelin.livy.keytab": "", \n        "zeppelin.livy.spark.sql.maxResult": 
"1000", \n        "livy.spark.executor.instances": "", \n        
"livy.spark.driver.memory": "", \n        "livy.spark.executor.memory": "", \n  
      "livy.spark.dynamicAllocation.enabled": "", \n        
"livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "", \n        
"livy.spark.driver.cores": "", \n        
"zeppelin.livy.session.create_timeout": "120", \n        
"zeppelin.livy.principal": "", \n        "livy.spark.jars.packages": "", \n     
   "livy.spark.dynamicAllocation.maxExecutors": "", \n        
"zeppelin.livy.concurrentSQL": "false", \n        
"zeppelin.livy.displayAppInfo": "false", \n        
"livy.spark.dynamicAllocation.minExecutors": "", \n        "zeppelin.livy.url": 
"http://localhost:8998";, \n        "zeppelin.livy.spark.sql.field.truncate": 
"true", \n        "zeppelin.livy.pul
 l_status.interval.millis": "1000", \n        "livy.spark.executor.cores": ""\n 
     }, \n      "option": {\n        "setPermission": false, \n        
"remote": true, \n        "users": [], \n        "isExistingProcess": false, \n 
       "perUser": "scoped", \n        "isUserImpersonate": false, \n        
"perNote": "shared", \n        "port": -1\n      }\n    }\n  }, \n  
"interpreterBindings": {}, \n  "interpreterRepositories": [\n    {\n      
"releasePolicy": {\n        "checksumPolicy": "warn", \n        "enabled": 
true, \n        "updatePolicy": "daily"\n      }, \n      
"mirroredRepositories": [], \n      "snapshotPolicy": {\n        
"checksumPolicy": "warn", \n        "enabled": true, \n        "updatePolicy": 
"daily"\n      }, \n      "url": "http://repo1.maven.org/maven2/";, \n      
"repositoryManager": false, \n      "type": "default", \n      "id": 
"central"\n    }, \n    {\n      "releasePolicy": {\n        "checksumPolicy": 
"warn", \n        "enabled": true, \n        "upd
 atePolicy": "daily"\n      }, \n      "mirroredRepositories": [], \n      
"snapshotPolicy": {\n        "checksumPolicy": "warn", \n        "enabled": 
true, \n        "updatePolicy": "daily"\n      }, \n      "url": 
"file:///home/zeppelin/.m2/repository", \n      "repositoryManager": false, \n  
    "type": "default", \n      "id": "local"\n    }\n  ]\n}'
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py 
b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
new file mode 100644
index 0000000..9a28e68
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+'''
+
+import glob
+
+import time
+from mock.mock import MagicMock, patch, call
+from resource_management.core import sudo
+from stacks.utils.RMFTestCase import *
+
+import interpreter_json_generated
+
+
[email protected](glob, "glob", new=MagicMock(return_value=["/tmp"]))
[email protected](sudo, "read_file",
+              new=MagicMock(return_value=interpreter_json_generated.template))
+class TestZeppelin070(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "ZEPPELIN/0.7.0/package"
+  STACK_VERSION = "2.5"
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/var/log/zeppelin',
+                              owner='zeppelin',
+                              group='zeppelin',
+                              create_parents=True,
+                              mode=0755,
+                              cd_access='a',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/zeppelin',
+                              owner='zeppelin',
+                              create_parents=True,
+                              group='zeppelin',
+                              mode=0755,
+                              cd_access='a',
+                              )
+    self.assertResourceCalled('Directory', '/usr/hdp/current/zeppelin-server',
+                              owner='zeppelin',
+                              group='zeppelin',
+                              create_parents=True,
+                              mode=0755,
+                              cd_access='a',
+                              )
+    self.assertResourceCalled('Execute', (
+    'chown', '-R', u'zeppelin:zeppelin', '/var/run/zeppelin'), sudo=True)
+    self.assertResourceCalled('XmlConfig', 'zeppelin-site.xml',
+                              owner='zeppelin',
+                              group='zeppelin',
+                              conf_dir='/etc/zeppelin/conf',
+                              
configurations=self.getConfig()['configurations'][
+                                'zeppelin-config'],
+                              )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/zeppelin-env.sh',
+                              owner='zeppelin',
+                              content=InlineTemplate(
+                                self.getConfig()['configurations'][
+                                  'zeppelin-env']['zeppelin_env_content']),
+                              group='zeppelin',
+                              )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/shiro.ini',
+                              owner='zeppelin',
+                              content=InlineTemplate(
+                                self.getConfig()['configurations'][
+                                  'zeppelin-shiro-ini']['shiro_ini_content']),
+                              group='zeppelin',
+                              )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/log4j.properties',
+                              owner=u'zeppelin',
+                              content=u'log4j.rootLogger = INFO, dailyfile',
+                              group=u'zeppelin',
+                              )
+    self.assertResourceCalled('Directory',
+                              '/etc/zeppelin/conf/external-dependency-conf',
+                              owner='zeppelin',
+                              group='zeppelin',
+                              create_parents=True,
+                              mode=0755,
+                              cd_access='a',
+                              )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/var/log/zeppelin',
+                              owner='zeppelin',
+                              group='zeppelin',
+                              create_parents=True,
+                              mode=0755,
+                              cd_access='a',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/zeppelin',
+                              owner='zeppelin',
+                              create_parents=True,
+                              group='zeppelin',
+                              mode=0755,
+                              cd_access='a',
+                              )
+    self.assertResourceCalled('Directory', '/usr/hdp/current/zeppelin-server',
+                              owner='zeppelin',
+                              group='zeppelin',
+                              create_parents=True,
+                              mode=0755,
+                              cd_access='a',
+                              )
+    self.assertResourceCalled('Execute', (
+    'chown', '-R', u'zeppelin:zeppelin', '/var/run/zeppelin'), sudo=True)
+    self.assertResourceCalled('XmlConfig', 'zeppelin-site.xml',
+                              owner='zeppelin',
+                              group='zeppelin',
+                              conf_dir='/etc/zeppelin/conf',
+                              
configurations=self.getConfig()['configurations'][
+                                'zeppelin-config'],
+                              )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/zeppelin-env.sh',
+                              owner='zeppelin',
+                              content=InlineTemplate(
+                                self.getConfig()['configurations'][
+                                  'zeppelin-env']['zeppelin_env_content']),
+                              group='zeppelin',
+                              )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/shiro.ini',
+                              owner='zeppelin',
+                              content=InlineTemplate(
+                                self.getConfig()['configurations'][
+                                  'zeppelin-shiro-ini']['shiro_ini_content']),
+                              group='zeppelin',
+                              )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/log4j.properties',
+                              owner=u'zeppelin',
+                              content=u'log4j.rootLogger = INFO, dailyfile',
+                              group=u'zeppelin',
+                              )
+    self.assertResourceCalled('Directory',
+                              '/etc/zeppelin/conf/external-dependency-conf',
+                              owner='zeppelin',
+                              group='zeppelin',
+                              create_parents=True,
+                              mode=0755,
+                              cd_access='a',
+                              )
+
+  def test_configure_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
+                       classname="Master",
+                       command="configure",
+                       config_file="default.json",
+                       stack_version=self.STACK_VERSION,
+                       target=RMFTestCase.TARGET_COMMON_SERVICES
+                       )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
+                       classname="Master",
+                       command="stop",
+                       config_file="default.json",
+                       stack_version=self.STACK_VERSION,
+                       target=RMFTestCase.TARGET_COMMON_SERVICES
+                       )
+    self.assertResourceCalled('Directory', '/var/log/zeppelin',
+                              owner='zeppelin',
+                              group='zeppelin',
+                              create_parents=True,
+                              mode=0755,
+                              cd_access='a',
+                              )
+    self.assertResourceCalled('Execute', (
+    'chown', '-R', u'zeppelin:zeppelin', '/var/run/zeppelin'),
+                              sudo=True,
+                              )
+    self.assertResourceCalled('Execute',
+                              
'/usr/hdp/current/zeppelin-server/bin/zeppelin-daemon.sh stop >> 
/var/log/zeppelin/zeppelin-setup.log',
+                              user='zeppelin',
+                              )
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
+                       classname="Master",
+                       command="start",
+                       config_file="default.json",
+                       stack_version=self.STACK_VERSION,
+                       target=RMFTestCase.TARGET_COMMON_SERVICES
+                       )
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', (
+    'chown', '-R', u'zeppelin:zeppelin', '/etc/zeppelin'),
+                              sudo=True,
+                              )
+
+  @patch('os.path.exists', return_value=True)
+  def test_start_secured(self, os_path_exists_mock):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/master.py",
+                       classname="Master",
+                       command="start",
+                       config_file="default.json",
+                       stack_version=self.STACK_VERSION,
+                       target=RMFTestCase.TARGET_COMMON_SERVICES
+                       )
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', (
+    'chown', '-R', u'zeppelin:zeppelin', '/etc/zeppelin'),
+                              sudo=True,
+                              )
+    self.assertResourceCalled('Execute', ('chown', '-R', 'zeppelin:zeppelin',
+                                          
'/usr/hdp/current/zeppelin-server/notebook'),
+                              sudo=True,
+                              )
+    self.assertResourceCalled('HdfsResource', '/user/zeppelin',
+                              
hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
+                              
default_fs=u'hdfs://c6401.ambari.apache.org:8020',
+                              
hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site={u'a': u'b'},
+                              keytab=UnknownConfigurationMock(),
+                              kinit_path_local='/usr/bin/kinit',
+                              user='hdfs',
+                              owner='zeppelin',
+                              principal_name=UnknownConfigurationMock(),
+                              recursive_chown=True,
+                              security_enabled=False,
+                              
hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+                              type='directory',
+                              action=['create_on_execute'],
+                              recursive_chmod=True
+                              )
+    self.assertResourceCalled('HdfsResource', '/user/zeppelin/test',
+                              
hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
+                              
default_fs=u'hdfs://c6401.ambari.apache.org:8020',
+                              
hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site={u'a': u'b'},
+                              kinit_path_local='/usr/bin/kinit',
+                              user='hdfs',
+                              owner='zeppelin',
+                              recursive_chown=True,
+                              
hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+                              type='directory',
+                              action=['create_on_execute'],
+                              recursive_chmod=True,
+                              keytab=UnknownConfigurationMock(),
+                              principal_name=UnknownConfigurationMock(),
+                              security_enabled=False,
+                              )
+    self.assertResourceCalled('HdfsResource', '/apps/zeppelin',
+                              
hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
+                              
default_fs=u'hdfs://c6401.ambari.apache.org:8020',
+                              
hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site={u'a': u'b'},
+                              kinit_path_local='/usr/bin/kinit',
+                              user='hdfs',
+                              owner='zeppelin',
+                              recursive_chown=True,
+                              
hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+                              type='directory',
+                              action=['create_on_execute'],
+                              recursive_chmod=True,
+                              keytab=UnknownConfigurationMock(),
+                              principal_name=UnknownConfigurationMock(),
+                              security_enabled=False,
+                              )
+    self.assertResourceCalled('HdfsResource', '/apps/zeppelin/tmp',
+                              
hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
+                              source='/tmp',
+                              
default_fs=u'hdfs://c6401.ambari.apache.org:8020',
+                              replace_existing_files=True,
+                              
hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site={u'a': u'b'},
+                              kinit_path_local='/usr/bin/kinit',
+                              user='hdfs',
+                              owner='zeppelin',
+                              group='zeppelin',
+                              
hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+                              type='file',
+                              action=['create_on_execute'],
+                              mode=0444,
+                              keytab=UnknownConfigurationMock(),
+                              principal_name=UnknownConfigurationMock(),
+                              security_enabled=False,
+                              )
+    self.assertResourceCalled('HdfsResource', None,
+                              
hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
+                              
default_fs=u'hdfs://c6401.ambari.apache.org:8020',
+                              
hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site={u'a': u'b'},
+                              kinit_path_local='/usr/bin/kinit',
+                              user='hdfs',
+                              action=['execute'],
+                              
hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+                              keytab=UnknownConfigurationMock(),
+                              principal_name=UnknownConfigurationMock(),
+                              security_enabled=False,
+                              )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
+                              
content=interpreter_json_generated.template_after_base,
+                              owner='zeppelin',
+                              group='zeppelin',
+                              )
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
+                              
content=interpreter_json_generated.template_after_without_spark_and_livy,
+                              owner='zeppelin',
+                              group='zeppelin')
+
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
+                              
content=interpreter_json_generated.template_after_kerberos,
+                              owner='zeppelin',
+                              group='zeppelin')
+
+    self.assertResourceCalled('Execute',
+                              
'/usr/hdp/current/zeppelin-server/bin/zeppelin-daemon.sh restart >> 
/var/log/zeppelin/zeppelin-setup.log',
+                              user='zeppelin'
+                              )
+    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/a90f3b36/ambari-server/src/test/python/stacks/2.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/configs/default.json 
b/ambari-server/src/test/python/stacks/2.6/configs/default.json
index 31f3dbd..10647fd 100644
--- a/ambari-server/src/test/python/stacks/2.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.6/configs/default.json
@@ -320,46 +320,50 @@
       "content": "<property><name>content</name><description>Custom solrconfig 
properties</description><value></value></property>"
     },
     "zeppelin-env": {
-      "zeppelin.server.kerberos.keytab": "", 
-      "shiro_ini_content": "\n[users]\n# List of users with their password 
allowed to access Zeppelin.\n# To use a different strategy (LDAP / Database / 
...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections\n#admin = 
password1\n#user1 = password2, role1, role2\n#user2 = password3, role3\n#user3 
= password4, role2\n\n# Sample LDAP configuration, for user Authentication, 
currently tested for single Realm\n[main]\n#ldapRealm = 
org.apache.shiro.realm.ldap.JndiLdapRealm\n#ldapRealm.userDnTemplate = 
uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com\n#ldapRealm.contextFactory.url
 = ldap://ldaphost:389\n#ldapRealm.contextFactory.authenticationMechanism = 
SIMPLE\n#sessionManager = 
org.apache.shiro.web.session.mgt.DefaultWebSessionManager\n#securityManager.sessionManager
 = $sessionManager\n# 86,400,000 milliseconds = 24 
hour\n#securityManager.sessionManager.globalSessionTimeout = 
86400000\nshiro.loginUrl = /api/login\n\n[urls]\n# anon means the acce
 ss is anonymous.\n# authcBasic means Basic Auth Security\n# To enfore 
security, comment the line below and uncomment the next one\n/api/version = 
anon\n/** = anon\n#/** = authc", 
-      "zeppelin.spark.jar.dir": "/apps/zeppelin", 
-      "zeppelin.executor.mem": "512m", 
-      "zeppelin_pid_dir": "/var/run/zeppelin", 
-      "zeppelin.executor.instances": "2", 
-      "log4j_properties_content": "\nlog4j.rootLogger = INFO, 
dailyfile\nlog4j.appender.stdout = 
org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - 
%m%n\nlog4j.appender.dailyfile.DatePattern=.yyyy-MM-dd\nlog4j.appender.dailyfile.Threshold
 = INFO\nlog4j.appender.dailyfile = 
org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.dailyfile.File = 
${zeppelin.log.file}\nlog4j.appender.dailyfile.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.dailyfile.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - %m%n",
-      "zeppelin.server.kerberos.principal": "", 
-      "zeppelin_user": "zeppelin", 
-      "zeppelin_env_content": "\n# Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode\nexport 
MASTER=yarn-client\nexport SPARK_YARN_JAR={{spark_jar}}\n\n\n# Where log files 
are stored.  PWD by default.\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}\n\n# 
The pid files are stored. /tmp by default.\nexport 
ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}\n\n\nexport 
JAVA_HOME={{java64_home}}\n\n# Additional jvm options. for example, export 
ZEPPELIN_JAVA_OPTS=\"-Dspark.executor.memory=8g -Dspark.cores.max=16\"\nexport 
ZEPPELIN_JAVA_OPTS=\"-Dhdp.version={{full_stack_version}} 
-Dspark.executor.memory={{executor_mem}} 
-Dspark.executor.instances={{executor_instances}} 
-Dspark.yarn.queue={{spark_queue}}\"\n\n\n# Zeppelin jvm mem options Default 
-Xmx1024m -XX:MaxPermSize=512m\n# export ZEPPELIN_MEM\n\n# zeppelin interpreter 
process jvm mem options. Defualt = ZEPPELIN_MEM\n# export 
ZEPPELIN_INTP_MEM\n\n# zeppelin interpreter process jvm options. Default = 
ZEPPELIN_JA
 VA_OPTS\n# export ZEPPELIN_INTP_JAVA_OPTS\n\n# Where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_DIR\n\n# Id of notebook to be displayed in homescreen. ex) 
2A94M5J1Z\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN\n\n# hide homescreen notebook 
from list when this value set to \"true\". default \"false\"\n# export 
ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE\n\n# Bucket where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_S3_BUCKET\n\n# User in bucket where notebook saved. For 
example bucket/user/notebook/2A94M5J1Z/note.json\n# export 
ZEPPELIN_NOTEBOOK_S3_USER\n\n# A string representing this instance of zeppelin. 
$USER by default\n# export ZEPPELIN_IDENT_STRING\n\n# The scheduling priority 
for daemons. Defaults to 0.\n# export ZEPPELIN_NICENESS\n\n\n#### Spark 
interpreter configuration ####\n\n## Use provided spark installation ##\n## 
defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit\n##\n# (required) When it is defined, load it instead of Zeppelin 
embedded Spark libraries\n
 export SPARK_HOME={{spark_home}}\n\n# (optional) extra options to pass to 
spark submit. eg) \"--driver-memory 512M --executor-memory 1G\".\n# export 
SPARK_SUBMIT_OPTIONS\n\n## Use embedded spark binaries ##\n## without 
SPARK_HOME defined, Zeppelin still able to run spark interpreter process using 
embedded spark binaries.\n## however, it is not encouraged when you can define 
SPARK_HOME\n##\n# Options read in YARN client mode\n# yarn-site.xml is located 
in configuration directory in HADOOP_CONF_DIR.\nexport 
HADOOP_CONF_DIR=/etc/hadoop/conf\n\n# Pyspark (supported with Spark 1.2.1 and 
above)\n# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI\n# path to 
the python command. must be the same path on the driver(Zeppelin) and all 
workers.\n# export PYSPARK_PYTHON\n\nexport 
PYTHONPATH=\"${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip\"\nexport
 SPARK_YARN_USER_ENV=\"PYTHONPATH=${PYTHONPAT
 H}\"\n\n## Spark interpreter options ##\n##\n# Use HiveContext instead of 
SQLContext if set true. true by default.\n# export 
ZEPPELIN_SPARK_USEHIVECONTEXT\n\n# Execute multiple SQL concurrently if set 
true. false by default.\n# export ZEPPELIN_SPARK_CONCURRENTSQL\n\n# Max number 
of SparkSQL result to display. 1000 by default.\n# export 
ZEPPELIN_SPARK_MAXRESULT", 
-      "zeppelin_log_dir": "/var/log/zeppelin", 
+      "zeppelin.server.kerberos.keytab": "",
+      "zeppelin.spark.jar.dir": "/apps/zeppelin",
+      "zeppelin.executor.mem": "512m",
+      "zeppelin_pid_dir": "/var/run/zeppelin",
+      "zeppelin.executor.instances": "2",
+      "zeppelin.server.kerberos.principal": "",
+      "zeppelin_user": "zeppelin",
+      "zeppelin_env_content": "\n# Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode\nexport 
MASTER=yarn-client\nexport SPARK_YARN_JAR={{spark_jar}}\n\n\n# Where log files 
are stored.  PWD by default.\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}\n\n# 
The pid files are stored. /tmp by default.\nexport 
ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}\n\n\nexport 
JAVA_HOME={{java64_home}}\n\n# Additional jvm options. for example, export 
ZEPPELIN_JAVA_OPTS=\"-Dspark.executor.memory=8g -Dspark.cores.max=16\"\nexport 
ZEPPELIN_JAVA_OPTS=\"-Dhdp.version={{full_stack_version}} 
-Dspark.executor.memory={{executor_mem}} 
-Dspark.executor.instances={{executor_instances}} 
-Dspark.yarn.queue={{spark_queue}}\"\n\n\n# Zeppelin jvm mem options Default 
-Xmx1024m -XX:MaxPermSize=512m\n# export ZEPPELIN_MEM\n\n# zeppelin interpreter 
process jvm mem options. Defualt = ZEPPELIN_MEM\n# export 
ZEPPELIN_INTP_MEM\n\n# zeppelin interpreter process jvm options. Default = 
ZEPPELIN_JA
 VA_OPTS\n# export ZEPPELIN_INTP_JAVA_OPTS\n\n# Where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_DIR\n\n# Id of notebook to be displayed in homescreen. ex) 
2A94M5J1Z\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN\n\n# hide homescreen notebook 
from list when this value set to \"true\". default \"false\"\n# export 
ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE\n\n# Bucket where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_S3_BUCKET\n\n# User in bucket where notebook saved. For 
example bucket/user/notebook/2A94M5J1Z/note.json\n# export 
ZEPPELIN_NOTEBOOK_S3_USER\n\n# A string representing this instance of zeppelin. 
$USER by default\n# export ZEPPELIN_IDENT_STRING\n\n# The scheduling priority 
for daemons. Defaults to 0.\n# export ZEPPELIN_NICENESS\n\n\n#### Spark 
interpreter configuration ####\n\n## Use provided spark installation ##\n## 
defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit\n##\n# (required) When it is defined, load it instead of Zeppelin 
embedded Spark libraries\n
 export SPARK_HOME={{spark_home}}\n\n# (optional) extra options to pass to 
spark submit. eg) \"--driver-memory 512M --executor-memory 1G\".\n# export 
SPARK_SUBMIT_OPTIONS\n\n## Use embedded spark binaries ##\n## without 
SPARK_HOME defined, Zeppelin still able to run spark interpreter process using 
embedded spark binaries.\n## however, it is not encouraged when you can define 
SPARK_HOME\n##\n# Options read in YARN client mode\n# yarn-site.xml is located 
in configuration directory in HADOOP_CONF_DIR.\nexport 
HADOOP_CONF_DIR=/etc/hadoop/conf\n\n# Pyspark (supported with Spark 1.2.1 and 
above)\n# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI\n# path to 
the python command. must be the same path on the driver(Zeppelin) and all 
workers.\n# export PYSPARK_PYTHON\n\nexport 
PYTHONPATH=\"${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip\"\nexport
 SPARK_YARN_USER_ENV=\"PYTHONPATH=${PYTHONPAT
 H}\"\n\n## Spark interpreter options ##\n##\n# Use HiveContext instead of 
SQLContext if set true. true by default.\n# export 
ZEPPELIN_SPARK_USEHIVECONTEXT\n\n# Execute multiple SQL concurrently if set 
true. false by default.\n# export ZEPPELIN_SPARK_CONCURRENTSQL\n\n# Max number 
of SparkSQL result to display. 1000 by default.\n# export 
ZEPPELIN_SPARK_MAXRESULT",
+      "zeppelin_log_dir": "/var/log/zeppelin",
       "zeppelin_group": "zeppelin"
     },
-"zeppelin-config": {
-            "zeppelin.server.port": "9995",
-            "zeppelin.server.ssl.port": "9995",
-            "zeppelin.ssl.truststore.password": "change me", 
-            "zeppelin.interpreters": 
"org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter",
-            "zeppelin.interpreter.group.order": 
"spark,angular,jdbc,livy,md,sh",
-            "zeppelin.ssl.truststore.path": "conf/truststore", 
-            "zeppelin.notebook.dir": "notebook", 
-            "zeppelin.ssl.keystore.password": "change me", 
-            "zeppelin.ssl.keystore.path": "conf/keystore", 
-            "zeppelin.server.addr": "0.0.0.0", 
-            "zeppelin.ssl.client.auth": "false", 
-            "zeppelin.notebook.homescreen": " ", 
-            "zeppelin.interpreter.dir": "interpreter", 
-            "zeppelin.ssl.keystore.type": "JKS", 
-            "zeppelin.notebook.s3.user": "user", 
-            "zeppelin.ssl.key.manager.password": "change me", 
-            "zeppelin.anonymous.allowed": "true", 
-            "zeppelin.ssl.truststore.type": "JKS", 
-            "zeppelin.ssl": "false", 
-            "zeppelin.notebook.storage": 
"org.apache.zeppelin.notebook.repo.VFSNotebookRepo", 
-            "zeppelin.websocket.max.text.message.size": "1024000", 
-            "zeppelin.interpreter.connect.timeout": "30000", 
-            "zeppelin.notebook.s3.bucket": "zeppelin", 
-            "zeppelin.notebook.homescreen.hide": "false", 
-            "zeppelin.server.allowed.origins": "*"
-        },
+    "zeppelin-shiro-ini": {
+      "shiro_ini_content": "\n[users]\n# List of users with their password 
allowed to access Zeppelin.\n# To use a different strategy (LDAP / Database / 
...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections\n#admin = 
password1\n#user1 = password2, role1, role2\n#user2 = password3, role3\n#user3 
= password4, role2\n\n# Sample LDAP configuration, for user Authentication, 
currently tested for single Realm\n[main]\n#ldapRealm = 
org.apache.shiro.realm.ldap.JndiLdapRealm\n#ldapRealm.userDnTemplate = 
uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com\n#ldapRealm.contextFactory.url
 = ldap://ldaphost:389\n#ldapRealm.contextFactory.authenticationMechanism = 
SIMPLE\n#sessionManager = 
org.apache.shiro.web.session.mgt.DefaultWebSessionManager\n#securityManager.sessionManager
 = $sessionManager\n# 86,400,000 milliseconds = 24 
hour\n#securityManager.sessionManager.globalSessionTimeout = 
86400000\nshiro.loginUrl = /api/login\n\n[urls]\n# anon means the acce
 ss is anonymous.\n# authcBasic means Basic Auth Security\n# To enfore 
security, comment the line below and uncomment the next one\n/api/version = 
anon\n/** = anon\n#/** = authc"
+    },
+    "zeppelin-log4j-properties": {
+      "log4j_properties_content": "log4j.rootLogger = INFO, dailyfile"
+    },
+    "zeppelin-config": {
+      "zeppelin.server.port": "9995",
+      "zeppelin.ssl.truststore.password": "change me",
+      "zeppelin.interpreters": 
"org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter",
+      "zeppelin.ssl.truststore.path": "conf/truststore",
+      "zeppelin.notebook.dir": "notebook",
+      "zeppelin.ssl.keystore.password": "change me",
+      "zeppelin.ssl.keystore.path": "conf/keystore",
+      "zeppelin.server.addr": "0.0.0.0",
+      "zeppelin.ssl.client.auth": "false",
+      "zeppelin.notebook.homescreen": " ",
+      "zeppelin.interpreter.dir": "interpreter",
+      "zeppelin.ssl.keystore.type": "JKS",
+      "zeppelin.notebook.s3.user": "user",
+      "zeppelin.ssl.key.manager.password": "change me",
+      "zeppelin.anonymous.allowed": "true",
+      "zeppelin.ssl.truststore.type": "JKS",
+      "zeppelin.ssl": "false",
+      "zeppelin.notebook.storage": 
"org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo",
+      "zeppelin.config.fs.dir": "hdfs:///user/zeppelin/conf",
+      "zeppelin.websocket.max.text.message.size": "1024000",
+      "zeppelin.interpreter.connect.timeout": "30000",
+      "zeppelin.notebook.s3.bucket": "zeppelin",
+      "zeppelin.notebook.homescreen.hide": "false",
+      "zeppelin.server.allowed.origins": "*",
+      "zeppelin.interpreter.config.upgrade": "true"
+    },
     "zoo.cfg": {
       "clientPort": "2181"
     },

Reply via email to