Repository: ambari
Updated Branches:
  refs/heads/trunk 7f09e4c6a -> ac66085b3


AMBARI-17470 Refactor Ambari service def configurations for Zeppelin (r-kamath)

- Move shiro.ini and log4j.properties out of zeppelin-env configuration.
- Remove unused zeppelin-env fields.
- Fix unit tests.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ac66085b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ac66085b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ac66085b

Branch: refs/heads/trunk
Commit: ac66085b35b69264c6d161a00fc866b5e1a828da
Parents: 7f09e4c
Author: Renjith Kamath <[email protected]>
Authored: Wed Jan 11 07:13:40 2017 +0530
Committer: Renjith Kamath <[email protected]>
Committed: Wed Jan 11 07:13:40 2017 +0530

----------------------------------------------------------------------
 .../0.6.0.2.5/configuration/zeppelin-env.xml    |  87 ---------
 .../configuration/zeppelin-log4j-properties.xml |  37 ++++
 .../configuration/zeppelin-shiro-ini.xml        |  63 +++++++
 .../ZEPPELIN/0.6.0.2.5/metainfo.xml             |   2 +
 .../0.6.0.2.5/package/scripts/params.py         |   4 +-
 .../ZEPPELIN/configuration/zeppelin-env.xml     | 182 +++++++++++++++++++
 .../stacks/HDP/2.6/upgrades/config-upgrade.xml  |  34 ++++
 .../stacks/2.5/ZEPPELIN/test_zeppelin_master.py |  16 +-
 .../test/python/stacks/2.5/configs/default.json |  26 +--
 .../test/python/stacks/2.5/configs/secured.json |  18 +-
 10 files changed, 354 insertions(+), 115 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ac66085b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml
index 2beac97..e6c59d5 100644
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml
@@ -137,93 +137,6 @@ export SPARK_YARN_USER_ENV="PYTHONPATH=${PYTHONPATH}"
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
-    <name>shiro_ini_content</name>
-    <description>This is the jinja template for shiro.ini file</description>
-    <value>
-[users]
-# List of users with their password allowed to access Zeppelin.
-# To use a different strategy (LDAP / Database / ...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections
-#admin = password1, admin
-#user1 = password2, role1, role2
-#user2 = password3, role3
-#user3 = password4, role2
-
-# Sample LDAP configuration, for user Authentication, currently tested for 
single Realm
-[main]
-### A sample for configuring Active Directory Realm
-#activeDirectoryRealm = org.apache.zeppelin.realm.ActiveDirectoryGroupRealm
-#activeDirectoryRealm.systemUsername = userNameA
-
-#use either systemPassword or hadoopSecurityCredentialPath, more details in 
http://zeppelin.apache.org/docs/latest/security/shiroauthentication.html
-#activeDirectoryRealm.systemPassword = passwordA
-#activeDirectoryRealm.hadoopSecurityCredentialPath = 
jceks://file/user/zeppelin/zeppelin.jceks
-#activeDirectoryRealm.searchBase = CN=Users,DC=SOME_GROUP,DC=COMPANY,DC=COM
-#activeDirectoryRealm.url = ldap://ldap.test.com:389
-#activeDirectoryRealm.groupRolesMap = 
"CN=admin,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"admin","CN=finance,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"finance","CN=hr,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"hr"
-#activeDirectoryRealm.authorizationCachingEnabled = false
-
-### A sample for configuring LDAP Directory Realm
-#ldapRealm = org.apache.zeppelin.realm.LdapGroupRealm
-## search base for ldap groups (only relevant for LdapGroupRealm):
-#ldapRealm.contextFactory.environment[ldap.searchBase] = dc=COMPANY,dc=COM
-#ldapRealm.contextFactory.url = ldap://ldap.test.com:389
-#ldapRealm.userDnTemplate = uid={0},ou=Users,dc=COMPANY,dc=COM
-#ldapRealm.contextFactory.authenticationMechanism = SIMPLE
-
-### A sample PAM configuration
-#pamRealm=org.apache.zeppelin.realm.PamRealm
-#pamRealm.service=sshd
-
-sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager
-
-### If caching of user is required then uncomment below lines
-#cacheManager = org.apache.shiro.cache.MemoryConstrainedCacheManager
-#securityManager.cacheManager = $cacheManager
-
-securityManager.sessionManager = $sessionManager
-# 86,400,000 milliseconds = 24 hour
-securityManager.sessionManager.globalSessionTimeout = 86400000
-shiro.loginUrl = /api/login
-
-[roles]
-#role1 = *
-#role2 = *
-#role3 = *
-#admin = *
-
-[urls]
-# This section is used for url-based security.
-# You can secure interpreter, configuration and credential information by 
urls. Comment or uncomment the below urls that you want to hide.
-# anon means the access is anonymous.
-# authc means Form based Auth Security
-# To enfore security, comment the line below and uncomment the next one
-/api/version = anon
-#/api/interpreter/** = authc, roles[admin]
-#/api/configurations/** = authc, roles[admin]
-#/api/credential/** = authc, roles[admin]
-/** = anon
-#/** = authc
-    </value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>log4j_properties_content</name>
-    <description>This is the content for log4j.properties file</description>
-    <value>
-log4j.rootLogger = INFO, dailyfile
-log4j.appender.stdout = org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout = org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n
-log4j.appender.dailyfile.DatePattern=.yyyy-MM-dd
-log4j.appender.dailyfile.Threshold = INFO
-log4j.appender.dailyfile = org.apache.log4j.DailyRollingFileAppender
-log4j.appender.dailyfile.File = ${zeppelin.log.file}
-log4j.appender.dailyfile.layout = org.apache.log4j.PatternLayout
-log4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - 
%m%n
-  </value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
     <name>zeppelin.executor.mem</name>
     <value>512m</value>
     <description>Executor memory to use (e.g. 512m or 1g)</description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ac66085b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-log4j-properties.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-log4j-properties.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-log4j-properties.xml
new file mode 100644
index 0000000..bf50947
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-log4j-properties.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+<property>
+    <name>log4j_properties_content</name>
+    <description>This is the content for log4j.properties file</description>
+    <value>
+log4j.rootLogger = INFO, dailyfile
+log4j.appender.stdout = org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout = org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n
+log4j.appender.dailyfile.DatePattern=.yyyy-MM-dd
+log4j.appender.dailyfile.Threshold = INFO
+log4j.appender.dailyfile = org.apache.log4j.DailyRollingFileAppender
+log4j.appender.dailyfile.File = ${zeppelin.log.file}
+log4j.appender.dailyfile.layout = org.apache.log4j.PatternLayout
+log4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - 
%m%n
+    </value>
+    <on-ambari-upgrade add="true"/>
+</property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ac66085b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-shiro-ini.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-shiro-ini.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-shiro-ini.xml
new file mode 100644
index 0000000..3e2da1e
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-shiro-ini.xml
@@ -0,0 +1,63 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>shiro_ini_content</name>
+    <description>This is the jinja template for shiro.ini file</description>
+      <value>
+[users]
+# List of users with their password allowed to access Zeppelin.
+# To use a different strategy (LDAP / Database / ...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections
+#admin = password1
+#user1 = password2, role1, role2
+#user2 = password3, role3
+#user3 = password4, role2
+
+# Sample LDAP configuration, for user Authentication, currently tested for 
single Realm
+[main]
+#activeDirectoryRealm = org.apache.zeppelin.server.ActiveDirectoryGroupRealm
+#activeDirectoryRealm.systemUsername = 
CN=Administrator,CN=Users,DC=HW,DC=EXAMPLE,DC=COM
+#activeDirectoryRealm.systemPassword = Password1!
+#activeDirectoryRealm.hadoopSecurityCredentialPath = 
jceks://user/zeppelin/zeppelin.jceks
+#activeDirectoryRealm.searchBase = CN=Users,DC=HW,DC=TEST,DC=COM
+#activeDirectoryRealm.url = ldap://ad-nano.test.example.com:389
+#activeDirectoryRealm.groupRolesMap = ""
+#activeDirectoryRealm.authorizationCachingEnabled = true
+
+#ldapRealm = org.apache.shiro.realm.ldap.JndiLdapRealm
+#ldapRealm.userDnTemplate = uid={0},cn=users,cn=accounts,dc=example,dc=com
+#ldapRealm.contextFactory.url = ldap://ldaphost:389
+#ldapRealm.contextFactory.authenticationMechanism = SIMPLE
+#sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager
+#securityManager.sessionManager = $sessionManager
+# 86,400,000 milliseconds = 24 hour
+#securityManager.sessionManager.globalSessionTimeout = 86400000
+shiro.loginUrl = /api/login
+
+[urls]
+# anon means the access is anonymous.
+# authcBasic means Basic Auth Security
+# To enfore security, comment the line below and uncomment the next one
+/api/version = anon
+/** = anon
+#/** = authc
+      </value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ac66085b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/metainfo.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/metainfo.xml
index 9a0ba5f..78fbb1b 100644
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/metainfo.xml
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/metainfo.xml
@@ -83,6 +83,8 @@ limitations under the License.
       <configuration-dependencies>
         <config-type>zeppelin-config</config-type>
         <config-type>zeppelin-env</config-type>
+        <config-type>zeppelin-shiro-ini</config-type>
+        <config-type>zeppelin-log4j-properties</config-type>
       </configuration-dependencies>
       <restartRequiredAfterChange>true</restartRequiredAfterChange>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ac66085b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
index 5ee6147..97e93fe 100644
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
@@ -101,10 +101,10 @@ notebook_dir = os.path.join(*[install_dir, 
zeppelin_dirname, 'notebook'])
 zeppelin_env_content = 
config['configurations']['zeppelin-env']['zeppelin_env_content']
 
 # shiro.ini
-shiro_ini_content = 
config['configurations']['zeppelin-env']['shiro_ini_content']
+shiro_ini_content = 
config['configurations']['zeppelin-shiro-ini']['shiro_ini_content']
 
 # log4j.properties
-log4j_properties_content = 
config['configurations']['zeppelin-env']['log4j_properties_content']
+log4j_properties_content = 
config['configurations']['zeppelin-log4j-properties']['log4j_properties_content']
 
 # detect configs
 master_configs = config['clusterHostInfo']

http://git-wip-us.apache.org/repos/asf/ambari/blob/ac66085b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/configuration/zeppelin-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/configuration/zeppelin-env.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/configuration/zeppelin-env.xml
new file mode 100644
index 0000000..1f65fac
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/configuration/zeppelin-env.xml
@@ -0,0 +1,182 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>zeppelin_pid_dir</name>
+    <value>/var/run/zeppelin</value>
+    <description>Dir containing process ID file</description>
+    <value-attributes>
+      <type>directory</type>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin_user</name>
+    <value>zeppelin</value>
+    <property-type>USER</property-type>
+    <description>User zeppelin daemon runs as</description>
+    <value-attributes>
+      <type>user</type>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin_group</name>
+    <value>zeppelin</value>
+    <property-type>GROUP</property-type>
+    <description>zeppelin group</description>
+    <value-attributes>
+      <type>user</type>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin_log_dir</name>
+    <value>/var/log/zeppelin</value>
+    <description>Zeppelin Log dir</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin_env_content</name>
+    <description>This is the jinja template for zeppelin-env.sh 
file</description>
+    <value>
+# export JAVA_HOME=
+export JAVA_HOME={{java64_home}}
+# export MASTER=                              # Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode.
+export MASTER=yarn-client
+export SPARK_YARN_JAR={{spark_jar}}
+# export ZEPPELIN_JAVA_OPTS                   # Additional jvm options. for 
example, export ZEPPELIN_JAVA_OPTS="-Dspark.executor.memory=8g 
-Dspark.cores.max=16"
+export ZEPPELIN_JAVA_OPTS="-Dhdp.version={{full_stack_version}} 
-Dspark.executor.memory={{executor_mem}} 
-Dspark.executor.instances={{executor_instances}} 
-Dspark.yarn.queue={{spark_queue}}"
+# export ZEPPELIN_MEM                         # Zeppelin jvm mem options 
Default -Xms1024m -Xmx1024m -XX:MaxPermSize=512m
+# export ZEPPELIN_INTP_MEM                    # zeppelin interpreter process 
jvm mem options. Default -Xms1024m -Xmx1024m -XX:MaxPermSize=512m
+# export ZEPPELIN_INTP_JAVA_OPTS              # zeppelin interpreter process 
jvm options.
+# export ZEPPELIN_SSL_PORT                    # ssl port (used when ssl 
environment variable is set to true)
+
+# export ZEPPELIN_LOG_DIR                     # Where log files are stored.  
PWD by default.
+export ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}
+# export ZEPPELIN_PID_DIR                     # The pid files are stored. 
${ZEPPELIN_HOME}/run by default.
+export ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}
+# export ZEPPELIN_WAR_TEMPDIR                 # The location of jetty 
temporary directory.
+# export ZEPPELIN_NOTEBOOK_DIR                # Where notebook saved
+# export ZEPPELIN_NOTEBOOK_HOMESCREEN         # Id of notebook to be displayed 
in homescreen. ex) 2A94M5J1Z
+# export ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE    # hide homescreen notebook from 
list when this value set to "true". default "false"
+# export ZEPPELIN_NOTEBOOK_S3_BUCKET          # Bucket where notebook saved
+# export ZEPPELIN_NOTEBOOK_S3_ENDPOINT        # Endpoint of the bucket
+# export ZEPPELIN_NOTEBOOK_S3_USER            # User in bucket where notebook 
saved. For example bucket/user/notebook/2A94M5J1Z/note.json
+# export ZEPPELIN_IDENT_STRING                # A string representing this 
instance of zeppelin. $USER by default.
+# export ZEPPELIN_NICENESS                    # The scheduling priority for 
daemons. Defaults to 0.
+# export ZEPPELIN_INTERPRETER_LOCALREPO       # Local repository for 
interpreter's additional dependency loading
+# export ZEPPELIN_NOTEBOOK_STORAGE            # Refers to pluggable notebook 
storage class, can have two classes simultaneously with a sync between them 
(e.g. local and remote).
+# export ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC       # If there are multiple notebook 
storages, should we treat the first one as the only source of truth?
+# export ZEPPELIN_NOTEBOOK_PUBLIC             # Make notebook public by 
default when created, private otherwise
+
+#### Spark interpreter configuration ####
+
+## Use provided spark installation ##
+## defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit
+##
+# export SPARK_HOME                           # (required) When it is defined, 
load it instead of Zeppelin embedded Spark libraries
+#export SPARK_HOME={{spark_home}}
+# export SPARK_SUBMIT_OPTIONS                 # (optional) extra options to 
pass to spark submit. eg) "--driver-memory 512M --executor-memory 1G".
+# export SPARK_APP_NAME                       # (optional) The name of spark 
application.
+
+## Use embedded spark binaries ##
+## without SPARK_HOME defined, Zeppelin still able to run spark interpreter 
process using embedded spark binaries.
+## however, it is not encouraged when you can define SPARK_HOME
+##
+# Options read in YARN client mode
+# export HADOOP_CONF_DIR                      # yarn-site.xml is located in 
configuration directory in HADOOP_CONF_DIR.
+export HADOOP_CONF_DIR=/etc/hadoop/conf
+# Pyspark (supported with Spark 1.2.1 and above)
+# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI
+# export PYSPARK_PYTHON                       # path to the python command. 
must be the same path on the driver(Zeppelin) and all workers.
+# export PYTHONPATH
+
+export 
PYTHONPATH="${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip"
+export SPARK_YARN_USER_ENV="PYTHONPATH=${PYTHONPATH}"
+
+## Spark interpreter options ##
+##
+# export ZEPPELIN_SPARK_USEHIVECONTEXT        # Use HiveContext instead of 
SQLContext if set true. true by default.
+# export ZEPPELIN_SPARK_CONCURRENTSQL         # Execute multiple SQL 
concurrently if set true. false by default.
+# export ZEPPELIN_SPARK_IMPORTIMPLICIT        # Import implicits, UDF 
collection, and sql if set true. true by default.
+# export ZEPPELIN_SPARK_MAXRESULT             # Max number of Spark SQL result 
to display. 1000 by default.
+# export ZEPPELIN_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE       # Size in characters 
of the maximum text message to be received by websocket. Defaults to 1024000
+
+
+#### HBase interpreter configuration ####
+
+## To connect to HBase running on a cluster, either HBASE_HOME or 
HBASE_CONF_DIR must be set
+
+# export HBASE_HOME=                          # (require) Under which HBase 
scripts and configuration should be
+# export HBASE_CONF_DIR=                      # (optional) Alternatively, 
configuration directory can be set to point to the directory that has 
hbase-site.xml
+
+# export ZEPPELIN_IMPERSONATE_CMD             # Optional, when user want to 
run interpreter as end web user. eg) 'sudo -H -u ${ZEPPELIN_IMPERSONATE_USER} 
bash -c '
+
+    </value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.executor.mem</name>
+    <value>512m</value>
+    <description>Executor memory to use (e.g. 512m or 1g)</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.executor.instances</name>
+    <value>2</value>
+    <description>Number of executor instances to use (e.g. 2)</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.spark.jar.dir</name>
+    <value>/apps/zeppelin</value>
+    <description>Shared location where zeppelin spark jar will be copied to. 
Should be accesible
+      by all cluster nodes
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>zeppelin.server.kerberos.principal</name>
+    <value/>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <description>
+      Kerberos principal name for the Zeppelin.
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.server.kerberos.keytab</name>
+    <value/>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <description>
+      Location of the kerberos keytab file for the Zeppelin.
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ac66085b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
new file mode 100644
index 0000000..0d71244
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<upgrade-config-changes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:noNamespaceSchemaLocation="upgrade-config.xsd">
+  <services>
+    <service name="ZEPPELIN">
+      <component name="ZEPPELIN_MASTER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_zeppelin_master">
+            <type>zeppelin-env</type>
+            <transfer operation="delete" delete-key="shiro_ini_content" />
+            <transfer operation="delete" delete-key="log4j_properties_content" 
/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+  </services>
+</upgrade-config-changes>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ac66085b/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_master.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_master.py 
b/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_master.py
index 9a3555b..d046dcd 100644
--- a/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_master.py
+++ b/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_master.py
@@ -67,13 +67,13 @@ class TestZeppelinMaster(RMFTestCase):
     )
     self.assertResourceCalled('File', '/etc/zeppelin/conf/shiro.ini',
         owner = 'zeppelin',
-        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-env']['shiro_ini_content']),
+        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-shiro-ini']['shiro_ini_content']),
         group = 'zeppelin',
     )
     self.assertResourceCalled('File', '/etc/zeppelin/conf/log4j.properties',
-        owner = 'zeppelin',
-        content = '\nlog4j.rootLogger = INFO, dailyfile\nlog4j.appender.stdout 
= org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - 
%m%n\nlog4j.appender.dailyfile.DatePattern=.yyyy-MM-dd\nlog4j.appender.dailyfile.Threshold
 = INFO\nlog4j.appender.dailyfile = 
org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.dailyfile.File = 
${zeppelin.log.file}\nlog4j.appender.dailyfile.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.dailyfile.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - %m%n',
-        group = 'zeppelin',
+        owner = u'zeppelin',
+        content = u'log4j.rootLogger = INFO, dailyfile',
+        group = u'zeppelin',
     )
     self.assertResourceCalled('File', '/etc/zeppelin/conf/hive-site.xml',
         owner = 'zeppelin',
@@ -117,13 +117,13 @@ class TestZeppelinMaster(RMFTestCase):
     )
     self.assertResourceCalled('File', '/etc/zeppelin/conf/shiro.ini',
         owner = 'zeppelin',
-        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-env']['shiro_ini_content']),
+        content = 
InlineTemplate(self.getConfig()['configurations']['zeppelin-shiro-ini']['shiro_ini_content']),
         group = 'zeppelin',
     )
     self.assertResourceCalled('File', '/etc/zeppelin/conf/log4j.properties',
-        owner = 'zeppelin',
-        content = '\nlog4j.rootLogger = INFO, dailyfile\nlog4j.appender.stdout 
= org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - 
%m%n\nlog4j.appender.dailyfile.DatePattern=.yyyy-MM-dd\nlog4j.appender.dailyfile.Threshold
 = INFO\nlog4j.appender.dailyfile = 
org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.dailyfile.File = 
${zeppelin.log.file}\nlog4j.appender.dailyfile.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.dailyfile.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - %m%n',
-        group = 'zeppelin',
+        owner = u'zeppelin',
+        content = u'log4j.rootLogger = INFO, dailyfile',
+        group = u'zeppelin',
     )
     self.assertResourceCalled('File', '/etc/zeppelin/conf/hive-site.xml',
         owner = 'zeppelin',

http://git-wip-us.apache.org/repos/asf/ambari/blob/ac66085b/ambari-server/src/test/python/stacks/2.5/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/default.json 
b/ambari-server/src/test/python/stacks/2.5/configs/default.json
index a7b2de2..bcc3607 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/default.json
@@ -290,19 +290,23 @@
       "content": "<property><name>content</name><description>Custom solrconfig 
properties</description><value></value></property>"
     },
     "zeppelin-env": {
-      "zeppelin.server.kerberos.keytab": "", 
-      "shiro_ini_content": "\n[users]\n# List of users with their password 
allowed to access Zeppelin.\n# To use a different strategy (LDAP / Database / 
...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections\n#admin = 
password1\n#user1 = password2, role1, role2\n#user2 = password3, role3\n#user3 
= password4, role2\n\n# Sample LDAP configuration, for user Authentication, 
currently tested for single Realm\n[main]\n#ldapRealm = 
org.apache.shiro.realm.ldap.JndiLdapRealm\n#ldapRealm.userDnTemplate = 
uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com\n#ldapRealm.contextFactory.url
 = ldap://ldaphost:389\n#ldapRealm.contextFactory.authenticationMechanism = 
SIMPLE\n#sessionManager = 
org.apache.shiro.web.session.mgt.DefaultWebSessionManager\n#securityManager.sessionManager
 = $sessionManager\n# 86,400,000 milliseconds = 24 
hour\n#securityManager.sessionManager.globalSessionTimeout = 
86400000\nshiro.loginUrl = /api/login\n\n[urls]\n# anon means the acce
 ss is anonymous.\n# authcBasic means Basic Auth Security\n# To enfore 
security, comment the line below and uncomment the next one\n/api/version = 
anon\n/** = anon\n#/** = authc", 
-      "zeppelin.spark.jar.dir": "/apps/zeppelin", 
-      "zeppelin.executor.mem": "512m", 
-      "zeppelin_pid_dir": "/var/run/zeppelin", 
-      "zeppelin.executor.instances": "2", 
-      "log4j_properties_content": "\nlog4j.rootLogger = INFO, 
dailyfile\nlog4j.appender.stdout = 
org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - 
%m%n\nlog4j.appender.dailyfile.DatePattern=.yyyy-MM-dd\nlog4j.appender.dailyfile.Threshold
 = INFO\nlog4j.appender.dailyfile = 
org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.dailyfile.File = 
${zeppelin.log.file}\nlog4j.appender.dailyfile.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.dailyfile.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - %m%n", 
-      "zeppelin.server.kerberos.principal": "", 
-      "zeppelin_user": "zeppelin", 
-      "zeppelin_env_content": "\n# Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode\nexport 
MASTER=yarn-client\nexport SPARK_YARN_JAR={{spark_jar}}\n\n\n# Where log files 
are stored.  PWD by default.\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}\n\n# 
The pid files are stored. /tmp by default.\nexport 
ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}\n\n\nexport 
JAVA_HOME={{java64_home}}\n\n# Additional jvm options. for example, export 
ZEPPELIN_JAVA_OPTS=\"-Dspark.executor.memory=8g -Dspark.cores.max=16\"\nexport 
ZEPPELIN_JAVA_OPTS=\"-Dhdp.version={{full_stack_version}} 
-Dspark.executor.memory={{executor_mem}} 
-Dspark.executor.instances={{executor_instances}} 
-Dspark.yarn.queue={{spark_queue}}\"\n\n\n# Zeppelin jvm mem options Default 
-Xmx1024m -XX:MaxPermSize=512m\n# export ZEPPELIN_MEM\n\n# zeppelin interpreter 
process jvm mem options. Defualt = ZEPPELIN_MEM\n# export 
ZEPPELIN_INTP_MEM\n\n# zeppelin interpreter process jvm options. Default = 
ZEPPELIN_JA
 VA_OPTS\n# export ZEPPELIN_INTP_JAVA_OPTS\n\n# Where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_DIR\n\n# Id of notebook to be displayed in homescreen. ex) 
2A94M5J1Z\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN\n\n# hide homescreen notebook 
from list when this value set to \"true\". default \"false\"\n# export 
ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE\n\n# Bucket where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_S3_BUCKET\n\n# User in bucket where notebook saved. For 
example bucket/user/notebook/2A94M5J1Z/note.json\n# export 
ZEPPELIN_NOTEBOOK_S3_USER\n\n# A string representing this instance of zeppelin. 
$USER by default\n# export ZEPPELIN_IDENT_STRING\n\n# The scheduling priority 
for daemons. Defaults to 0.\n# export ZEPPELIN_NICENESS\n\n\n#### Spark 
interpreter configuration ####\n\n## Use provided spark installation ##\n## 
defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit\n##\n# (required) When it is defined, load it instead of Zeppelin 
embedded Spark libraries\n
 export SPARK_HOME={{spark_home}}\n\n# (optional) extra options to pass to 
spark submit. eg) \"--driver-memory 512M --executor-memory 1G\".\n# export 
SPARK_SUBMIT_OPTIONS\n\n## Use embedded spark binaries ##\n## without 
SPARK_HOME defined, Zeppelin still able to run spark interpreter process using 
embedded spark binaries.\n## however, it is not encouraged when you can define 
SPARK_HOME\n##\n# Options read in YARN client mode\n# yarn-site.xml is located 
in configuration directory in HADOOP_CONF_DIR.\nexport 
HADOOP_CONF_DIR=/etc/hadoop/conf\n\n# Pyspark (supported with Spark 1.2.1 and 
above)\n# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI\n# path to 
the python command. must be the same path on the driver(Zeppelin) and all 
workers.\n# export PYSPARK_PYTHON\n\nexport 
PYTHONPATH=\"${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip\"\nexport
 SPARK_YARN_USER_ENV=\"PYTHONPATH=${PYTHONPAT
 H}\"\n\n## Spark interpreter options ##\n##\n# Use HiveContext instead of 
SQLContext if set true. true by default.\n# export 
ZEPPELIN_SPARK_USEHIVECONTEXT\n\n# Execute multiple SQL concurrently if set 
true. false by default.\n# export ZEPPELIN_SPARK_CONCURRENTSQL\n\n# Max number 
of SparkSQL result to display. 1000 by default.\n# export 
ZEPPELIN_SPARK_MAXRESULT", 
-      "zeppelin_log_dir": "/var/log/zeppelin", 
+      "zeppelin.server.kerberos.keytab": "",
+      "zeppelin.spark.jar.dir": "/apps/zeppelin",
+      "zeppelin.executor.mem": "512m",
+      "zeppelin_pid_dir": "/var/run/zeppelin",
+      "zeppelin.executor.instances": "2",
+      "zeppelin.server.kerberos.principal": "",
+      "zeppelin_user": "zeppelin",
+      "zeppelin_env_content": "\n# Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode\nexport 
MASTER=yarn-client\nexport SPARK_YARN_JAR={{spark_jar}}\n\n\n# Where log files 
are stored.  PWD by default.\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}\n\n# 
The pid files are stored. /tmp by default.\nexport 
ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}\n\n\nexport 
JAVA_HOME={{java64_home}}\n\n# Additional jvm options. for example, export 
ZEPPELIN_JAVA_OPTS=\"-Dspark.executor.memory=8g -Dspark.cores.max=16\"\nexport 
ZEPPELIN_JAVA_OPTS=\"-Dhdp.version={{full_stack_version}} 
-Dspark.executor.memory={{executor_mem}} 
-Dspark.executor.instances={{executor_instances}} 
-Dspark.yarn.queue={{spark_queue}}\"\n\n\n# Zeppelin jvm mem options Default 
-Xmx1024m -XX:MaxPermSize=512m\n# export ZEPPELIN_MEM\n\n# zeppelin interpreter 
process jvm mem options. Defualt = ZEPPELIN_MEM\n# export 
ZEPPELIN_INTP_MEM\n\n# zeppelin interpreter process jvm options. Default = 
ZEPPELIN_JA
 VA_OPTS\n# export ZEPPELIN_INTP_JAVA_OPTS\n\n# Where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_DIR\n\n# Id of notebook to be displayed in homescreen. ex) 
2A94M5J1Z\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN\n\n# hide homescreen notebook 
from list when this value set to \"true\". default \"false\"\n# export 
ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE\n\n# Bucket where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_S3_BUCKET\n\n# User in bucket where notebook saved. For 
example bucket/user/notebook/2A94M5J1Z/note.json\n# export 
ZEPPELIN_NOTEBOOK_S3_USER\n\n# A string representing this instance of zeppelin. 
$USER by default\n# export ZEPPELIN_IDENT_STRING\n\n# The scheduling priority 
for daemons. Defaults to 0.\n# export ZEPPELIN_NICENESS\n\n\n#### Spark 
interpreter configuration ####\n\n## Use provided spark installation ##\n## 
defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit\n##\n# (required) When it is defined, load it instead of Zeppelin 
embedded Spark libraries\n
 export SPARK_HOME={{spark_home}}\n\n# (optional) extra options to pass to 
spark submit. eg) \"--driver-memory 512M --executor-memory 1G\".\n# export 
SPARK_SUBMIT_OPTIONS\n\n## Use embedded spark binaries ##\n## without 
SPARK_HOME defined, Zeppelin still able to run spark interpreter process using 
embedded spark binaries.\n## however, it is not encouraged when you can define 
SPARK_HOME\n##\n# Options read in YARN client mode\n# yarn-site.xml is located 
in configuration directory in HADOOP_CONF_DIR.\nexport 
HADOOP_CONF_DIR=/etc/hadoop/conf\n\n# Pyspark (supported with Spark 1.2.1 and 
above)\n# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI\n# path to 
the python command. must be the same path on the driver(Zeppelin) and all 
workers.\n# export PYSPARK_PYTHON\n\nexport 
PYTHONPATH=\"${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip\"\nexport
 SPARK_YARN_USER_ENV=\"PYTHONPATH=${PYTHONPAT
 H}\"\n\n## Spark interpreter options ##\n##\n# Use HiveContext instead of 
SQLContext if set true. true by default.\n# export 
ZEPPELIN_SPARK_USEHIVECONTEXT\n\n# Execute multiple SQL concurrently if set 
true. false by default.\n# export ZEPPELIN_SPARK_CONCURRENTSQL\n\n# Max number 
of SparkSQL result to display. 1000 by default.\n# export 
ZEPPELIN_SPARK_MAXRESULT",
+      "zeppelin_log_dir": "/var/log/zeppelin",
       "zeppelin_group": "zeppelin"
     },
+    "zeppelin-shiro-ini": {
+      "shiro_ini_content": "\n[users]\n# List of users with their password 
allowed to access Zeppelin.\n# To use a different strategy (LDAP / Database / 
...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections\n#admin = 
password1\n#user1 = password2, role1, role2\n#user2 = password3, role3\n#user3 
= password4, role2\n\n# Sample LDAP configuration, for user Authentication, 
currently tested for single Realm\n[main]\n#ldapRealm = 
org.apache.shiro.realm.ldap.JndiLdapRealm\n#ldapRealm.userDnTemplate = 
uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com\n#ldapRealm.contextFactory.url
 = ldap://ldaphost:389\n#ldapRealm.contextFactory.authenticationMechanism = 
SIMPLE\n#sessionManager = 
org.apache.shiro.web.session.mgt.DefaultWebSessionManager\n#securityManager.sessionManager
 = $sessionManager\n# 86,400,000 milliseconds = 24 
hour\n#securityManager.sessionManager.globalSessionTimeout = 
86400000\nshiro.loginUrl = /api/login\n\n[urls]\n# anon means the acce
 ss is anonymous.\n# authcBasic means Basic Auth Security\n# To enfore 
security, comment the line below and uncomment the next one\n/api/version = 
anon\n/** = anon\n#/** = authc"
+    },
+    "zeppelin-log4j-properties":{
+      "log4j_properties_content": "log4j.rootLogger = INFO, dailyfile"
+    },
 "zeppelin-config": {
             "zeppelin.server.port": "9995", 
             "zeppelin.ssl.truststore.password": "change me", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ac66085b/ambari-server/src/test/python/stacks/2.5/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/secured.json 
b/ambari-server/src/test/python/stacks/2.5/configs/secured.json
index 7485b5f..914e6a4 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/secured.json
@@ -222,14 +222,18 @@
     "atlas-solrconfig": {
       "content": "<property><name>content</name><description>Custom solrconfig 
properties</description><value></value></property>"
     },
+    "zeppelin-shiro-ini": {
+      "shiro_ini_content": "\n[users]\n# List of users with their password 
allowed to access Zeppelin.\n# To use a different strategy (LDAP / Database / 
...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections\n#admin = 
password1\n#user1 = password2, role1, role2\n#user2 = password3, role3\n#user3 
= password4, role2\n\n# Sample LDAP configuration, for user Authentication, 
currently tested for single Realm\n[main]\n#ldapRealm = 
org.apache.shiro.realm.ldap.JndiLdapRealm\n#ldapRealm.userDnTemplate = 
uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com\n#ldapRealm.contextFactory.url
 = ldap://ldaphost:389\n#ldapRealm.contextFactory.authenticationMechanism = 
SIMPLE\n#sessionManager = 
org.apache.shiro.web.session.mgt.DefaultWebSessionManager\n#securityManager.sessionManager
 = $sessionManager\n# 86,400,000 milliseconds = 24 
hour\n#securityManager.sessionManager.globalSessionTimeout = 
86400000\nshiro.loginUrl = /api/login\n\n[urls]\n# anon means the acce
 ss is anonymous.\n# authcBasic means Basic Auth Security\n# To enfore 
security, comment the line below and uncomment the next one\n/api/version = 
anon\n/** = anon\n#/** = authc"
+    },
+    "zeppelin-log4j-properties":{
+      "log4j_properties_content": "log4j.rootLogger = INFO, dailyfile"
+    },
     "zeppelin-env": {
-        "zeppelin.server.kerberos.keytab": 
"/etc/security/keytabs/zeppelin.server.kerberos.keytab", 
-        "shiro_ini_content": "\n[users]\n# List of users with their password 
allowed to access Zeppelin.\n# To use a different strategy (LDAP / Database / 
...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections\n#admin = 
password1\n#user1 = password2, role1, role2\n#user2 = password3, role3\n#user3 
= password4, role2\n\n# Sample LDAP configuration, for user Authentication, 
currently tested for single Realm\n[main]\n#activeDirectoryRealm = 
org.apache.zeppelin.server.ActiveDirectoryGroupRealm\n#activeDirectoryRealm.systemUsername
 = 
CN=Administrator,CN=Users,DC=HW,DC=EXAMPLE,DC=COM\n#activeDirectoryRealm.systemPassword
 = Password1!\n#activeDirectoryRealm.hadoopSecurityCredentialPath = 
jceks://user/zeppelin/zeppelin.jceks\n#activeDirectoryRealm.searchBase = 
CN=Users,DC=HW,DC=TEST,DC=COM\n#activeDirectoryRealm.url = 
ldap://ad-nano.test.example.com:389\n#activeDirectoryRealm.groupRolesMap = 
\"\"\n#activeDirectoryRealm.authorizationCachingEnabled = 
 true\n\n#ldapRealm = 
org.apache.shiro.realm.ldap.JndiLdapRealm\n#ldapRealm.userDnTemplate = 
uid={0},cn=users,cn=accounts,dc=example,dc=com\n#ldapRealm.contextFactory.url = 
ldap://ldaphost:389\n#ldapRealm.contextFactory.authenticationMechanism = 
SIMPLE\n#sessionManager = 
org.apache.shiro.web.session.mgt.DefaultWebSessionManager\n#securityManager.sessionManager
 = $sessionManager\n# 86,400,000 milliseconds = 24 
hour\n#securityManager.sessionManager.globalSessionTimeout = 
86400000\nshiro.loginUrl = /api/login\n\n[urls]\n# anon means the access is 
anonymous.\n# authcBasic means Basic Auth Security\n# To enfore security, 
comment the line below and uncomment the next one\n/api/version = anon\n/** = 
anon\n#/** = authc", 
-        "zeppelin.spark.jar.dir": "/apps/zeppelin", 
-        "zeppelin.executor.mem": "512m", 
-        "zeppelin_pid_dir": "/var/run/zeppelin", 
-        "zeppelin.executor.instances": "2", 
-        "log4j_properties_content": "\nlog4j.rootLogger = INFO, 
dailyfile\nlog4j.appender.stdout = 
org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - 
%m%n\nlog4j.appender.dailyfile.DatePattern=.yyyy-MM-dd\nlog4j.appender.dailyfile.Threshold
 = INFO\nlog4j.appender.dailyfile = 
org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.dailyfile.File = 
${zeppelin.log.file}\nlog4j.appender.dailyfile.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.dailyfile.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - %m%n", 
+        "zeppelin.server.kerberos.keytab": 
"/etc/security/keytabs/zeppelin.server.kerberos.keytab",
+        "zeppelin.spark.jar.dir": "/apps/zeppelin",
+        "zeppelin.executor.mem": "512m",
+        "zeppelin_pid_dir": "/var/run/zeppelin",
+        "zeppelin.executor.instances": "2",
         "zeppelin.server.kerberos.principal": "[email protected]", 
         "zeppelin_user": "zeppelin", 
         "zeppelin_env_content": "\n# Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode\nexport 
MASTER=yarn-client\nexport SPARK_YARN_JAR={{spark_jar}}\n\n\n# Where log files 
are stored.  PWD by default.\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}\n\n# 
The pid files are stored. /tmp by default.\nexport 
ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}\n\n\nexport 
JAVA_HOME={{java64_home}}\n\n# Additional jvm options. for example, export 
ZEPPELIN_JAVA_OPTS=\"-Dspark.executor.memory=8g -Dspark.cores.max=16\"\nexport 
ZEPPELIN_JAVA_OPTS=\"-Dhdp.version={{full_stack_version}} 
-Dspark.executor.memory={{executor_mem}} 
-Dspark.executor.instances={{executor_instances}} 
-Dspark.yarn.queue={{spark_queue}}\"\n\n\n# Zeppelin jvm mem options Default 
-Xmx1024m -XX:MaxPermSize=512m\n# export ZEPPELIN_MEM\n\n# zeppelin interpreter 
process jvm mem options. Defualt = ZEPPELIN_MEM\n# export 
ZEPPELIN_INTP_MEM\n\n# zeppelin interpreter process jvm options. Default = 
ZEPPELIN_
 JAVA_OPTS\n# export ZEPPELIN_INTP_JAVA_OPTS\n\n# Where notebook saved\n# 
export ZEPPELIN_NOTEBOOK_DIR\n\n# Id of notebook to be displayed in homescreen. 
ex) 2A94M5J1Z\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN\n\n# hide homescreen 
notebook from list when this value set to \"true\". default \"false\"\n# export 
ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE\n\n# Bucket where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_S3_BUCKET\n\n# User in bucket where notebook saved. For 
example bucket/user/notebook/2A94M5J1Z/note.json\n# export 
ZEPPELIN_NOTEBOOK_S3_USER\n\n# A string representing this instance of zeppelin. 
$USER by default\n# export ZEPPELIN_IDENT_STRING\n\n# The scheduling priority 
for daemons. Defaults to 0.\n# export ZEPPELIN_NICENESS\n\n\n#### Spark 
interpreter configuration ####\n\n## Use provided spark installation ##\n## 
defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit\n##\n# (required) When it is defined, load it instead of Zeppelin 
embedded Spark libraries
 \nexport SPARK_HOME={{spark_home}}\n\n# (optional) extra options to pass to 
spark submit. eg) \"--driver-memory 512M --executor-memory 1G\".\n# export 
SPARK_SUBMIT_OPTIONS\n\n## Use embedded spark binaries ##\n## without 
SPARK_HOME defined, Zeppelin still able to run spark interpreter process using 
embedded spark binaries.\n## however, it is not encouraged when you can define 
SPARK_HOME\n##\n# Options read in YARN client mode\n# yarn-site.xml is located 
in configuration directory in HADOOP_CONF_DIR.\nexport 
HADOOP_CONF_DIR=/etc/hadoop/conf\n\n# Pyspark (supported with Spark 1.2.1 and 
above)\n# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI\n# path to 
the python command. must be the same path on the driver(Zeppelin) and all 
workers.\n# export PYSPARK_PYTHON\n\nexport 
PYTHONPATH=\"${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip\"\nexport
 SPARK_YARN_USER_ENV=\"PYTHONPATH=${PYTHONP
 ATH}\"\n\n## Spark interpreter options ##\n##\n# Use HiveContext instead of 
SQLContext if set true. true by default.\n# export 
ZEPPELIN_SPARK_USEHIVECONTEXT\n\n# Execute multiple SQL concurrently if set 
true. false by default.\n# export ZEPPELIN_SPARK_CONCURRENTSQL\n\n# Max number 
of SparkSQL result to display. 1000 by default.\n# export 
ZEPPELIN_SPARK_MAXRESULT", 

Reply via email to