Repository: ambari
Updated Branches:
  refs/heads/trunk 784cae2b0 -> fecc9b80a


AMBARI-19248. Add Livy to HDP 2.6 as slave component of Spark2 (Mingjie Tang via alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fecc9b80
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fecc9b80
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fecc9b80

Branch: refs/heads/trunk
Commit: fecc9b80a83535734f1fbc9db8c35f48f6f7fc4b
Parents: 784cae2
Author: Alejandro Fernandez <afernan...@hortonworks.com>
Authored: Tue Jan 10 16:44:44 2017 -0800
Committer: Alejandro Fernandez <afernan...@hortonworks.com>
Committed: Tue Jan 10 16:44:44 2017 -0800

----------------------------------------------------------------------
 .../libraries/functions/constants.py            |   1 +
 .../2.0.0/package/scripts/livy2_server.py       | 149 +++++++++++++++++++
 .../2.0.0/package/scripts/livy2_service.py      |  48 ++++++
 .../SPARK2/2.0.0/package/scripts/params.py      |  65 +++++++-
 .../2.0.0/package/scripts/service_check.py      |  31 +++-
 .../SPARK2/2.0.0/package/scripts/setup_livy2.py |  80 ++++++++++
 .../2.0.0/package/scripts/status_params.py      |   6 +
 .../HDP/2.0.6/properties/stack_features.json    |   5 +
 .../stacks/HDP/2.5/services/stack_advisor.py    |   2 +-
 .../SPARK2/configuration/livy2-conf.xml         |  82 ++++++++++
 .../services/SPARK2/configuration/livy2-env.xml |  95 ++++++++++++
 .../configuration/livy2-log4j-properties.xml    |  42 ++++++
 .../configuration/livy2-spark-blacklist.xml     |  52 +++++++
 .../HDP/2.6/services/SPARK2/kerberos.json       | 126 ++++++++++++++++
 .../stacks/HDP/2.6/services/SPARK2/metainfo.xml |  92 ++++++++++++
 .../stacks/2.6/SPARK2/test_spark_livy2.py       | 120 +++++++++++++++
 .../test/python/stacks/2.6/configs/default.json |  30 +++-
 17 files changed, 1015 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index a352f71..a633c13 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -60,6 +60,7 @@ class StackFeature:
   SPARK_16PLUS = "spark_16plus"
   SPARK_THRIFTSERVER = "spark_thriftserver"
   SPARK_LIVY = "spark_livy"
+  SPARK_LIVY2 = "spark_livy2"
   STORM_KERBEROS = "storm_kerberos"
   STORM_AMS = "storm_ams"
   CREATE_KAFKA_BROKER_ID = "create_kafka_broker_id"

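For reference, the new SPARK_LIVY2 constant is gated by the "spark_livy2" entry added to stack_features.json later in this patch (min_version 2.6.0.0). A minimal standalone sketch of that kind of min-version gate, with illustrative names (the real logic lives in check_stack_feature):

    def version_tuple(v):
        # "2.6.0.0-334" -> (2, 6, 0, 0); ignore the build suffix
        return tuple(int(p) for p in v.split("-")[0].split("."))

    def feature_supported(feature, stack_version, features):
        for f in features:
            if f["name"] == feature:
                return version_tuple(stack_version) >= version_tuple(f["min_version"])
        return False

    features = [{"name": "spark_livy2", "min_version": "2.6.0.0"}]
    print(feature_supported("spark_livy2", "2.6.0.0-334", features))  # True
    print(feature_supported("spark_livy2", "2.5.3.0-37", features))   # False
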
http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_server.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_server.py
new file mode 100644
index 0000000..8c66998
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_server.py
@@ -0,0 +1,149 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.constants import StackFeature
+from resource_management.core.exceptions import Fail
+from resource_management.core.resources.system import Execute
+from resource_management.libraries.providers.hdfs_resource import WebHDFSUtil
+from resource_management.libraries.providers.hdfs_resource import HdfsResourceProvider
+from resource_management import is_empty
+from resource_management import shell
+from resource_management.libraries.functions.decorator import retry
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions import conf_select, stack_select
+
+from livy2_service import livy2_service
+from setup_livy2 import setup_livy
+
+class LivyServer(Script):
+
+  def install(self, env):
+    import params
+    env.set_params(params)
+
+    self.install_packages(env)
+
+  def configure(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    setup_livy(env, 'server', upgrade_type=upgrade_type, action='config')
+
+  def start(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    if params.has_ats and params.has_livyserver:
+      Logger.info("Verifying DFS directories where ATS stores time line data 
for active and completed applications.")
+      self.wait_for_dfs_directories_created([params.entity_groupfs_store_dir, 
params.entity_groupfs_active_dir])
+
+    self.configure(env)
+    livy2_service('server', upgrade_type=upgrade_type, action='start')
+
+  def stop(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    livy2_service('server', upgrade_type=upgrade_type, action='stop')
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+
+    check_process_status(status_params.livy2_server_pid_file)
+
+  # TODO: move out and compose with the similar method in resourcemanager.py
+  def wait_for_dfs_directories_created(self, dirs):
+    import params
+
+    ignored_dfs_dirs = HdfsResourceProvider.get_ignored_resources_list(params.hdfs_resource_ignore_file)
+
+    if params.security_enabled:
+      Execute(format("{kinit_path_local} -kt {livy_kerberos_keytab} 
{livy2_principal}"),
+              user=params.livy2_user
+              )
+      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} 
{hdfs_principal_name}"),
+              user=params.hdfs_user
+              )
+
+    for dir_path in dirs:
+      self.wait_for_dfs_directory_created(dir_path, ignored_dfs_dirs)
+
+  def get_pid_files(self):
+    import status_params
+    return [status_params.livy2_server_pid_file]
+
+
+  @retry(times=8, sleep_time=20, backoff_factor=1, err_class=Fail)
+  def wait_for_dfs_directory_created(self, dir_path, ignored_dfs_dirs):
+    import params
+
+    if not is_empty(dir_path):
+      dir_path = HdfsResourceProvider.parse_path(dir_path)
+
+      if dir_path in ignored_dfs_dirs:
+        Logger.info("Skipping DFS directory '" + dir_path + "' as it's marked 
to be ignored.")
+        return
+
+      Logger.info("Verifying if DFS directory '" + dir_path + "' exists.")
+
+      dir_exists = None
+
+      if WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.default_fs):
+        # checking via webhdfs is much faster than executing hdfs dfs -test
+        util = WebHDFSUtil(params.hdfs_site, params.hdfs_user, params.security_enabled)
+        list_status = util.run_command(dir_path, 'GETFILESTATUS', method='GET', ignore_status_codes=['404'], assertable_result=False)
+        dir_exists = ('FileStatus' in list_status)
+      else:
+        # fall back to the slower hdfs dfs -test -d check
+        dfs_ret_code = shell.call(format("hdfs --config {hadoop_conf_dir} dfs -test -d " + dir_path), user=params.livy2_user)[0]
+        dir_exists = not dfs_ret_code  # dfs -test -d returns 0 if the dir exists
+
+      if not dir_exists:
+        raise Fail("DFS directory '" + dir_path + "' does not exist !")
+      else:
+        Logger.info("DFS directory '" + dir_path + "' exists.")
+
+  def get_component_name(self):
+    return "livy2-server"
+
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
+      Logger.info("Executing Livy2 Server Stack Upgrade pre-restart")
+      conf_select.select(params.stack_name, "spark2", params.version)
+      stack_select.select("livy2-server", params.version)
+
+  def get_log_folder(self):
+    import params
+    return params.livy2_log_dir
+
+  def get_user(self):
+    import params
+    return params.livy2_user
+
+if __name__ == "__main__":
+    LivyServer().execute()
+

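The @retry decorator above polls HDFS until the ATS directories appear. A standalone sketch of that retry pattern, matching the signature used here (times, sleep_time, backoff_factor, err_class); illustrative, not Ambari's actual decorator:

    import functools
    import time

    def retry(times=3, sleep_time=1, backoff_factor=1, err_class=Exception):
        def decorator(fn):
            @functools.wraps(fn)
            def wrapper(*args, **kwargs):
                delay = sleep_time
                for attempt in range(times):
                    try:
                        return fn(*args, **kwargs)
                    except err_class:
                        if attempt == times - 1:
                            raise  # out of attempts: propagate the failure
                        time.sleep(delay)
                        delay *= backoff_factor
            return wrapper
        return decorator
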
http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_service.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_service.py
new file mode 100644
index 0000000..0d60cf4
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_service.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from resource_management.libraries.functions import format
+from resource_management.core.resources.system import File, Execute
+
+def livy2_service(name, upgrade_type=None, action=None):
+  import params
+
+  if action == 'start':
+    livyserver_no_op_test = format(
+      'ls {livy2_server_pid_file} >/dev/null 2>&1 && ps -p `cat {livy2_server_pid_file}` >/dev/null 2>&1')
+    Execute(format('{livy2_server_start}'),
+            user=params.livy2_user,
+            environment={'JAVA_HOME': params.java_home},
+            not_if=livyserver_no_op_test
+    )
+
+  elif action == 'stop':
+    Execute(format('{livy2_server_stop}'),
+            user=params.livy2_user,
+            environment={'JAVA_HOME': params.java_home}
+            )
+    File(params.livy2_server_pid_file,
+        action="delete"
+        )
+

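The not_if guard above makes the start action idempotent: the pid file is checked for a live process before livy-server is launched again. A standalone sketch of the same guard in plain Python (paths are illustrative):

    import os
    import subprocess

    def is_running(pid_file):
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
        except (IOError, OSError, ValueError):
            return False
        try:
            os.kill(pid, 0)  # signal 0 only checks process existence
            return True
        except OSError:
            return False

    def start_if_needed(pid_file, start_cmd):
        if not is_running(pid_file):
            subprocess.call(start_cmd, shell=True)
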
http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
index e49756d..452746d 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
@@ -38,7 +38,10 @@ from resource_management.libraries.script.script import Script
 SERVER_ROLE_DIRECTORY_MAP = {
   'SPARK2_JOBHISTORYSERVER' : 'spark2-historyserver',
   'SPARK2_CLIENT' : 'spark2-client',
-  'SPARK2_THRIFTSERVER' : 'spark2-thriftserver'
+  'SPARK2_THRIFTSERVER' : 'spark2-thriftserver',
+  'LIVY2_SERVER' : 'livy2-server',
+  'LIVY2_CLIENT' : 'livy2-client'
 }
 
 component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SPARK2_CLIENT")
@@ -178,16 +181,71 @@ if has_spark_thriftserver and 'spark2-thrift-sparkconf' in config['configurations']:
 
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 hdfs_site = config['configurations']['hdfs-site']
+hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore"
+
+ats_host = set(default("/clusterHostInfo/app_timeline_server_hosts", []))
+has_ats = len(ats_host) > 0
 
 dfs_type = default("/commandParams/dfs_type", "")
 
+# livy related config
+
+# livy for spark2 is only supported from HDP 2.6
+has_livyserver = False
+
+#if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY2, stack_version_formatted):
+livy2_component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "LIVY2_SERVER")
+livy2_conf = format("{stack_root}/current/{livy2_component_directory}/conf")
+livy2_log_dir = config['configurations']['livy2-env']['livy2_log_dir']
+livy2_pid_dir = status_params.livy2_pid_dir
+livy2_home = format("{stack_root}/current/{livy2_component_directory}")
+livy2_user = status_params.livy2_user
+livy2_group = status_params.livy2_group
+user_group = status_params.user_group
+livy2_hdfs_user_dir = format("/user/{livy2_user}")
+livy2_server_pid_file = status_params.livy2_server_pid_file
+
+livy2_server_start = format("{livy2_home}/bin/livy-server start")
+livy2_server_stop = format("{livy2_home}/bin/livy-server stop")
+livy2_logs_dir = format("{livy2_home}/logs")
+
+livy2_env_sh = config['configurations']['livy2-env']['content']
+livy2_log4j_properties = config['configurations']['livy2-log4j-properties']['content']
+livy2_spark_blacklist_properties = config['configurations']['livy2-spark-blacklist']['content']
+
+if 'livy.server.kerberos.keytab' in config['configurations']['livy2-conf']:
+  livy_kerberos_keytab = config['configurations']['livy2-conf']['livy.server.kerberos.keytab']
+else:
+  livy_kerberos_keytab = config['configurations']['livy2-conf']['livy.server.launch.kerberos.keytab']
+if 'livy.server.kerberos.principal' in config['configurations']['livy2-conf']:
+  livy_kerberos_principal = config['configurations']['livy2-conf']['livy.server.kerberos.principal']
+else:
+  livy_kerberos_principal = config['configurations']['livy2-conf']['livy.server.launch.kerberos.principal']
+
+livy2_livyserver_hosts = default("/clusterHostInfo/livy2_server_hosts", [])
+
+# ats 1.5 properties
+entity_groupfs_active_dir = config['configurations']['yarn-site']['yarn.timeline-service.entity-group-fs-store.active-dir']
+entity_groupfs_active_dir_mode = 01777
+entity_groupfs_store_dir = config['configurations']['yarn-site']['yarn.timeline-service.entity-group-fs-store.done-dir']
+entity_groupfs_store_dir_mode = 0700
+is_webhdfs_enabled = hdfs_site['dfs.webhdfs.enabled']
+
+if len(livy2_livyserver_hosts) > 0:
+  has_livyserver = True
+  if security_enabled:
+    livy2_principal = livy_kerberos_principal.replace('_HOST', config['hostname'].lower())
+
+livy2_livyserver_port = default('configurations/livy2-conf/livy.server.port', 8999)
+
+
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
-  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
+  hdfs_resource_ignore_file = hdfs_resource_ignore_file,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
@@ -198,4 +256,5 @@ HdfsResource = functools.partial(
   default_fs = default_fs,
   immutable_paths = get_not_managed_resources(),
   dfs_type = dfs_type
- )
+)
+

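The functools.partial call above pre-binds the cluster-wide arguments (user, keytab, default_fs, ...) so every HdfsResource call site only passes what varies. A standalone sketch of the same pattern (hypothetical function):

    import functools

    def hdfs_resource(path, action, user=None, owner=None, mode=None):
        print("{0} {1} user={2} owner={3} mode={4}".format(action, path, user, owner, mode))

    # bind the common argument once ...
    HdfsResource = functools.partial(hdfs_resource, user="hdfs")
    # ... and call sites stay short
    HdfsResource("/user/livy", "create_on_execute", owner="livy", mode=0o775)
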
http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/service_check.py
index 4d49e75..a27eda2 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/service_check.py
@@ -31,13 +31,32 @@ class SparkServiceCheck(Script):
     if params.security_enabled:
      spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
       Execute(spark_kinit_cmd, user=params.spark_user)
+      if params.has_livyserver:
+        livy_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal}; ")
+        Execute(livy_kinit_cmd, user=params.livy2_user)
 
-    scheme = "https" if params.ui_ssl_enabled else "http"
-    Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k 
{scheme}://{spark_history_server_host}:{spark_history_ui_port} | grep 200"),
-      tries = 10,
-      try_sleep=3,
-      logoutput=True
-    )
+    Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k 
http://{spark_history_server_host}:{spark_history_ui_port} | grep 200"),
+            tries=5,
+            try_sleep=3,
+            logoutput=True
+            )
+    if params.has_livyserver:
+      live_livyserver_host = ""
+      for livyserver_host in params.livy2_livyserver_hosts:
+        try:
+          Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k http://{livyserver_host}:{livy2_livyserver_port}/sessions | grep 200"),
+                  tries=3,
+                  try_sleep=1,
+                  logoutput=True,
+                  user=params.livy2_user
+                  )
+          live_livyserver_host = livyserver_host
+          break
+        except Exception:
+          pass
+      if len(params.livy2_livyserver_hosts) > 0 and live_livyserver_host == "":
+        raise Fail("Connection to all Livy servers failed")
 
 if __name__ == "__main__":
   SparkServiceCheck().execute()
+

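The service check above probes each Livy host in turn and succeeds on the first one whose /sessions endpoint returns 200, failing only if none answer. A standalone sketch of that loop using urllib in place of curl (illustrative names):

    try:
        from urllib.request import urlopen  # Python 3
    except ImportError:
        from urllib2 import urlopen         # Python 2

    def first_live_host(hosts, port):
        for host in hosts:
            url = "http://{0}:{1}/sessions".format(host, port)
            try:
                if urlopen(url, timeout=5).getcode() == 200:
                    return host  # first live server wins
            except Exception:
                continue         # try the next candidate
        raise RuntimeError("Connection to all Livy servers failed")
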
http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_livy2.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_livy2.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_livy2.py
new file mode 100644
index 0000000..f2178f7
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_livy2.py
@@ -0,0 +1,80 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+from resource_management import Directory, File, PropertiesFile, InlineTemplate, format
+
+
+def setup_livy(env, type, upgrade_type=None, action=None):
+  import params
+
+  Directory([params.livy2_pid_dir, params.livy2_log_dir],
+            owner=params.livy2_user,
+            group=params.user_group,
+            mode=0775,
+            create_parents = True
+  )
+  if type == 'server' and action == 'config':
+    params.HdfsResource(params.livy2_hdfs_user_dir,
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.livy2_user,
+                        mode=0775
+    )
+    params.HdfsResource(None, action="execute")
+
+  # create livy-env.sh in etc/conf dir
+  File(os.path.join(params.livy2_conf, 'livy-env.sh'),
+       owner=params.livy2_user,
+       group=params.livy2_group,
+       content=InlineTemplate(params.livy2_env_sh),
+       mode=0644,
+  )
+
+  # create livy.conf in etc/conf dir
+  PropertiesFile(format("{livy2_conf}/livy.conf"),
+                properties = params.config['configurations']['livy2-conf'],
+                key_value_delimiter = " ",
+                owner=params.livy2_user,
+                group=params.livy2_group,
+  )
+
+  # create log4j.properties in etc/conf dir
+  File(os.path.join(params.livy2_conf, 'log4j.properties'),
+       owner=params.livy2_user,
+       group=params.livy2_group,
+       content=params.livy2_log4j_properties,
+       mode=0644,
+  )
+
+  # create spark-blacklist.properties in etc/conf dir
+  File(os.path.join(params.livy2_conf, 'spark-blacklist.conf'),
+       owner=params.livy2_user,
+       group=params.livy2_group,
+       content=params.livy2_spark_blacklist_properties,
+       mode=0644,
+  )
+
+  Directory(params.livy2_logs_dir,
+            owner=params.livy2_user,
+            group=params.livy2_group,
+            mode=0755,
+  )
+

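Note the key_value_delimiter=" " in the PropertiesFile above: livy.conf uses space-separated "key value" pairs rather than "key=value". A standalone sketch of what that renders (hypothetical helper, illustrative output path):

    def write_properties(path, properties, delimiter=" "):
        with open(path, "w") as f:
            for key in sorted(properties):
                f.write("{0}{1}{2}\n".format(key, delimiter, properties[key]))

    write_properties("/tmp/livy.conf.example",
                     {"livy.server.port": "8999", "livy.spark.master": "yarn-cluster"})
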
http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/status_params.py
index 2b6aafa..2d11ac2 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/status_params.py
@@ -37,3 +37,9 @@ spark_pid_dir = config['configurations']['spark2-env']['spark_pid_dir']
 spark_history_server_pid_file = format("{spark_pid_dir}/spark-{spark_user}-org.apache.spark.deploy.history.HistoryServer-1.pid")
 spark_thrift_server_pid_file = format("{spark_pid_dir}/spark-{hive_user}-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid")
 stack_name = default("/hostLevelParams/stack_name", None)
+
+if "livy2-env" in config['configurations']:
+  livy2_user = config['configurations']['livy2-env']['livy2_user']
+  livy2_group = config['configurations']['livy2-env']['livy2_group']
+  livy2_pid_dir = config['configurations']['livy2-env']['livy2_pid_dir']
+  livy2_server_pid_file = format("{livy2_pid_dir}/livy-{livy2_user}-server.pid")
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
index b94d376..ca4c9f5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
@@ -253,6 +253,11 @@
       "min_version": "2.5.0.0"
     },
     {
+      "name": "spark_livy2",
+      "description": "Livy as slave component of spark",
+      "min_version": "2.6.0.0"
+    },
+    {
       "name": "atlas_ranger_plugin_support",
       "description": "Atlas Ranger plugin support",
       "min_version": "2.5.0.0"

http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index ddf407e..f244cb9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -2083,4 +2083,4 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     return hosts_for_component
 
   def isComponentUsingCardinalityForLayout(self, componentName):
-    return super(HDP25StackAdvisor, self).isComponentUsingCardinalityForLayout (componentName) or  componentName in ['SPARK2_THRIFTSERVER', 'LIVY_SERVER']
+    return super(HDP25StackAdvisor, self).isComponentUsingCardinalityForLayout (componentName) or  componentName in ['SPARK2_THRIFTSERVER', 'LIVY2_SERVER', 'LIVY_SERVER']

http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
new file mode 100644
index 0000000..457869d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
@@ -0,0 +1,82 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="true">
+    <property>
+        <name>livy.environment</name>
+        <value>production</value>
+        <description>
+            Specifies Livy's environment. May either be "production" or 
"development". In "development"
+            mode, Livy will enable debugging options, such as reporting 
possible routes on a 404.
+            defaults to development
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.port</name>
+        <value>8999</value>
+        <description>
+            What port to start the server on. Defaults to 8999.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.session.timeout</name>
+        <value>3600000</value>
+        <description>
+            Time in milliseconds on how long Livy will wait before timing out 
an idle session.
+            Default is one hour.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.impersonation.enabled</name>
+        <value>true</value>
+        <description>
+            If livy should use proxy users when submitting a job.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.impersonation.enabled</name>
+        <value>true</value>
+        <description>
+            If livy should use proxy users when submitting a job.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.csrf_protection.enabled</name>
+        <value>true</value>
+        <description>
+            Whether to enable csrf protection for livy's rest api.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.spark.master</name>
+        <value>yarn-cluster</value>
+        <description>
+            spark.master property for spark engine
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+</configuration>
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-env.xml
new file mode 100644
index 0000000..afb1d17
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-env.xml
@@ -0,0 +1,95 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_adding_forbidden="true">
+    <property>
+        <name>livy2_user</name>
+        <display-name>Livy User</display-name>
+        <value>livy</value>
+        <property-type>USER</property-type>
+        <value-attributes>
+            <type>user</type>
+            <overridable>false</overridable>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy2_group</name>
+        <display-name>Livy Group</display-name>
+        <value>livy</value>
+        <property-type>GROUP</property-type>
+        <description>livy group</description>
+        <value-attributes>
+            <type>user</type>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy2_log_dir</name>
+        <value>/var/log/livy2</value>
+        <description>Livy Log Dir</description>
+        <value-attributes>
+            <type>directory</type>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy2_pid_dir</name>
+        <value>/var/run/livy2</value>
+        <value-attributes>
+            <type>directory</type>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>spark_home</name>
+        <value>{{stack_root}}/current/spark2-client</value>
+        <value-attributes>
+            <type>directory</type>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <!-- livy-env.sh -->
+    <property>
+        <name>content</name>
+        <description>This is the jinja template for livy-env.sh 
file</description>
+        <value>
+            #!/usr/bin/env bash
+
+            # - SPARK_HOME      The Spark installation Livy should use
+            # - HADOOP_CONF_DIR Directory containing the Hadoop / YARN configuration to use.
+            # - LIVY_LOG_DIR    Where log files are stored. (Default: ${LIVY_HOME}/logs)
+            # - LIVY_PID_DIR    Where the pid file is stored. (Default: /tmp)
+            # - LIVY_SERVER_JAVA_OPTS  Java opts for the Livy server (JVM settings such as heap size and GC algorithm can go here.)
+            export SPARK_HOME=/usr/hdp/current/spark2-client
+            export JAVA_HOME={{java_home}}
+            export HADOOP_CONF_DIR=/etc/hadoop/conf
+            export LIVY_LOG_DIR={{livy2_log_dir}}
+            export LIVY_PID_DIR={{livy2_pid_dir}}
+            export LIVY_SERVER_JAVA_OPTS="-Xmx2g"
+        </value>
+        <value-attributes>
+            <type>content</type>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+</configuration>
+

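The {{...}} placeholders in the livy-env.sh content above are filled in from params at deploy time; Ambari renders them with InlineTemplate (Jinja2). A standalone sketch of that substitution step, approximated with a regex:

    import re

    def render(template, params):
        # replace {{name}} with params["name"], leaving unknown names intact
        return re.sub(r"\{\{(\w+)\}\}",
                      lambda m: str(params.get(m.group(1), m.group(0))),
                      template)

    print(render("export LIVY_LOG_DIR={{livy2_log_dir}}",
                 {"livy2_log_dir": "/var/log/livy2"}))
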
http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-log4j-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-log4j-properties.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-log4j-properties.xml
new file mode 100644
index 0000000..ccc1dcf
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-log4j-properties.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+    <property>
+        <name>content</name>
+        <description>Livy2-log4j-Properties</description>
+        <value>
+            # Set everything to be logged to the console
+            log4j.rootCategory=INFO, console
+            log4j.appender.console=org.apache.log4j.ConsoleAppender
+            log4j.appender.console.target=System.err
+            log4j.appender.console.layout=org.apache.log4j.PatternLayout
+            log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+            log4j.logger.org.eclipse.jetty=WARN
+        </value>
+        <value-attributes>
+            <type>content</type>
+            <show-property-name>false</show-property-name>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+</configuration>
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-spark-blacklist.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-spark-blacklist.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-spark-blacklist.xml
new file mode 100644
index 0000000..43b312a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-spark-blacklist.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+    <property>
+        <name>content</name>
+        <description>spark2-blacklist.properties</description>
+        <value>
+            #
+            # Configuration override / blacklist. Defines a list of properties that users are not allowed
+            # to override when starting Spark sessions.
+            #
+            # This file takes a list of property names (one per line). Empty lines and lines starting with "#"
+            # are ignored.
+            #
+
+            # Disallow overriding the master and the deploy mode.
+            spark.master
+            spark.submit.deployMode
+
+            # Disallow overriding the location of Spark cached jars.
+            spark.yarn.jar
+            spark.yarn.jars
+            spark.yarn.archive
+
+            # Don't allow users to override the RSC timeout.
+            livy.rsc.server.idle_timeout
+        </value>
+        <value-attributes>
+            <type>content</type>
+            <show-property-name>false</show-property-name>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/kerberos.json
new file mode 100644
index 0000000..a16e2e1
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/kerberos.json
@@ -0,0 +1,126 @@
+{
+  "services": [
+    {
+      "name": "SPARK2",
+      "identities": [
+        {
+          "name": "/smokeuser"
+        },
+        {
+          "name": "spark2user",
+          "principal": {
+            "value": 
"${spark2-env/spark_user}-${cluster_name|toLower()}@${realm}",
+            "type" : "user",
+            "configuration": 
"spark2-defaults/spark.history.kerberos.principal",
+            "local_username" : "${spark2-env/spark_user}"
+          },
+          "keytab": {
+            "file": "${keytab_dir}/spark.headless.keytab",
+            "owner": {
+              "name": "${spark2-env/spark_user}",
+              "access": "r"
+            },
+            "group": {
+              "name": "${cluster-env/user_group}",
+              "access": ""
+            },
+            "configuration": "spark2-defaults/spark.history.kerberos.keytab"
+          }
+        }
+      ],
+      "configurations": [
+        {
+          "spark-defaults": {
+            "spark.history.kerberos.enabled": "true"
+          }
+        },
+        {
+          "spark-thrift-sparkconf": {
+            "spark.yarn.keytab": "${spark2-env/hive_kerberos_keytab}",
+            "spark.yarn.principal": "${spark2-env/hive_kerberos_principal}"
+          }
+        },
+        {
+          "livy2-conf": {
+            "livy.server.auth.type": "kerberos",
+            "livy.impersonation.enabled": "true",
+            "livy.superusers": "zeppelin-${cluster_name}"
+          }
+        },
+        {
+          "core-site": {
+            "hadoop.proxyuser.${livy2-env/livy2_user}.groups": "*",
+            "hadoop.proxyuser.${livy2-env/livy2_user}.hosts": "*"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "SPARK2_JOBHISTORYSERVER",
+          "identities": [
+            {
+              "name": "hdfs",
+              "reference": "/HDFS/NAMENODE/hdfs"
+            }
+          ]
+        },
+        {
+          "name": "SPARK2_CLIENT"
+        },
+        {
+          "name": "SPARK2_THRIFTSERVER",
+          "identities": [
+            {
+              "name": "hdfs",
+              "reference": "/HDFS/NAMENODE/hdfs"
+            },
+            {
+              "name": "hive_server_hive",
+              "reference": "/HIVE/HIVE_SERVER/hive_server_hive"
+            }
+          ]
+        },
+        {
+          "name": "LIVY2_SERVER",
+          "identities": [
+            {
+              "name": "hdfs",
+              "reference": "/HDFS/NAMENODE/hdfs"
+            },
+            {
+              "name": "livyuser",
+              "principal": {
+                "value": "${livy2-env/livy2_user}/_HOST@${realm}",
+                "type" : "service",
+                "configuration": 
"livy2-conf/livy.server.launch.kerberos.principal",
+                "local_username": "${livy2-env/livy2_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/livy.service.keytab",
+                "owner": {
+                  "name": "${livy2-env/livy2_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": 
"livy2-conf/livy.server.launch.kerberos.keytab"
+              }
+            },
+            {
+              "name": "livy_spnego",
+              "reference": "/spnego",
+              "principal": {
+                "configuration": 
"livy2-conf/livy.server.auth.kerberos.principal"
+              },
+              "keytab": {
+                "configuration": "livy2-conf/livy.server.auth.kerberos.keytab"
+              }
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file

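The "_HOST" token in the livyuser principal above is expanded to the local FQDN at kerberization time; params.py performs the same substitution for livy2_principal. A standalone sketch:

    def expand_principal(principal, fqdn):
        # Kerberos convention: _HOST is replaced by the lowercased host FQDN
        return principal.replace("_HOST", fqdn.lower())

    print(expand_principal("livy/_HOST@EXAMPLE.COM", "c6401.ambari.apache.org"))
    # -> livy/c6401.ambari.apache.org@EXAMPLE.COM
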
http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml
index 5916b08..bda4b1b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml
@@ -25,6 +25,98 @@
       <name>SPARK2</name>
       <comment>Apache Spark 2.1 is a fast and general engine for large-scale 
data processing. This service is &lt;b&gt;Technical Preview&lt;/b&gt;.</comment>
       <version>2.1.x</version>
+
+      <components>
+        <component>
+          <name>LIVY2_SERVER</name>
+          <displayName>Livy for Spark2 Server</displayName>
+          <category>SLAVE</category>
+          <cardinality>0+</cardinality>
+          <versionAdvertised>true</versionAdvertised>
+          <dependencies>
+            <dependency>
+              <name>SPARK2/SPARK2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/livy2_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+          <logs>
+            <log>
+              <logId>livy2_server</logId>
+              <primary>true</primary>
+            </log>
+          </logs>
+        </component>
+      </components>
+
+      <extends>common-services/SPARK2/2.0.0</extends>
+
+      <configuration-dependencies>
+        <config-type>spark2-defaults</config-type>
+        <config-type>spark2-env</config-type>
+        <config-type>spark2-log4j-properties</config-type>
+        <config-type>spark2-metrics-properties</config-type>
+        <config-type>spark2-thrift-sparkconf</config-type>
+        <config-type>spark2-hive-site-override</config-type>
+        <config-type>spark2-thrift-fairscheduler</config-type>
+        <config-type>livy2-conf</config-type>
+        <config-type>livy2-env</config-type>
+        <config-type>livy2-log4j-properties</config-type>
+        <config-type>livy2-spark-blacklist</config-type>
+      </configuration-dependencies>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>
+          <packages>
+            <package>
+              <name>spark2_${stack_version}</name>
+            </package>
+            <package>
+              <name>spark2_${stack_version}-python</name>
+            </package>
+            <package>
+              <name>livy2_${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>debian7,ubuntu12,ubuntu14</osFamily>
+          <packages>
+            <package>
+              <name>spark2-${stack_version}</name>
+            </package>
+            <package>
+              <name>spark2-${stack_version}-python</name>
+            </package>
+            <package>
+              <name>livy2-${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py b/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
new file mode 100644
index 0000000..6244d87
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import json
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+from only_for_platform import not_for_platform, PLATFORM_WINDOWS
+
+@not_for_platform(PLATFORM_WINDOWS)
+@patch("resource_management.libraries.functions.get_stack_version", 
new=MagicMock(return_value="2.5.0.0-1597"))
+class TestSparkClient(RMFTestCase):
+    COMMON_SERVICES_PACKAGE_DIR = "SPARK2/2.0.0/package"
+    STACK_VERSION = "2.6"
+    DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
+
+    def test_configure_default(self):
+        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/livy2_server.py",
+                           classname = "LivyServer",
+                           command = "start",
+                           config_file="default.json",
+                           stack_version = self.STACK_VERSION,
+                           target = RMFTestCase.TARGET_COMMON_SERVICES
+                           )
+        self.assert_start_default()
+        self.assertNoMoreResources()
+
+    def assert_start_default(self):
+        self.assertResourceCalled('Directory', '/var/run/livy2',
+                                  owner = 'livy',
+                                  group = 'hadoop',
+                                  create_parents = True,
+                                  mode = 0775
+                                  )
+        self.assertResourceCalled('Directory', '/var/log/livy2',
+                                  owner = 'livy',
+                                  group = 'hadoop',
+                                  create_parents = True,
+                                  mode = 0775
+                                  )
+        self.assertResourceCalled('HdfsResource', '/user/livy',
+                                  immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+                                  security_enabled = False,
+                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  keytab = UnknownConfigurationMock(),
+                                  default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                                  hdfs_site = {u'a': u'b'},
+                                  kinit_path_local = '/usr/bin/kinit',
+                                  principal_name = UnknownConfigurationMock(),
+                                  user = 'hdfs',
+                                  owner = 'livy',
+                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  type = 'directory',
+                                  action = ['create_on_execute'],
+                                  hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                                  dfs_type = '',
+                                  mode = 0775,
+                                  )
+        self.assertResourceCalled('HdfsResource', None,
+                                  immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+                                  security_enabled = False,
+                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  keytab = UnknownConfigurationMock(),
+                                  default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                                  hdfs_site = {u'a': u'b'},
+                                  kinit_path_local = '/usr/bin/kinit',
+                                  principal_name = UnknownConfigurationMock(),
+                                  user = 'hdfs',
+                                  action = ['execute'],
+                                  hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                                  dfs_type = '',
+                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  )
+        self.assertResourceCalled('File', '/usr/hdp/current/livy2-server/conf/livy-env.sh',
+                                  content = InlineTemplate(self.getConfig()['configurations']['livy2-env']['content']),
+                                  owner = 'livy',
+                                  group = 'livy',
+                                  mode = 0644,
+                                  )
+        self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/livy2-server/conf/livy.conf',
+                                  owner = 'livy',
+                                  key_value_delimiter = ' ',
+                                  group = 'livy',
+                                  properties = self.getConfig()['configurations']['livy2-conf'],
+                                  )
+        self.assertResourceCalled('File', '/usr/hdp/current/livy2-server/conf/log4j.properties',
+                                  content = '\n            # Set everything to be logged to the console\n            log4j.rootCategory=INFO, console\n            log4j.appender.console=org.apache.log4j.ConsoleAppender\n            log4j.appender.console.target=System.err\n            log4j.appender.console.layout=org.apache.log4j.PatternLayout\n            log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n            log4j.logger.org.eclipse.jetty=WARN',
+                                  owner = 'livy',
+                                  group = 'livy',
+                                  mode = 0644,
+                                  )
+        self.assertResourceCalled('File', '/usr/hdp/current/livy2-server/conf/spark-blacklist.conf',
+                                  content = self.getConfig()['configurations']['livy2-spark-blacklist']['content'],
+                                  owner = 'livy',
+                                  group = 'livy',
+                                  mode = 0644,
+                                  )
+        self.assertResourceCalled('Directory', '/usr/hdp/current/livy2-server/logs',
+                                  owner = 'livy',
+                                  group = 'livy',
+                                  mode = 0755,
+                                  )
+        self.assertResourceCalled('Execute', '/usr/hdp/current/livy2-server/bin/livy-server start',
+                                  environment = {'JAVA_HOME': '/usr/jdk64/jdk1.7.0_45'},
+                                  not_if = 'ls /var/run/livy2/livy-livy-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/livy2/livy-livy-server.pid` >/dev/null 2>&1',
+                                  user = 'livy'
+                                  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/fecc9b80/ambari-server/src/test/python/stacks/2.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/configs/default.json b/ambari-server/src/test/python/stacks/2.6/configs/default.json
index fa12757..0cbe5c1 100644
--- a/ambari-server/src/test/python/stacks/2.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.6/configs/default.json
@@ -205,6 +205,13 @@
       "spark_group": "spark",
       "spark_user": "spark"
     },
+    "spark2-env": {
+      "content": "\n#!/usr/bin/env bash\n\n# This file is sourced when running 
various Spark programs.\n# Copy it as spark-env.sh and edit that to configure 
Spark for your site.\n\n# Options read in YARN client 
mode\n#SPARK_EXECUTOR_INSTANCES=\"2\" #Number of workers to start (Default: 
2)\n#SPARK_EXECUTOR_CORES=\"1\" #Number of cores for the workers (Default: 
1).\n#SPARK_EXECUTOR_MEMORY=\"1G\" #Memory per Worker (e.g. 1000M, 2G) 
(Default: 1G)\n#SPARK_DRIVER_MEMORY=\"512 Mb\" #Memory for Master (e.g. 1000M, 
2G) (Default: 512 Mb)\n#SPARK_YARN_APP_NAME=\"spark\" #The name of your 
application (Default: Spark)\n#SPARK_YARN_QUEUE=\"~@~Xdefault~@~Y\" #The hadoop 
queue to use for allocation requests (Default: 
@~Xdefault~@~Y)\n#SPARK_YARN_DIST_FILES=\"\" #Comma separated list of files to 
be distributed with the job.\n#SPARK_YARN_DIST_ARCHIVES=\"\" #Comma separated 
list of archives to be distributed with the job.\n\n# Generic options for the 
daemons used in the standalone deploy mode\n\n# Alt
 ernate conf dir. (Default: ${SPARK_HOME}/conf)\nexport 
SPARK_CONF_DIR=${SPARK_HOME:-{{spark_home}}}/conf\n\n# Where log files are 
stored.(Default:${SPARK_HOME}/logs)\n#export 
SPARK_LOG_DIR=${SPARK_HOME:-{{spark_home}}}/logs\nexport 
SPARK_LOG_DIR={{spark_log_dir}}\n\n# Where the pid file is stored. (Default: 
/tmp)\nexport SPARK_PID_DIR={{spark_pid_dir}}\n\n# A string representing this 
instance of spark.(Default: $USER)\nSPARK_IDENT_STRING=$USER\n\n# The 
scheduling priority for daemons. (Default: 0)\nSPARK_NICENESS=0\n\nexport 
HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\nexport 
HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n# The java 
implementation to use.\nexport JAVA_HOME={{java_home}}\n\nif [ -d 
\"/etc/tez/conf/\" ]; then\n  export TEZ_CONF_DIR=/etc/tez/conf\nelse\n  export 
TEZ_CONF_DIR=\nfi",
+      "spark_pid_dir": "/var/run/spark",
+      "spark_log_dir": "/var/log/spark",
+      "spark_group": "spark",
+      "spark_user": "spark"
+    },
     "spark-metrics-properties": {
       "content": "\n# syntax: 
[instance].sink|source.[name].[options]=[value]\n\n# This file configures 
Spark's internal metrics system. The metrics system is\n# divided into 
instances which correspond to internal components.\n# Each instance can be 
configured to report its metrics to one or more sinks.\n# Accepted values for 
[instance] are \"master\", \"worker\", \"executor\", \"driver\",\n# and 
\"applications\". A wild card \"*\" can be used as an instance name, in\n# 
which case all instances will inherit the supplied property.\n#\n# Within an 
instance, a \"source\" specifies a particular set of grouped metrics.\n# there 
are two kinds of sources:\n# 1. Spark internal sources, like MasterSource, 
WorkerSource, etc, which will\n# collect a Spark component's internal state. 
Each instance is paired with a\n# Spark source that is added automatically.\n# 
2. Common sources, like JvmSource, which will collect low level state.\n# These 
can be added through configuration options and are then loaded\n# using reflection.\n#\n# A \"sink\" specifies where metrics are 
delivered to. Each instance can be\n# assigned one or more sinks.\n#\n# The 
sink|source field specifies whether the property relates to a sink or\n# 
source.\n#\n# The [name] field specifies the name of source or sink.\n#\n# The 
[options] field is the specific property of this source or sink. The\n# source 
or sink is responsible for parsing this property.\n#\n# Notes:\n# 1. To add a 
new sink, set the \"class\" option to a fully qualified class\n# name (see 
examples below).\n# 2. Some sinks involve a polling period. The minimum allowed 
polling period\n# is 1 second.\n# 3. Wild card properties can be overridden by 
more specific properties.\n# For example, master.sink.console.period takes 
precedence over\n# *.sink.console.period.\n# 4. A metrics specific 
configuration\n# \"spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties\" 
should be\n# added to Java properties using -Dspark.metrics.conf=xxx if you 
want to\n# customize metrics system. You can also put the file in 
${SPARK_HOME}/conf\n# and it will be loaded automatically.\n# 5. MetricsServlet 
is added by default as a sink in master, worker and client\n# driver, you can 
send http request \"/metrics/json\" to get a snapshot of all the\n# registered 
metrics in json format. For master, requests \"/metrics/master/json\" and\n# 
\"/metrics/applications/json\" can be sent seperately to get metrics snapshot 
of\n# instance master and applications. MetricsServlet may not be configured by 
self.\n#\n\n## List of available sinks and their properties.\n\n# 
org.apache.spark.metrics.sink.ConsoleSink\n# Name: Default: Description:\n# 
period 10 Poll period\n# unit seconds Units of poll period\n\n# 
org.apache.spark.metrics.sink.CSVSink\n# Name: Default: Description:\n# period 
10 Poll period\n# unit seconds Units of poll period\n# directory /tmp Where to 
store CSV files\n\n# org.apache.spark.metrics.sink.GangliaSink\n# Name: 
Default: Description:\n# host NONE Hostname or multicast group of Ganglia server\n# port NONE Port of Ganglia 
server(s)\n# period 10 Poll period\n# unit seconds Units of poll period\n# ttl 
1 TTL of messages sent by Ganglia\n# mode multicast Ganglia network mode 
('unicast' or 'multicast')\n\n# org.apache.spark.metrics.sink.JmxSink\n\n# 
org.apache.spark.metrics.sink.MetricsServlet\n# Name: Default: Description:\n# 
path VARIES* Path prefix from the web server root\n# sample false Whether to 
show entire set of samples for histograms ('false' or 'true')\n#\n# * Default 
path is /metrics/json for all instances except the master. The master has two 
paths:\n# /metrics/applications/json # App information\n# /metrics/master/json # 
Master information\n\n# org.apache.spark.metrics.sink.GraphiteSink\n# Name: 
Default: Description:\n# host NONE Hostname of Graphite server\n# port NONE 
Port of Graphite server\n# period 10 Poll period\n# unit seconds Units of poll 
period\n# prefix EMPTY STRING Prefix to prepend to metric name\n\n## Examples\n# Enable JmxSink for all instances by class 
name\n#*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink\n\n# Enable 
ConsoleSink for all instances by class 
name\n#*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink\n\n# 
Polling period for 
ConsoleSink\n#*.sink.console.period=10\n\n#*.sink.console.unit=seconds\n\n# 
Master instance overlap polling 
period\n#master.sink.console.period=15\n\n#master.sink.console.unit=seconds\n\n# Enable CsvSink for all 
instances\n#*.sink.csv.class=org.apache.spark.metrics.sink.CsvSink\n\n# Polling 
period for CsvSink\n#*.sink.csv.period=1\n\n#*.sink.csv.unit=minutes\n\n# 
Polling directory for CsvSink\n#*.sink.csv.directory=/tmp/\n\n# Worker instance 
overlap polling 
period\n#worker.sink.csv.period=10\n\n#worker.sink.csv.unit=minutes\n\n# Enable 
jvm source for instance master, worker, driver and 
executor\n#master.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#worker.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#executor.source.jvm.class=org.apache.spark.metrics.source.JvmSource"
     },
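
The {{spark_home}}, {{spark_log_dir}} and {{spark_pid_dir}} placeholders in the spark2-env content above are substituted server-side before the script is written to disk (Ambari renders such content with InlineTemplate). Plain Jinja2 shows the same substitution; the values below are illustrative only:

    from jinja2 import Template

    content = 'export SPARK_LOG_DIR={{spark_log_dir}}\nexport SPARK_PID_DIR={{spark_pid_dir}}'
    print(Template(content).render(spark_log_dir='/var/log/spark2',
                                   spark_pid_dir='/var/run/spark2'))
    # export SPARK_LOG_DIR=/var/log/spark2
    # export SPARK_PID_DIR=/var/run/spark2
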
@@ -214,6 +221,9 @@
     "livy-log4j-properties": {
       "content": "\n            # Set everything to be logged to the console\n 
           log4j.rootCategory=INFO, console\n            
log4j.appender.console=org.apache.log4j.ConsoleAppender\n            
log4j.appender.console.target=System.err\n            
log4j.appender.console.layout=org.apache.log4j.PatternLayout\n            
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: 
%m%n\n\n            log4j.logger.org.eclipse.jetty=WARN"
     },
+    "livy2-log4j-properties": {
+      "content": "\n            # Set everything to be logged to the console\n 
           log4j.rootCategory=INFO, console\n            
log4j.appender.console=org.apache.log4j.ConsoleAppender\n            
log4j.appender.console.target=System.err\n            
log4j.appender.console.layout=org.apache.log4j.PatternLayout\n            
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: 
%m%n\n\n            log4j.logger.org.eclipse.jetty=WARN"
+    },
     "livy-conf": {
       "livy.server.port": "8998",
       "livy.server.csrf_protection.enabled": "true",
@@ -221,9 +231,19 @@
       "livy.impersonation.enabled": "true",
       "livy.server.session.timeout": "3600000"
     },
+    "livy2-conf": {
+      "livy.server.port": "8999",
+      "livy.server.csrf_protection.enabled": "true",
+      "livy.environment": "production",
+      "livy.impersonation.enabled": "true",
+      "livy.server.session.timeout": "3600000"
+    },
     "livy-spark-blacklist": {
       "content": "\n            #\n            # Configuration override / 
blacklist. Defines a list of properties that users are not allowed\n            
# to override when starting Spark sessions.\n            #\n            # This 
file takes a list of property names (one per line). Empty lines and lines 
starting with \"#\"\n            # are ignored.\n            #"
     },
+    "livy2-spark-blacklist": {
+      "content": "\n            #\n            # Configuration override / 
blacklist. Defines a list of properties that users are not allowed\n            
# to override when starting Spark sessions.\n            #\n            # This 
file takes a list of property names (one per line). Empty lines and lines 
starting with \"#\"\n            # are ignored.\n            #"
+    },
     "livy-env": {
       "livy_group": "livy",
       "spark_home": "/usr/hdp/current/spark-client",
@@ -232,6 +252,14 @@
       "livy_log_dir": "/var/log/livy",
       "livy_user": "livy"
     },
+    "livy2-env": {
+      "livy2_group": "livy",
+      "spark_home": "/usr/hdp/current/spark2-client",
+      "content": "\n            #!/usr/bin/env bash\n\n            # - 
SPARK_HOME      Spark which you would like to use in livy\n            # - 
HADOOP_CONF_DIR Directory containing the Hadoop / YARN configuration to use.\n  
          # - LIVY_LOG_DIR    Where log files are stored.  (Default: 
${LIVY_HOME}/logs)\n            # - LIVY_PID_DIR    Where the pid file is 
stored. (Default: /tmp)\n            # - LIVY_SERVER_JAVA_OPTS  Java Opts for 
running livy server (You can set jvm related setting here, like jvm memory/gc 
algorithm and etc.)\n        export SPARK_HOME=/usr/hdp/current/spark2-client\n 
       export HADOOP_CONF_DIR=/etc/hadoop/conf\n        export 
LIVY_LOG_DIR={{livy_log_dir}}\n        export LIVY_PID_DIR={{livy_pid_dir}}\n   
     export LIVY_SERVER_JAVA_OPTS=\"-Xmx2g\"",
+      "livy2_pid_dir": "/var/run/livy2",
+      "livy2_log_dir": "/var/log/livy2",
+      "livy2_user": "livy"
+    },
     "infra-solr-env": {
       "infra_solr_znode": "/infra-solr",
       "infra_solr_user": "solr",
@@ -297,7 +325,7 @@
       "zeppelin.executor.mem": "512m", 
       "zeppelin_pid_dir": "/var/run/zeppelin", 
       "zeppelin.executor.instances": "2", 
-      "log4j_properties_content": "\nlog4j.rootLogger = INFO, 
dailyfile\nlog4j.appender.stdout = 
org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - 
%m%n\nlog4j.appender.dailyfile.DatePattern=.yyyy-MM-dd\nlog4j.appender.dailyfile.Threshold
 = INFO\nlog4j.appender.dailyfile = 
org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.dailyfile.File = 
${zeppelin.log.file}\nlog4j.appender.dailyfile.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.dailyfile.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - %m%n", 
+      "log4j_properties_content": "\nlog4j.rootLogger = INFO, 
dailyfile\nlog4j.appender.stdout = 
org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - 
%m%n\nlog4j.appender.dailyfile.DatePattern=.yyyy-MM-dd\nlog4j.appender.dailyfile.Threshold
 = INFO\nlog4j.appender.dailyfile = 
org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.dailyfile.File = 
${zeppelin.log.file}\nlog4j.appender.dailyfile.layout = 
org.apache.log4j.PatternLayout\nlog4j.appender.dailyfile.layout.ConversionPattern=%5p
 [%d] ({%t} %F[%M]:%L) - %m%n",
       "zeppelin.server.kerberos.principal": "", 
       "zeppelin_user": "zeppelin", 
       "zeppelin_env_content": "\n# Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode\nexport 
MASTER=yarn-client\nexport SPARK_YARN_JAR={{spark_jar}}\n\n\n# Where log files 
are stored.  PWD by default.\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}\n\n# 
The pid files are stored. /tmp by default.\nexport 
ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}\n\n\nexport 
JAVA_HOME={{java64_home}}\n\n# Additional jvm options. for example, export 
ZEPPELIN_JAVA_OPTS=\"-Dspark.executor.memory=8g -Dspark.cores.max=16\"\nexport 
ZEPPELIN_JAVA_OPTS=\"-Dhdp.version={{full_stack_version}} 
-Dspark.executor.memory={{executor_mem}} 
-Dspark.executor.instances={{executor_instances}} 
-Dspark.yarn.queue={{spark_queue}}\"\n\n\n# Zeppelin jvm mem options Default 
-Xmx1024m -XX:MaxPermSize=512m\n# export ZEPPELIN_MEM\n\n# zeppelin interpreter 
process jvm mem options. Default = ZEPPELIN_MEM\n# export 
ZEPPELIN_INTP_MEM\n\n# zeppelin interpreter process jvm options. Default = 
ZEPPELIN_JAVA_OPTS\n# export ZEPPELIN_INTP_JAVA_OPTS\n\n# Where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_DIR\n\n# Id of notebook to be displayed in homescreen. ex) 
2A94M5J1Z\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN\n\n# hide homescreen notebook 
from list when this value set to \"true\". default \"false\"\n# export 
ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE\n\n# Bucket where notebook saved\n# export 
ZEPPELIN_NOTEBOOK_S3_BUCKET\n\n# User in bucket where notebook saved. For 
example bucket/user/notebook/2A94M5J1Z/note.json\n# export 
ZEPPELIN_NOTEBOOK_S3_USER\n\n# A string representing this instance of zeppelin. 
$USER by default\n# export ZEPPELIN_IDENT_STRING\n\n# The scheduling priority 
for daemons. Defaults to 0.\n# export ZEPPELIN_NICENESS\n\n\n#### Spark 
interpreter configuration ####\n\n## Use provided spark installation ##\n## 
defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit\n##\n# (required) When it is defined, load it instead of Zeppelin 
embedded Spark libraries\nexport SPARK_HOME={{spark_home}}\n\n# (optional) extra options to pass to 
spark submit. eg) \"--driver-memory 512M --executor-memory 1G\".\n# export 
SPARK_SUBMIT_OPTIONS\n\n## Use embedded spark binaries ##\n## without 
SPARK_HOME defined, Zeppelin still able to run spark interpreter process using 
embedded spark binaries.\n## however, it is not encouraged when you can define 
SPARK_HOME\n##\n# Options read in YARN client mode\n# yarn-site.xml is located 
in configuration directory in HADOOP_CONF_DIR.\nexport 
HADOOP_CONF_DIR=/etc/hadoop/conf\n\n# Pyspark (supported with Spark 1.2.1 and 
above)\n# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI\n# path to 
the python command. must be the same path on the driver(Zeppelin) and all 
workers.\n# export PYSPARK_PYTHON\n\nexport 
PYTHONPATH=\"${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip\"\nexport
SPARK_YARN_USER_ENV=\"PYTHONPATH=${PYTHONPATH}\"\n\n## Spark interpreter options ##\n##\n# Use HiveContext instead of 
SQLContext if set true. true by default.\n# export 
ZEPPELIN_SPARK_USEHIVECONTEXT\n\n# Execute multiple SQL concurrently if set 
true. false by default.\n# export ZEPPELIN_SPARK_CONCURRENTSQL\n\n# Max number 
of SparkSQL result to display. 1000 by default.\n# export 
ZEPPELIN_SPARK_MAXRESULT", 
