AMBARI-21769. zeppelin principal and livy.superusers property do not match on upgraded cluster from Ambari 2.4.2 and HDP 2.5.5 (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c129baa1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c129baa1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c129baa1

Branch: refs/heads/feature-branch-AMBARI-21307
Commit: c129baa1f6e4438b7af9e7a01735d8ead05df1be
Parents: 5895040
Author: Robert Levas <rle...@hortonworks.com>
Authored: Mon Aug 21 17:00:52 2017 -0400
Committer: Robert Levas <rle...@hortonworks.com>
Committed: Mon Aug 21 17:00:52 2017 -0400

----------------------------------------------------------------------
 .../kerberos/DeconstructedPrincipal.java        |   2 +-
 .../server/upgrade/UpgradeCatalog252.java       | 174 +++++++
 .../SPARK/2.2.0/service_advisor.py              |  48 +-
 .../ZEPPELIN/0.6.0.3.0/service_advisor.py       |  46 +-
 .../stacks/HDP/2.5/services/SPARK/kerberos.json |   3 +-
 .../stacks/HDP/2.5/services/stack_advisor.py    |  58 ++-
 .../stacks/HDP/2.6/services/SPARK/kerberos.json |   3 +-
 .../HDP/2.6/services/SPARK2/kerberos.json       |   3 +-
 .../stacks/HDP/2.6/services/stack_advisor.py    |  60 ++-
 .../src/main/resources/stacks/stack_advisor.py  |   6 +-
 .../server/upgrade/UpgradeCatalog252Test.java   | 203 ++++++++
 .../SPARK/2.2.0/test_service_advisor.py         | 289 ++++++++++++
 .../stacks/2.5/common/test_stack_advisor.py     | 197 ++++++++
 .../stacks/2.6/common/test_stack_advisor.py     | 471 +++++++++++++++++++
 14 files changed, 1545 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
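
For context, the mismatch this patch addresses can be sketched in a few lines of Python (an illustration only, not part of the patch; the variable names are hypothetical):

    # The pre-2.5.2 kerberos.json files seeded livy.superusers with
    # "zeppelin${principal_suffix}", which renders as "zeppelin-<clustername>",
    # while Livy needs the short name of the actual Zeppelin principal.
    zeppelin_principal = 'zeppelin_user@EXAMPLE.COM'  # zeppelin-env value
    cluster_name = 'c1'

    stale_entry = 'zeppelin-' + cluster_name.lower()  # 'zeppelin-c1', removed by the upgrade
    correct_entry = zeppelin_principal.split('@')[0]  # 'zeppelin_user', recommended by the advisors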


http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipal.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipal.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipal.java
index 764324b..692b130 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipal.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipal.java
@@ -36,7 +36,7 @@ import javax.annotation.Nullable;
  * This class will create a DeconstructedPrincipal from a String containing a principal using
  * {@link DeconstructedPrincipal#valueOf(String, String)}
  */
-class DeconstructedPrincipal {
+public class DeconstructedPrincipal {
   /**
    * Regular expression to parse the different principal formats:
    * <ul>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog252.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog252.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog252.java
index 2227675..1192c11 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog252.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog252.java
@@ -18,18 +18,30 @@
 package org.apache.ambari.server.upgrade;
 
 import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeSet;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
+import org.apache.ambari.server.serveraction.kerberos.DeconstructedPrincipal;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
+import org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Sets;
 import com.google.inject.Inject;
@@ -46,6 +58,11 @@ public class UpgradeCatalog252 extends AbstractUpgradeCatalog {
   private static final String CLUSTER_ENV = "cluster-env";
 
   /**
+   * Logger.
+   */
+  private static final Logger LOG = LoggerFactory.getLogger(UpgradeCatalog252.class);
+
+  /**
    * Constructor.
    *
    * @param injector
@@ -93,6 +110,8 @@ public class UpgradeCatalog252 extends AbstractUpgradeCatalog {
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     addNewConfigurationsFromXml();
     resetStackToolsAndFeatures();
+    updateKerberosDescriptorArtifacts();
+    fixLivySuperusers();
   }
 
   /**
@@ -153,4 +172,159 @@ public class UpgradeCatalog252 extends AbstractUpgradeCatalog {
      updateConfigurationPropertiesForCluster(cluster, CLUSTER_ENV, newStackProperties, true, false);
     }
   }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void updateKerberosDescriptorArtifact(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
+    if (artifactEntity != null) {
+      Map<String, Object> data = artifactEntity.getArtifactData();
+
+      if (data != null) {
+        final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(data);
+
+        if (kerberosDescriptor != null) {
+          // Find and remove configuration specifications for <code>livy-conf/livy.superusers</code>
+          // in SPARK since this logic has been moved to the relevant stack/service advisors
+          boolean updatedSpark = removeConfigurationSpecification(kerberosDescriptor.getService("SPARK"), "livy-conf", "livy.superusers");
+
+          // Find and remove configuration specifications for <code>livy2-conf/livy.superusers</code>
+          // in SPARK2 since this logic has been moved to the relevant stack/service advisors
+          boolean updatedSpark2 = removeConfigurationSpecification(kerberosDescriptor.getService("SPARK2"), "livy2-conf", "livy.superusers");
+
+          if (updatedSpark || updatedSpark2) {
+            artifactEntity.setArtifactData(kerberosDescriptor.toMap());
+            artifactDAO.merge(artifactEntity);
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Fixes the <code>livy.superusers</code> value in <code>livy-conf</code> and
+   * <code>livy2-conf</code>.
+   * <p>
+   * When Kerberos is enabled, the values of <code>livy.superusers</code> in <code>livy-conf</code>
+   * and <code>livy2-conf</code> are potentially incorrect due to an issue with the Spark and Spark2
+   * kerberos.json files.  In Ambari 2.5.2, the logic to set <code>livy.superusers</code> has been
+   * moved to the stack advisor and removed from the kerberos.json files.  The user-supplied Kerberos
+   * descriptor is fixed in {@link #updateKerberosDescriptorArtifact(ArtifactDAO, ArtifactEntity)}.
+   * <p>
+   * If Zeppelin is installed and Kerberos is enabled, then <code>livy.superusers</code> should be
+   * updated to contain the proper value for the Zeppelin user. If the stale value, in the form of
+   * <code>zeppelin-clustername</code>, is present, it will be removed.
+   */
+  void fixLivySuperusers() throws AmbariException {
+    Clusters clusters = injector.getInstance(Clusters.class);
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+          Config zeppelinEnvProperties = cluster.getDesiredConfigByType("zeppelin-env");
+          if (zeppelinEnvProperties != null) {
+            Map<String, String> zeppelinProperties = zeppelinEnvProperties.getProperties();
+            if (zeppelinProperties != null) {
+              String zeppelinPrincipal = zeppelinProperties.get("zeppelin.server.kerberos.principal");
+
+              if (!StringUtils.isEmpty(zeppelinPrincipal)) {
+                // Parse the principal name component from the full principal. The default realm of
+                // EXAMPLE.COM is used because we really don't care what the realm is.
+                DeconstructedPrincipal deconstructedPrincipal = DeconstructedPrincipal.valueOf(zeppelinPrincipal, "EXAMPLE.COM");
+                String newZeppelinPrincipalName = deconstructedPrincipal.getPrincipalName();
+                String oldZeppelinPrincipalName = "zeppelin-" + cluster.getClusterName().toLowerCase();
+
+                // Fix livy-conf/livy.superusers
+                updateListValues(cluster, "livy-conf", "livy.superusers",
+                    Collections.singleton(newZeppelinPrincipalName), Collections.singleton(oldZeppelinPrincipalName));
+
+                // Fix livy2-conf/livy.superusers
+                updateListValues(cluster, "livy2-conf", "livy.superusers",
+                    Collections.singleton(newZeppelinPrincipalName), Collections.singleton(oldZeppelinPrincipalName));
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Updates the contents of a configuration property containing a comma-delimited list of values.
+   * <p>
+   * Items will be added and/or removed as needed. If changes are made to the value, the configuration
+   * is updated in the cluster.
+   *
+   * @param cluster        the cluster
+   * @param configType     the configuration type
+   * @param propertyName   the property name
+   * @param valuesToAdd    a set of values to add to the list
+   * @param valuesToRemove a set of values to remove from the list
+   * @throws AmbariException
+   */
+  private void updateListValues(Cluster cluster, String configType, String propertyName, Set<String> valuesToAdd, Set<String> valuesToRemove)
+      throws AmbariException {
+    Config config = cluster.getDesiredConfigByType(configType);
+    if (config != null) {
+      Map<String, String> properties = config.getProperties();
+      if (properties != null) {
+        String existingValue = properties.get(propertyName);
+        String newValue = null;
+
+        if (StringUtils.isEmpty(existingValue)) {
+          if ((valuesToAdd != null) && !valuesToAdd.isEmpty()) {
+            newValue = StringUtils.join(valuesToAdd, ',');
+          }
+        } else {
+          Set<String> valueSet = new TreeSet<>(Arrays.asList(existingValue.split("\\s*,\\s*")));
+
+          boolean removedValues = false;
+          if (valuesToRemove != null) {
+            removedValues = valueSet.removeAll(valuesToRemove);
+          }
+
+          boolean addedValues = false;
+          if (valuesToAdd != null) {
+            addedValues = valueSet.addAll(valuesToAdd);
+          }
+
+          if (removedValues || addedValues) {
+            newValue = StringUtils.join(valueSet, ',');
+          }
+        }
+
+        if (!StringUtils.isEmpty(newValue)) {
+          updateConfigurationPropertiesForCluster(cluster, configType, Collections.singletonMap(propertyName, newValue), true, true);
+        }
+      }
+    }
+  }
+
+  /**
+   * Given an {@link AbstractKerberosDescriptorContainer}, attempts to remove the specified property
+   * (<code>configType/propertyName</code>) from it.
+   *
+   * @param kerberosDescriptorContainer the container to update
+   * @param configType                  the configuration type
+   * @param propertyName                the property name
+   * @return true if changes were made to the container; false otherwise
+   */
+  private boolean removeConfigurationSpecification(AbstractKerberosDescriptorContainer kerberosDescriptorContainer, String configType, String propertyName) {
+    boolean updated = false;
+    if (kerberosDescriptorContainer != null) {
+      KerberosConfigurationDescriptor configurationDescriptor = kerberosDescriptorContainer.getConfiguration(configType);
+      if (configurationDescriptor != null) {
+        Map<String, String> properties = configurationDescriptor.getProperties();
+        if ((properties != null) && properties.containsKey(propertyName)) {
+          properties.remove(propertyName);
+          LOG.info("Removed {}/{} from the descriptor named {}", configType, propertyName, kerberosDescriptorContainer.getName());
+          updated = true;
+        }
+      }
+    }
+
+    return updated;
+  }
 }

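A minimal Python sketch of the merge semantics that updateListValues() above implements (the Java version uses a TreeSet, so the rewritten list comes out sorted; this helper is an illustration, not part of the patch):

    def update_list_value(existing, to_add, to_remove):
      # Split the existing comma-delimited value, drop stale entries, add new
      # ones, and report a new value only when something actually changed.
      if not existing:
        return ','.join(sorted(to_add)) if to_add else None
      values = set(v.strip() for v in existing.split(','))
      merged = (values - set(to_remove)) | set(to_add)
      return ','.join(sorted(merged)) if merged != values else None

    # update_list_value('zeppelin-c1, some_user', ['zeppelin_user'], ['zeppelin-c1'])
    # returns 'some_user,zeppelin_user', the value testFixLivySuperUsers expects.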
http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/main/resources/common-services/SPARK/2.2.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/service_advisor.py b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/service_advisor.py
index b876cd7..389dfae 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/service_advisor.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/service_advisor.py
@@ -121,8 +121,9 @@ class SparkServiceAdvisor(service_advisor.ServiceAdvisor):
     Entry point.
    Must be overridden in child class.
     """
-    self.logger.info("Class: %s, Method: %s. Recommending Service Configurations." %
-                (self.__class__.__name__, inspect.stack()[0][3]))
+    # TODO: Breaks unit tests, need to figure out why
+    # self.logger.info("Class: %s, Method: %s. Recommending Service Configurations." %
+    #             (self.__class__.__name__, inspect.stack()[0][3]))
     recommender = SparkRecommender()
 
     recommender.recommendSparkConfigurationsFromHDP25(configurations, clusterData, services, hosts)
@@ -135,8 +136,9 @@ class SparkServiceAdvisor(service_advisor.ServiceAdvisor):
     Validate configurations for the service. Return a list of errors.
     The code for this function should be the same for each Service Advisor.
     """
-    self.logger.info("Class: %s, Method: %s. Validating Configurations." %
-                (self.__class__.__name__, inspect.stack()[0][3]))
+    # TODO: Breaks unit tests, need to figure out why
+    # self.logger.info("Class: %s, Method: %s. Validating Configurations." %
+    #             (self.__class__.__name__, inspect.stack()[0][3]))
 
     validator = SparkValidator()
     # Calls the methods of the validator using arguments,
@@ -170,6 +172,44 @@ class SparkRecommender(service_advisor.ServiceAdvisor):
     if spark_thrift_queue is not None:
       putSparkThriftSparkConf("spark.yarn.queue", spark_thrift_queue)
 
+    self.__recommendLivySuperUsers(configurations, services)
+
+  def __recommendLivySuperUsers(self, configurations, services):
+    """
+    If Kerberos is enabled AND Zeppelin is installed AND Spark Livy Server is installed, then set
+    livy-conf/livy.superusers to contain the Zeppelin principal name from
+    zeppelin-env/zeppelin.server.kerberos.principal
+
+    :param configurations:
+    :param services:
+    """
+    if self.isSecurityEnabled(services):
+      zeppelin_env = self.getServicesSiteProperties(services, "zeppelin-env")
+
+      if zeppelin_env and 'zeppelin.server.kerberos.principal' in zeppelin_env:
+        zeppelin_principal = zeppelin_env['zeppelin.server.kerberos.principal']
+        zeppelin_user = zeppelin_principal.split('@')[0] if zeppelin_principal else None
+
+        if zeppelin_user:
+          livy_conf = self.getServicesSiteProperties(services, 'livy-conf')
+
+          if livy_conf:
+            superusers = livy_conf['livy.superusers'] if livy_conf and 'livy.superusers' in livy_conf else None
+
+            # add the Zeppelin user to the set of users
+            if superusers:
+              _superusers = superusers.split(',')
+              _superusers = [x.strip() for x in _superusers]
+              _superusers = filter(None, _superusers)  # Removes empty string elements from array
+            else:
+              _superusers = []
+
+            if zeppelin_user not in _superusers:
+              _superusers.append(zeppelin_user)
+
+              putLivyProperty = self.putProperty(configurations, 'livy-conf', services)
+              putLivyProperty('livy.superusers', ','.join(_superusers))
+
 class SparkValidator(service_advisor.ServiceAdvisor):
   """
  Spark Validator checks the correctness of properties whenever the service is first added or the user attempts to

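The list handling in __recommendLivySuperUsers() above reduces to the following (a condensed sketch under the same assumptions as the patch; the helper name is hypothetical). The identical normalization recurs in the Zeppelin service advisor and the HDP 2.5/2.6 stack advisors below:

    def merge_superusers(current, zeppelin_user):
      # Split on commas, trim whitespace, drop empty elements, and append the
      # Zeppelin user only when it is not already present.
      users = [u.strip() for u in (current or '').split(',')]
      users = [u for u in users if u]
      if zeppelin_user not in users:
        users.append(zeppelin_user)
      return ','.join(users)

    # merge_superusers('livy_user', 'zeppelin_user') -> 'livy_user,zeppelin_user',
    # the value the new unit tests expect.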
http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/service_advisor.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/service_advisor.py
index 9ff9b8b..4548961 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/service_advisor.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/service_advisor.py
@@ -119,8 +119,7 @@ class ZeppelinServiceAdvisor(service_advisor.ServiceAdvisor):
     #            (self.__class__.__name__, inspect.stack()[0][3]))
 
     recommender = ZeppelinRecommender()
-
-
+    recommender.recommendZeppelinConfigurationsFromHDP25(configurations, clusterData, services, hosts)
 
   def getServiceConfigurationsValidationItems(self, configurations, recommendedDefaults, services, hosts):
     """
@@ -147,7 +146,50 @@ class ZeppelinRecommender(service_advisor.ServiceAdvisor):
     self.as_super = super(ZeppelinRecommender, self)
     self.as_super.__init__(*args, **kwargs)
 
+  def recommendZeppelinConfigurationsFromHDP25(self, configurations, clusterData, services, hosts):
+    """
+    :type configurations dict
+    :type clusterData dict
+    :type services dict
+    :type hosts dict
+    """
+    self.__recommendLivySuperUsers(configurations, services)
+
+  def __recommendLivySuperUsers(self, configurations, services):
+    """
+    If Kerberos is enabled AND Zeppelin is installed AND Spark Livy Server is installed, then set
+    livy-conf/livy.superusers to contain the Zeppelin principal name from
+    zeppelin-env/zeppelin.server.kerberos.principal
+
+    :param configurations:
+    :param services:
+    """
+    if self.isSecurityEnabled(services):
+      zeppelin_env = self.getServicesSiteProperties(services, "zeppelin-env")
+
+      if zeppelin_env and 'zeppelin.server.kerberos.principal' in zeppelin_env:
+        zeppelin_principal = zeppelin_env['zeppelin.server.kerberos.principal']
+        zeppelin_user = zeppelin_principal.split('@')[0] if zeppelin_principal else None
+
+        if zeppelin_user:
+          livy_conf = self.getServicesSiteProperties(services, 'livy-conf')
+
+          if livy_conf:
+            superusers = livy_conf['livy.superusers'] if livy_conf and 'livy.superusers' in livy_conf else None
+
+            # add the Zeppelin user to the set of users
+            if superusers:
+              _superusers = superusers.split(',')
+              _superusers = [x.strip() for x in _superusers]
+              _superusers = filter(None, _superusers)  # Removes empty string elements from array
+            else:
+              _superusers = []
+
+            if zeppelin_user not in _superusers:
+              _superusers.append(zeppelin_user)
 
+              putLivyProperty = self.putProperty(configurations, 'livy-conf', services)
+              putLivyProperty('livy.superusers', ','.join(_superusers))
 
 class ZeppelinValidator(service_advisor.ServiceAdvisor):
   """

http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json
index aa81edb..54ddf02 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json
@@ -43,8 +43,7 @@
         {
           "livy-conf": {
             "livy.server.auth.type": "kerberos",
-            "livy.impersonation.enabled": "true",
-            "livy.superusers": "zeppelin${principal_suffix}"
+            "livy.impersonation.enabled": "true"
           }
         },
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 246bbcc..2dc493a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -460,11 +460,22 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
       "RANGER_KMS": self.recommendRangerKMSConfigurations,
       "STORM": self.recommendStormConfigurations,
       "OOZIE": self.recommendOozieConfigurations,
-      "SPARK2": self.recommendSpark2Configurations
+      "SPARK": self.recommendSparkConfigurations,
+      "SPARK2": self.recommendSpark2Configurations,
+      "ZEPPELIN": self.recommendZeppelinConfigurations
     }
     parentRecommendConfDict.update(childRecommendConfDict)
     return parentRecommendConfDict
 
+  def recommendSparkConfigurations(self, configurations, clusterData, services, hosts):
+    """
+    :type configurations dict
+    :type clusterData dict
+    :type services dict
+    :type hosts dict
+    """
+    self.__addZeppelinToLivySuperUsers(configurations, services)
+
   def recommendSpark2Configurations(self, configurations, clusterData, services, hosts):
     """
     :type configurations dict
@@ -537,6 +548,15 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
       putStormSiteProperty('storm.cluster.metrics.consumer.register', 'null')
       putStormSiteProperty('topology.metrics.consumer.register', 'null')
 
+  def recommendZeppelinConfigurations(self, configurations, clusterData, services, hosts):
+    """
+    :type configurations dict
+    :type clusterData dict
+    :type services dict
+    :type hosts dict
+    """
+    self.__addZeppelinToLivySuperUsers(configurations, services)
+
   def constructAtlasRestAddress(self, services, hosts):
     """
     :param services: Collection of services in the cluster with configs
@@ -2141,5 +2161,41 @@ yarn.scheduler.capacity.root.{0}.maximum-am-resource-percent=1""".format(llap_qu
 
     return self.toConfigurationValidationProblems(validationItems, "ranger-tagsync-site")
 
+  def __addZeppelinToLivySuperUsers(self, configurations, services):
+    """
+    If Kerberos is enabled AND Zeppelin is installed AND Spark Livy Server is installed, then set
+    livy-conf/livy.superusers to contain the Zeppelin principal name from
+    zeppelin-env/zeppelin.server.kerberos.principal
+
+    :param configurations:
+    :param services:
+    """
+    if self.isSecurityEnabled(services):
+      zeppelin_env = self.getServicesSiteProperties(services, "zeppelin-env")
+
+      if zeppelin_env and 'zeppelin.server.kerberos.principal' in zeppelin_env:
+        zeppelin_principal = zeppelin_env['zeppelin.server.kerberos.principal']
+        zeppelin_user = zeppelin_principal.split('@')[0] if zeppelin_principal else None
+
+        if zeppelin_user:
+          livy_conf = self.getServicesSiteProperties(services, 'livy-conf')
+
+          if livy_conf:
+            superusers = livy_conf['livy.superusers'] if livy_conf and 'livy.superusers' in livy_conf else None
+
+            # add the Zeppelin user to the set of users
+            if superusers:
+              _superusers = superusers.split(',')
+              _superusers = [x.strip() for x in _superusers]
+              _superusers = filter(None, _superusers)  # Removes empty string elements from array
+            else:
+              _superusers = []
+
+            if zeppelin_user not in _superusers:
+              _superusers.append(zeppelin_user)
+
+              putLivyProperty = self.putProperty(configurations, 'livy-conf', services)
+              putLivyProperty('livy.superusers', ','.join(_superusers))
+
   def isComponentUsingCardinalityForLayout(self, componentName):
     return super(HDP25StackAdvisor, self).isComponentUsingCardinalityForLayout(componentName) or componentName in ['SPARK2_THRIFTSERVER', 'LIVY2_SERVER', 'LIVY_SERVER']

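The hunk at the top of this file shows the dispatch pattern the stack advisors rely on: each advisor builds a service-name-to-handler dict and merges it over its parent's, so child entries win on key collisions. A schematic mirror (the enclosing method is not shown in the hunk, so the names here are hypothetical):

    class ParentAdvisor(object):
      def recommender_dict(self):
        return {'SPARK2': 'parent SPARK2 handler'}

    class ChildAdvisor(ParentAdvisor):
      def recommender_dict(self):
        handlers = super(ChildAdvisor, self).recommender_dict()
        # dict.update overrides SPARK2 and adds SPARK/ZEPPELIN, matching the
        # shape of parentRecommendConfDict.update(childRecommendConfDict).
        handlers.update({'SPARK': 'child SPARK handler',
                         'ZEPPELIN': 'child ZEPPELIN handler'})
        return handlers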
http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK/kerberos.json
index 872f78b..c68d627 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK/kerberos.json
@@ -43,8 +43,7 @@
         {
           "livy-conf": {
             "livy.server.auth.type": "kerberos",
-            "livy.impersonation.enabled": "true",
-            "livy.superusers": "zeppelin${principal_suffix}"
+            "livy.impersonation.enabled": "true"
           }
         },
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/kerberos.json
index 0f99bbb..8b6b41d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/kerberos.json
@@ -43,8 +43,7 @@
         {
           "livy2-conf": {
             "livy.server.auth.type": "kerberos",
-            "livy.impersonation.enabled": "true",
-            "livy.superusers": "zeppelin${principal_suffix}"
+            "livy.impersonation.enabled": "true"
           }
         },
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
index e9b8d15..6108351 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
@@ -40,11 +40,33 @@ class HDP26StackAdvisor(HDP25StackAdvisor):
         "HIVE": self.recommendHIVEConfigurations,
         "HBASE": self.recommendHBASEConfigurations,
         "YARN": self.recommendYARNConfigurations,
-        "KAFKA": self.recommendKAFKAConfigurations
+        "KAFKA": self.recommendKAFKAConfigurations,
+        "SPARK2": self.recommendSPARK2Configurations,
+        "ZEPPELIN": self.recommendZEPPELINConfigurations
       }
       parentRecommendConfDict.update(childRecommendConfDict)
       return parentRecommendConfDict
 
+  def recommendSPARK2Configurations(self, configurations, clusterData, services, hosts):
+    """
+    :type configurations dict
+    :type clusterData dict
+    :type services dict
+    :type hosts dict
+    """
+    super(HDP26StackAdvisor, self).recommendSpark2Configurations(configurations, clusterData, services, hosts)
+    self.__addZeppelinToLivy2SuperUsers(configurations, services)
+
+  def recommendZEPPELINConfigurations(self, configurations, clusterData, services, hosts):
+    """
+    :type configurations dict
+    :type clusterData dict
+    :type services dict
+    :type hosts dict
+    """
+    super(HDP26StackAdvisor, self).recommendZeppelinConfigurations(configurations, clusterData, services, hosts)
+    self.__addZeppelinToLivy2SuperUsers(configurations, services)
+
   def recommendAtlasConfigurations(self, configurations, clusterData, services, hosts):
     super(HDP26StackAdvisor, self).recommendAtlasConfigurations(configurations, clusterData, services, hosts)
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
@@ -644,3 +666,39 @@ class HDP26StackAdvisor(HDP25StackAdvisor):
       putRangerKafkaPluginProperty("REPOSITORY_CONFIG_USERNAME",kafka_user)
     else:
       self.logger.info("Not setting Kafka Repo user for Ranger.")
+
+  def __addZeppelinToLivy2SuperUsers(self, configurations, services):
+    """
+    If Kerberos is enabled AND Zeppelin is installed AND Spark2 Livy Server is installed, then set
+    livy2-conf/livy.superusers to contain the Zeppelin principal name from
+    zeppelin-env/zeppelin.server.kerberos.principal
+
+    :param configurations:
+    :param services:
+    """
+    if self.isSecurityEnabled(services):
+      zeppelin_env = self.getServicesSiteProperties(services, "zeppelin-env")
+
+      if zeppelin_env and 'zeppelin.server.kerberos.principal' in zeppelin_env:
+        zeppelin_principal = zeppelin_env['zeppelin.server.kerberos.principal']
+        zeppelin_user = zeppelin_principal.split('@')[0] if zeppelin_principal else None
+
+        if zeppelin_user:
+          livy2_conf = self.getServicesSiteProperties(services, 'livy2-conf')
+
+          if livy2_conf:
+            superusers = livy2_conf['livy.superusers'] if livy2_conf and 'livy.superusers' in livy2_conf else None
+
+            # add the Zeppelin user to the set of users
+            if superusers:
+              _superusers = superusers.split(',')
+              _superusers = [x.strip() for x in _superusers]
+              _superusers = filter(None, _superusers)  # Removes empty string elements from array
+            else:
+              _superusers = []
+
+            if zeppelin_user not in _superusers:
+              _superusers.append(zeppelin_user)
+
+              putLivy2ConfProperty = self.putProperty(configurations, 'livy2-conf', services)
+              putLivy2ConfProperty('livy.superusers', ','.join(_superusers))
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/main/resources/stacks/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/stack_advisor.py b/ambari-server/src/main/resources/stacks/stack_advisor.py
index 321ac4e..89f2997 100644
--- a/ambari-server/src/main/resources/stacks/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/stack_advisor.py
@@ -2031,12 +2031,12 @@ class DefaultStackAdvisor(StackAdvisor):
     If the property exists and is equal to "true", then it is enabled; otherwise it is assumed to be
     disabled.
 
+    This is an alias for stacks.stack_advisor.DefaultStackAdvisor#is_secured_cluster
+
     :param services: the services structure containing the current configurations
     :return: true if security is enabled; otherwise false
     """
-    return "cluster-env" in services["configurations"] \
-           and "security_enabled" in services["configurations"]["cluster-env"]["properties"] \
-           and services["configurations"]["cluster-env"]["properties"]["security_enabled"].lower() == "true"
+    return self.is_secured_cluster(services)
 
   def parseCardinality(self, cardinality, hostsCount):
     """

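For reference, the check that is_secured_cluster() performs is the one deleted inline above; in sketch form:

    def is_secured_cluster(services):
      # True only when cluster-env/security_enabled exists and equals "true".
      properties = services.get('configurations', {}).get('cluster-env', {}).get('properties', {})
      return properties.get('security_enabled', '').lower() == 'true'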
http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog252Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog252Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog252Test.java
index b71b335..d7df68c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog252Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog252Test.java
@@ -19,30 +19,44 @@
 package org.apache.ambari.server.upgrade;
 
 import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.anyString;
 import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.eq;
 import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.newCapture;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
 import static org.easymock.EasyMock.verify;
 
+import java.lang.reflect.Type;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.Statement;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
 
 import javax.persistence.EntityManager;
 
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.EasyMockRunner;
@@ -55,6 +69,7 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 
 import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
 import com.google.inject.Binder;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
@@ -163,4 +178,192 @@ public class UpgradeCatalog252Test {
     Assert.assertEquals(0, captured.getDefaultValue());
     Assert.assertEquals(Short.class, captured.getType());
   }
+
+  @Test
+  public void testFixLivySuperUsers() throws AmbariException {
+
+    final Clusters clusters = createMock(Clusters.class);
+    final Cluster cluster = createMock(Cluster.class);
+    final Config zeppelinEnv = createMock(Config.class);
+    final Config livyConf = createMock(Config.class);
+    final Config livyConfNew = createMock(Config.class);
+    final Config livy2Conf = createMock(Config.class);
+    final Config livy2ConfNew = createMock(Config.class);
+    final AmbariManagementController controller = createMock(AmbariManagementController.class);
+
+    StackId stackId = new StackId("HDP", "2.2");
+
+
+    Capture<? extends Map<String, String>> captureLivyConfProperties = newCapture();
+    Capture<? extends Map<String, String>> captureLivy2ConfProperties = newCapture();
+
+    Module module = new Module() {
+      @Override
+      public void configure(Binder binder) {
+        binder.bind(DBAccessor.class).toInstance(dbAccessor);
+        binder.bind(OsFamily.class).toInstance(osFamily);
+        binder.bind(EntityManager.class).toInstance(entityManager);
+        binder.bind(Configuration.class).toInstance(configuration);
+        binder.bind(Clusters.class).toInstance(clusters);
+        binder.bind(AmbariManagementController.class).toInstance(controller);
+      }
+    };
+
+    expect(clusters.getClusters()).andReturn(Collections.singletonMap("c1", cluster)).once();
+
+    expect(cluster.getClusterName()).andReturn("c1").atLeastOnce();
+    expect(cluster.getDesiredStackVersion()).andReturn(stackId).atLeastOnce();
+    expect(cluster.getDesiredConfigByType("zeppelin-env")).andReturn(zeppelinEnv).atLeastOnce();
+    expect(cluster.getServiceByConfigType("livy-conf")).andReturn("SPARK").atLeastOnce();
+    expect(cluster.getDesiredConfigByType("livy-conf")).andReturn(livyConf).atLeastOnce();
+    expect(cluster.getConfigsByType("livy-conf")).andReturn(Collections.singletonMap("tag1", livyConf)).atLeastOnce();
+    expect(cluster.getConfig(eq("livy-conf"), anyString())).andReturn(livyConfNew).atLeastOnce();
+    expect(cluster.getServiceByConfigType("livy2-conf")).andReturn("SPARK2").atLeastOnce();
+    expect(cluster.getDesiredConfigByType("livy2-conf")).andReturn(livy2Conf).atLeastOnce();
+    expect(cluster.getConfigsByType("livy2-conf")).andReturn(Collections.singletonMap("tag1", livy2Conf)).atLeastOnce();
+    expect(cluster.getConfig(eq("livy2-conf"), anyString())).andReturn(livy2ConfNew).atLeastOnce();
+    expect(cluster.addDesiredConfig(eq("ambari-upgrade"), anyObject(Set.class), anyString())).andReturn(null).atLeastOnce();
+
+    expect(zeppelinEnv.getProperties()).andReturn(Collections.singletonMap("zeppelin.server.kerberos.principal", "zeppelin_user@AMBARI.LOCAL")).once();
+
+    expect(livyConf.getProperties()).andReturn(Collections.singletonMap("livy.superusers", "zeppelin-c1, some_user")).atLeastOnce();
+    expect(livyConf.getPropertiesAttributes()).andReturn(Collections.<String, Map<String, String>>emptyMap()).atLeastOnce();
+    expect(livy2Conf.getProperties()).andReturn(Collections.<String, String>emptyMap()).atLeastOnce();
+    expect(livy2Conf.getPropertiesAttributes()).andReturn(Collections.<String, Map<String, String>>emptyMap()).atLeastOnce();
+
+    expect(controller.createConfig(eq(cluster), eq(stackId), eq("livy-conf"), capture(captureLivyConfProperties), anyString(), anyObject(Map.class)))
+        .andReturn(livyConfNew)
+        .once();
+    expect(controller.createConfig(eq(cluster), eq(stackId), eq("livy2-conf"), capture(captureLivy2ConfProperties), anyString(), anyObject(Map.class)))
+        .andReturn(livy2ConfNew)
+        .once();
+
+    replay(clusters, cluster, zeppelinEnv, livy2Conf, livyConf, controller);
+
+    Injector injector = Guice.createInjector(module);
+    UpgradeCatalog252 upgradeCatalog252 = injector.getInstance(UpgradeCatalog252.class);
+    upgradeCatalog252.fixLivySuperusers();
+
+    verify(clusters, cluster, zeppelinEnv, livy2Conf, livyConf, controller);
+
+    Assert.assertTrue(captureLivyConfProperties.hasCaptured());
+    Assert.assertEquals("some_user,zeppelin_user", captureLivyConfProperties.getValue().get("livy.superusers"));
+
+    Assert.assertTrue(captureLivy2ConfProperties.hasCaptured());
+    Assert.assertEquals("zeppelin_user", captureLivy2ConfProperties.getValue().get("livy.superusers"));
+  }
+
+  @Test
+  public void testUpdateKerberosDescriptorArtifact() throws AmbariException {
+    String initialJson = "{" +
+        "  \"services\": [" +
+        "    {" +
+        "      \"name\": \"SPARK\"," +
+        "      \"configurations\": [" +
+        "        {" +
+        "          \"livy-conf\": {" +
+        "            \"property1\": \"true\"," +
+        "            \"property2\": \"true\"," +
+        "            \"livy.superusers\": \"somevalue\"" +
+        "          }" +
+        "        }," +
+        "        {" +
+        "          \"some-env\": {" +
+        "            \"groups\": \"${hadoop-env/proxyuser_group}\"," +
+        "            \"hosts\": \"${clusterHostInfo/existing_service_master_hosts}\"" +
+        "          }" +
+        "        }" +
+        "      ]" +
+        "    }," +
+        "    {" +
+        "      \"name\": \"SPARK2\"," +
+        "      \"configurations\": [" +
+        "        {" +
+        "          \"livy2-conf\": {" +
+        "            \"property1\": \"true\"," +
+        "            \"property2\": \"true\"," +
+        "            \"livy.superusers\": \"somevalue\"" +
+        "          }" +
+        "        }," +
+        "        {" +
+        "          \"some2-env\": {" +
+        "            \"groups\": \"${hadoop-env/proxyuser_group}\"," +
+        "            \"hosts\": \"${clusterHostInfo/existing_service_master_hosts}\"" +
+        "          }" +
+        "        }" +
+        "      ]" +
+        "    }," +
+        "    {" +
+        "      \"name\": \"NOT_SPARK\"," +
+        "      \"configurations\": [" +
+        "        {" +
+        "          \"not-livy-conf\": {" +
+        "            \"property1\": \"true\"," +
+        "            \"property2\": \"true\"," +
+        "            \"livy.superusers\": \"somevalue\"" +
+        "          }" +
+        "        }," +
+        "        {" +
+        "          \"some2-env\": {" +
+        "            \"groups\": \"${hadoop-env/proxyuser_group}\"," +
+        "            \"hosts\": \"${clusterHostInfo/existing_service_master_hosts}\"" +
+        "          }" +
+        "        }" +
+        "      ]" +
+        "    }" +
+        "  ]" +
+        "}";
+
+
+    Type type = new TypeToken<Map<String, Object>>() {
+    }.getType();
+    Map<String,Object> map = new Gson().fromJson(initialJson, type);
+
+    Module module = new Module() {
+      @Override
+      public void configure(Binder binder) {
+        binder.bind(DBAccessor.class).toInstance(dbAccessor);
+        binder.bind(OsFamily.class).toInstance(osFamily);
+        binder.bind(EntityManager.class).toInstance(entityManager);
+        binder.bind(Configuration.class).toInstance(configuration);
+      }
+    };
+
+    Capture<? extends Map<String, Object>> captureMap = newCapture();
+    ArtifactEntity artifactEntity = createMock(ArtifactEntity.class);
+
+    expect(artifactEntity.getArtifactData()).andReturn(map).once();
+    artifactEntity.setArtifactData(capture(captureMap));
+    expectLastCall().once();
+
+    ArtifactDAO artifactDAO = createMock(ArtifactDAO.class);
+    expect(artifactDAO.merge(artifactEntity)).andReturn(artifactEntity).once();
+
+    replay(artifactDAO, artifactEntity);
+
+    Injector injector = Guice.createInjector(module);
+    UpgradeCatalog252 upgradeCatalog252 = injector.getInstance(UpgradeCatalog252.class);
+    upgradeCatalog252.updateKerberosDescriptorArtifact(artifactDAO, artifactEntity);
+
+    verify(artifactDAO, artifactEntity);
+
+    Assert.assertTrue(captureMap.hasCaptured());
+
+    KerberosDescriptor result = new KerberosDescriptorFactory().createInstance(captureMap.getValue());
+
+    Assert.assertNotNull(result.getService("SPARK"));
+    Assert.assertNotNull(result.getService("SPARK").getConfiguration("livy-conf"));
+    Assert.assertNotNull(result.getService("SPARK").getConfiguration("livy-conf").getProperties());
+    Assert.assertFalse(result.getService("SPARK").getConfiguration("livy-conf").getProperties().containsKey("livy.superusers"));
+
+    Assert.assertNotNull(result.getService("SPARK2"));
+    Assert.assertNotNull(result.getService("SPARK2").getConfiguration("livy2-conf"));
+    Assert.assertNotNull(result.getService("SPARK2").getConfiguration("livy2-conf").getProperties());
+    Assert.assertFalse(result.getService("SPARK2").getConfiguration("livy2-conf").getProperties().containsKey("livy.superusers"));
+
+    Assert.assertNotNull(result.getService("NOT_SPARK"));
+    Assert.assertNotNull(result.getService("NOT_SPARK").getConfiguration("not-livy-conf"));
+    Assert.assertNotNull(result.getService("NOT_SPARK").getConfiguration("not-livy-conf").getProperties());
+    Assert.assertTrue(result.getService("NOT_SPARK").getConfiguration("not-livy-conf").getProperties().containsKey("livy.superusers"));
+  }
 }

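In plain-dict terms, testUpdateKerberosDescriptorArtifact() verifies that the upgrade prunes only the livy.superusers keys under SPARK/livy-conf and SPARK2/livy2-conf, leaving NOT_SPARK untouched. A rough before/after for one service (illustrative data only):

    before = {'services': [{'name': 'SPARK', 'configurations': [
        {'livy-conf': {'property1': 'true', 'livy.superusers': 'somevalue'}}]}]}

    # After the upgrade, livy-conf keeps its other properties but loses the
    # stale livy.superusers entry:
    after = {'services': [{'name': 'SPARK', 'configurations': [
        {'livy-conf': {'property1': 'true'}}]}]}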
http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/test/python/common-services/SPARK/2.2.0/test_service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/SPARK/2.2.0/test_service_advisor.py b/ambari-server/src/test/python/common-services/SPARK/2.2.0/test_service_advisor.py
new file mode 100644
index 0000000..2606973
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/SPARK/2.2.0/test_service_advisor.py
@@ -0,0 +1,289 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import imp
+import os
+from unittest import TestCase
+
+class TestSPARKServiceAdvisor(TestCase):
+  testDirectory = os.path.dirname(os.path.abspath(__file__))
+  stack_advisor_path = os.path.join(testDirectory, '../../../../../main/resources/stacks/stack_advisor.py')
+  with open(stack_advisor_path, 'rb') as fp:
+    imp.load_module('stack_advisor', fp, stack_advisor_path, ('.py', 'rb', imp.PY_SOURCE))
+
+  serviceAdvisorPath = '../../../../../main/resources/common-services/SPARK/2.2.0/service_advisor.py'
+  sparkServiceAdvisorPath = os.path.join(testDirectory, serviceAdvisorPath)
+  with open(sparkServiceAdvisorPath, 'rb') as fp:
+    service_advisor_impl = imp.load_module('service_advisor_impl', fp, sparkServiceAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
+
+  def setUp(self):
+    serviceAdvisorClass = getattr(self.service_advisor_impl, 'SparkServiceAdvisor')
+    self.serviceAdvisor = serviceAdvisorClass()
+
+  def test_recommendSPARKConfigurations_SecurityEnabledZeppelinInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.property1": "value1"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK"
+        },
+      }
+    ]
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.superusers": "zeppelin_user",
+          "livy.property1": "value1"
+        }
+      },
+      "spark-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+
+    self.serviceAdvisor.getServiceConfigurationRecommendations(configurations, None, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendSPARKConfigurations_SecurityNotEnabledZeppelinInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "false",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK"
+        },
+      }
+    ]
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "false",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+        }
+      },
+      "spark-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+        }
+      }
+    }
+
+    self.serviceAdvisor.getServiceConfigurationRecommendations(configurations, None, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendSPARKConfigurations_SecurityEnabledZeppelinInstalledExistingValue(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.superusers": "livy_user"
+        }
+      },
+      "spark-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK"
+        },
+      }
+    ]
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.superusers": "livy_user,zeppelin_user"
+        }
+      },
+      "spark-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+
+    self.serviceAdvisor.getServiceConfigurationRecommendations(configurations, None, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendSPARKConfigurations_SecurityEnabledZeppelinNotInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.property1" : "value1"
+        }
+      }
+    }
+
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK"
+        },
+      }
+    ]
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.property1" : "value1"
+        }
+      },
+      "spark-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      }
+    }
+
+    self.serviceAdvisor.getServiceConfigurationRecommendations(configurations, None, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendSPARK2Configurations(self):
+    configurations = {}
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK2"
+        },
+      }
+    ]
+    expected = {
+      "spark-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      }
+    }
+
+    self.serviceAdvisor.getServiceConfigurationRecommendations(configurations, None, services, None)
+    self.assertEquals(configurations, expected)
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index 51d1678..cf462de 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -509,6 +509,203 @@ class TestHDP25StackAdvisor(TestCase):
     self.expected_visibility_false = {'visible': 'false'}
     self.expected_visibility_true = {'visible': 'true'}
 
+  def test_recommendSPARKConfigurations_SecurityEnabledZeppelinInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.property1": "value1"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.superusers": "zeppelin_user",
+          "livy.property1": "value1"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendSparkConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendSPARKConfigurations_SecurityNotEnabledZeppelinInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "false",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "false",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendSparkConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendSPARKConfigurations_SecurityEnabledZeppelinInstalledExistingValue(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.superusers": "livy_user"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.superusers": "livy_user,zeppelin_user"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendSparkConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendSPARKConfigurations_SecurityEnabledZeppelinNotInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendSparkConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
   def test_recommendSPARK2Configurations(self):
     configurations = {}
     services = {"configurations": configurations}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c129baa1/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
index 63e2229..f8483c1 100644
--- a/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
@@ -204,6 +204,477 @@ class TestHDP26StackAdvisor(TestCase):
                                                               'druid.broker.jvm.heap.memory': {'maximum': '49152'}}}}
                       )
 
+  def test_recommendSPARK2Configurations_SecurityEnabledZeppelinInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.property1": "value1"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK2"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.superusers": "zeppelin_user",
+          "livy.property1": "value1"
+        }
+      },
+      "spark2-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark2-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendSPARK2Configurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendSPARK2Configurations_SecurityNotEnabledZeppelinInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "false",
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.property1": "value1"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK2"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "false",
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.property1": "value1"
+        }
+      },
+      "spark2-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark2-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendSPARK2Configurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendSPARK2Configurations_SecurityEnabledZeppelinInstalledExistingValue(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.property1": "value1",
+          "livy.superusers": "livy_user"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK2"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.property1": "value1",
+          "livy.superusers": "livy_user,zeppelin_user"
+        }
+      },
+      "spark2-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark2-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendSPARK2Configurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendSPARK2Configurations_SecurityEnabledZeppelinNotInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.property1": "value1"
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK2"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true"
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.property1": "value1"
+        }
+      },
+      "spark2-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark2-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendSPARK2Configurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendZEPPELINConfigurations_SecurityEnabledSPARKInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.property1": "value1"
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.property1": "value1"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "ZEPPELIN"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.superusers": "zeppelin_user",
+          "livy.property1": "value1"
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.superusers": "zeppelin_user",
+          "livy.property1": "value1"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendZEPPELINConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendZEPPELINConfigurations_SecurityNotEnabledSparkInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "false",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "ZEPPELIN"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "false",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendZEPPELINConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendZEPPELINConfigurations_SecurityEnabledZeppelinInstalledExistingValue(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.superusers": "livy_user, hdfs"
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.superusers": "livy2_user"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "ZEPPELIN"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "livy-conf": {
+        "properties": {
+          "livy.superusers": "livy_user,hdfs,zeppelin_user"
+        }
+      },
+      "livy2-conf": {
+        "properties": {
+          "livy.superusers": "livy2_user,zeppelin_user"
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendZEPPELINConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendZEPPELINConfigurations_SecurityEnabledSparkNotInstalled(self):
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "ZEPPELIN"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256,
+      "yarnMinContainerSize": 256
+    }
+    expected = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true",
+        }
+      },
+      "zeppelin-env": {
+        "properties": {
+          "zeppelin.server.kerberos.principal": "zeppelin_user@REALM"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendZEPPELINConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
   def test_recommendDruidConfigurations_WithPostgresql(self):
     hosts = {
       "items": [

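Taken together, the expected values in the tests above pin down one merge rule: when security is enabled and Zeppelin is installed, the primary component of zeppelin.server.kerberos.principal is appended to livy.superusers (in livy-conf and livy2-conf), keeping any existing entries, trimming whitespace, and avoiding duplicates. A minimal sketch of that rule follows; the helper name is illustrative, not the committed implementation:

    def merge_livy_superusers(existing, zeppelin_principal):
        # Primary of the principal: "zeppelin_user@REALM" -> "zeppelin_user"
        # (an optional host component, "user/host@REALM", would also be dropped)
        primary = zeppelin_principal.split('@')[0].split('/')[0]
        # Keep existing entries, trimmed and de-duplicated
        users = [u.strip() for u in (existing or '').split(',') if u.strip()]
        if primary not in users:
            users.append(primary)
        return ','.join(users)

    # Matches the expectations exercised above:
    assert merge_livy_superusers(None, "zeppelin_user@REALM") == "zeppelin_user"
    assert merge_livy_superusers("livy_user, hdfs", "zeppelin_user@REALM") == "livy_user,hdfs,zeppelin_user"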