Repository: ambari
Updated Branches:
  refs/heads/branch-2.4 c0e8e43f0 -> 24aca1254


AMBARI-16171. Changes to Phoenix QueryServer Kerberos configuration (Josh Elser via srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/24aca125
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/24aca125
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/24aca125

Branch: refs/heads/branch-2.4
Commit: 24aca1254092977737ce3f625eb915763c7e17da
Parents: c0e8e43
Author: Srimanth Gunturi <sgunt...@hortonworks.com>
Authored: Fri May 27 15:54:13 2016 -0700
Committer: Srimanth Gunturi <sgunt...@hortonworks.com>
Committed: Fri May 27 15:58:18 2016 -0700

----------------------------------------------------------------------
 .../server/upgrade/AbstractUpgradeCatalog.java  |  43 ++
 .../server/upgrade/UpgradeCatalog240.java       | 155 +++++
 .../HBASE/0.96.0.2.0/kerberos.json              |  21 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  27 +
 .../stacks/HDP/2.5/services/stack_advisor.py    |  31 +-
 .../server/upgrade/UpgradeCatalog240Test.java   |  92 +++
 .../stacks/2.5/common/test_stack_advisor.py     | 584 ++++++++++++++++++-
 7 files changed, 937 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/24aca125/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index 2e857ed..3ee8bba 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -29,12 +29,14 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
+import java.util.TreeMap;
 
 import javax.persistence.EntityManager;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.configuration.Configuration.DatabaseType;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -49,7 +51,10 @@ import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.utils.VersionUtils;
@@ -625,7 +630,45 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
     }
   }
 
+  /**
+   * Retrieve the composite Kerberos Descriptor.
+   * <p>
+   * The composite Kerberos Descriptor is the cluster's stack-specific Kerberos Descriptor overlaid
+   * with changes specified by the user via the cluster's Kerberos Descriptor artifact.
+   *
+   * @param cluster the relevant cluster
+   * @return the composite Kerberos Descriptor
+   * @throws AmbariException
+   */
+  protected KerberosDescriptor getKerberosDescriptor(Cluster cluster) throws AmbariException {
+    // Get the Stack-defined Kerberos Descriptor (aka default Kerberos Descriptor)
+    AmbariMetaInfo ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+    StackId stackId = cluster.getCurrentStackVersion();
+    KerberosDescriptor defaultDescriptor = ambariMetaInfo.getKerberosDescriptor(stackId.getStackName(), stackId.getStackVersion());
 
+    // Get the User-set Kerberos Descriptor
+    ArtifactDAO artifactDAO = injector.getInstance(ArtifactDAO.class);
+    KerberosDescriptor artifactDescriptor = null;
+    ArtifactEntity artifactEntity = artifactDAO.findByNameAndForeignKeys("kerberos_descriptor",
+        new TreeMap<String, String>(Collections.singletonMap("cluster", String.valueOf(cluster.getClusterId()))));
+    if (artifactEntity != null) {
+      Map<String, Object> data = artifactEntity.getArtifactData();
+
+      if (data != null) {
+        artifactDescriptor = new KerberosDescriptorFactory().createInstance(data);
+      }
+      }
+    }
+
+    // Calculate and return the composite Kerberos Descriptor
+    if (defaultDescriptor == null) {
+      return artifactDescriptor;
+    } else if (artifactDescriptor == null) {
+      return defaultDescriptor;
+    } else {
+      defaultDescriptor.update(artifactDescriptor);
+      return defaultDescriptor;
+    }
+  }
 
   /**
   * Update the specified Kerberos Descriptor artifact to conform to the new structure.
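
The merge behind the "composite" descriptor is a simple precedence rule: start from the stack's default Kerberos Descriptor and let the cluster's user-set artifact override matching entries, recursively. A minimal Python sketch of that precedence, using plain dicts as hypothetical stand-ins for the real KerberosDescriptor objects (KerberosDescriptor.update() also handles typed service, component, and identity containers, which this sketch omits):

    def overlay(default, artifact):
        # The artifact (user-set) value wins; nested containers merge recursively.
        if default is None:
            return artifact
        if artifact is None:
            return default
        composite = dict(default)
        for key, value in artifact.items():
            if isinstance(value, dict) and isinstance(composite.get(key), dict):
                composite[key] = overlay(composite[key], value)
            else:
                composite[key] = value
        return composite

    stack_default = {"identities": {"spnego": {"principal": "HTTP/_HOST@${realm}"}}}
    user_artifact = {"identities": {"spnego": {"principal": "HTTP/_HOST@CUSTOM.REALM"}}}
    print(overlay(stack_default, user_artifact))
    # {'identities': {'spnego': {'principal': 'HTTP/_HOST@CUSTOM.REALM'}}}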

http://git-wip-us.apache.org/repos/asf/ambari/blob/24aca125/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index db6d73a9..77d4444 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -43,6 +43,7 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.dao.ClusterDAO;
 import org.apache.ambari.server.orm.dao.PermissionDAO;
 import org.apache.ambari.server.orm.dao.PrincipalDAO;
@@ -55,6 +56,7 @@ import org.apache.ambari.server.orm.dao.UserDAO;
 import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
 import org.apache.ambari.server.orm.dao.WidgetDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
 import org.apache.ambari.server.orm.entities.ClusterEntity;
 import org.apache.ambari.server.orm.entities.PermissionEntity;
 import org.apache.ambari.server.orm.entities.PrincipalEntity;
@@ -75,10 +77,18 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.RepositoryType;
+import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.state.stack.WidgetLayout;
 import org.apache.ambari.server.state.stack.WidgetLayoutInfo;
 import org.apache.ambari.server.view.DefaultMasker;
@@ -146,6 +156,9 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   public static final String CLUSTER_HANDLE_COLUMN = "cluster_handle";
   protected static final String CLUSTER_VERSION_TABLE = "cluster_version";
   protected static final String HOST_VERSION_TABLE = "host_version";
+  protected static final String PHOENIX_QUERY_SERVER_PRINCIPAL_KEY = "phoenix.queryserver.kerberos.principal";
+  protected static final String PHOENIX_QUERY_SERVER_KEYTAB_KEY = "phoenix.queryserver.keytab.file";
+
 
   private static final String OOZIE_ENV_CONFIG = "oozie-env";
   private static final String HIVE_ENV_CONFIG = "hive-env";
@@ -156,6 +169,7 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   public static final String URL_ID_COLUMN = "url_id";
   private static final String PRINCIPAL_TYPE_TABLE = "adminprincipaltype";
   private static final String PRINCIPAL_TABLE = "adminprincipal";
+  protected static final String HBASE_SITE_CONFIG = "hbase-site";
 
   private static final Map<String, Integer> ROLE_ORDER;
 
@@ -338,6 +352,8 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
     updateHostRoleCommandTableDML();
     updateKerberosConfigs();
     updateYarnEnv();
+    updatePhoenixConfigs();
+    updateKerberosDescriptorArtifacts();
     removeHiveOozieDBConnectionConfigs();
     updateClustersAndHostsVersionStateTableDML();
     removeStandardDeviationAlerts();
@@ -2022,6 +2038,69 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   }
 
   /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void updateKerberosDescriptorArtifact(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
+    if (artifactEntity != null) {
+      Map<String, Object> data = artifactEntity.getArtifactData();
+
+      if (data != null) {
+        final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(data);
+
+        if (kerberosDescriptor != null) {
+          // Get the service that needs to be updated
+          KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService("HBASE");
+
+          if (serviceDescriptor != null) {
+            KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent("PHOENIX_QUERY_SERVER");
+
+            if (componentDescriptor != null) {
+              // Get the identity that needs to be updated
+              KerberosIdentityDescriptor origIdentityDescriptor = componentDescriptor.getIdentity("hbase_queryserver_hbase");
+
+              if (origIdentityDescriptor != null) {
+
+                // Create the new principal descriptor
+                KerberosPrincipalDescriptor origPrincipalDescriptor = origIdentityDescriptor.getPrincipalDescriptor();
+                KerberosPrincipalDescriptor newPrincipalDescriptor = new KerberosPrincipalDescriptor(
+                    null,
+                    null,
+                    (origPrincipalDescriptor == null)
+                        ? "hbase-site/phoenix.queryserver.kerberos.principal"
+                        : origPrincipalDescriptor.getConfiguration(),
+                    null);
+
+                // Create the new keytab descriptor
+                KerberosKeytabDescriptor origKeytabDescriptor = origIdentityDescriptor.getKeytabDescriptor();
+                KerberosKeytabDescriptor newKeytabDescriptor = new KerberosKeytabDescriptor(
+                    null,
+                    null,
+                    null,
+                    null,
+                    null,
+                    (origKeytabDescriptor == null)
+                        ? "hbase-site/phoenix.queryserver.keytab.file"
+                        : origKeytabDescriptor.getConfiguration(),
+                    false);
+
+                // Remove the old identity
+                componentDescriptor.removeIdentity("hbase_queryserver_hbase");
+
+                // Add the new identity
+                componentDescriptor.putIdentity(new KerberosIdentityDescriptor("/spnego", newPrincipalDescriptor, newKeytabDescriptor));
+
+                artifactEntity.setArtifactData(kerberosDescriptor.toMap());
+                artifactDAO.merge(artifactEntity);
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+
+  /**
   * Given a {@link ResourceEntity}, attempts to find the relevant cluster's name.
    *
    * @param resourceEntity a {@link ResourceEntity}
@@ -2135,6 +2214,80 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   }
 
   /**
+   * @return true if the stack is HDP-2.5 or later, false otherwise.
+   */
+  protected boolean isAtLeastHdp25(StackId stackId) {
+    if (null == stackId) {
+      return false;
+    }
+
+    try {
+      return stackId.compareTo(new StackId("HDP-2.5")) >= 0;
+    } catch (Exception e) {
+      // Different stack names throw an exception.
+      return false;
+    }
+  }
+
+  /**
+   * Update Phoenix Query Server Kerberos configurations. Ambari 2.4 changes the Phoenix Query Server
+   * to support SPNEGO authentication, which requires the "HTTP/_HOST" principal and corresponding
+   * keytab file instead of the generic HBase service principal and keytab it previously used.
+   */
+  protected void updatePhoenixConfigs() throws AmbariException {
+    final AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
+    final Clusters clusters = controller.getClusters();
+
+    if (null != clusters) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if (null != clusterMap && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+          Set<String> installedServices = cluster.getServices().keySet();
+          StackId stackId = cluster.getCurrentStackVersion();
+
+          // HBase is installed, Kerberos is enabled, and the stack is at least HDP-2.5
+          if (installedServices.contains("HBASE") && SecurityType.KERBEROS == cluster.getSecurityType() && isAtLeastHdp25(stackId)) {
+            Config hbaseSite = cluster.getDesiredConfigByType(HBASE_SITE_CONFIG);
+            if (null != hbaseSite) {
+              Map<String, String> hbaseSiteProperties = hbaseSite.getProperties();
+              // Get Phoenix Query Server kerberos config properties
+              String pqsKrbPrincipal = hbaseSiteProperties.get(PHOENIX_QUERY_SERVER_PRINCIPAL_KEY);
+              String pqsKrbKeytab = hbaseSiteProperties.get(PHOENIX_QUERY_SERVER_KEYTAB_KEY);
+
+              // Principal and Keytab are set
+              if (null != pqsKrbPrincipal && null != pqsKrbKeytab) {
+                final Map<String, String> updatedKerberosProperties = new HashMap<>();
+                final KerberosDescriptor defaultDescriptor = getKerberosDescriptor(cluster);
+
+                KerberosIdentityDescriptor spnegoDescriptor = defaultDescriptor.getIdentity("spnego");
+                if (null != spnegoDescriptor) {
+                  // Add the SPNEGO config for the principal
+                  KerberosPrincipalDescriptor principalDescriptor = spnegoDescriptor.getPrincipalDescriptor();
+                  if (null != principalDescriptor) {
+                    updatedKerberosProperties.put(PHOENIX_QUERY_SERVER_PRINCIPAL_KEY, principalDescriptor.getValue());
+                  }
+
+                  // Add the SPNEGO config for the keytab
+                  KerberosKeytabDescriptor keytabDescriptor = spnegoDescriptor.getKeytabDescriptor();
+                  if (null != keytabDescriptor) {
+                    updatedKerberosProperties.put(PHOENIX_QUERY_SERVER_KEYTAB_KEY, keytabDescriptor.getFile());
+                  }
+
+                  // Update the configuration if we changed anything
+                  if (!updatedKerberosProperties.isEmpty()) {
+                    updateConfigurationProperties(HBASE_SITE_CONFIG, updatedKerberosProperties, true, false);
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+
+  /**
    *  Update properties with name
    *  yarn.timeline-server.url to yarn.ats.url
    */
@@ -2221,4 +2374,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
       }
     }
   }
+
+
 }
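
The net effect of updatePhoenixConfigs() on hbase-site is easiest to see as a before/after transformation. A rough Python sketch with hypothetical values (the real code resolves the SPNEGO principal and keytab from the cluster's composite Kerberos Descriptor rather than taking them as arguments):

    def update_phoenix_configs(hbase_site, spnego_principal, spnego_keytab):
        # Only rewrite the PQS identity when both properties are already set,
        # mirroring the null checks in updatePhoenixConfigs().
        updated = dict(hbase_site)
        if ("phoenix.queryserver.kerberos.principal" in hbase_site
                and "phoenix.queryserver.keytab.file" in hbase_site):
            updated["phoenix.queryserver.kerberos.principal"] = spnego_principal
            updated["phoenix.queryserver.keytab.file"] = spnego_keytab
        return updated

    before = {
        "phoenix.queryserver.kerberos.principal": "hbase/_HOST@EXAMPLE.COM",
        "phoenix.queryserver.keytab.file": "/etc/security/keytabs/hbase.service.keytab",
    }
    after = update_phoenix_configs(before, "HTTP/_HOST@EXAMPLE.COM",
                                   "/etc/security/keytabs/spnego.service.keytab")
    print(after["phoenix.queryserver.kerberos.principal"])  # HTTP/_HOST@EXAMPLE.COM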

http://git-wip-us.apache.org/repos/asf/ambari/blob/24aca125/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/kerberos.json b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/kerberos.json
index c9536f8..f887f92 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/kerberos.json
@@ -42,6 +42,11 @@
             "hbase.coprocessor.regionserver.classes": 
"{{hbase_coprocessor_regionserver_classes}}",
             "hbase.bulkload.staging.dir": "/apps/hbase/staging"
           }
+        },
+        {
+          "core-site": {
+            "hadoop.proxyuser.HTTP.hosts": 
"${clusterHostInfo/phoenix_query_server_hosts}"
+          }
         }
       ],
       "components": [
@@ -104,23 +109,11 @@
           "name": "PHOENIX_QUERY_SERVER",
           "identities": [
             {
-              "name": "hbase_queryserver_hbase",
+              "name": "/spnego",
               "principal": {
-                "value": "hbase/_HOST@${realm}",
-                "type" : "service",
-                "configuration": 
"hbase-site/phoenix.queryserver.kerberos.principal",
-                "local_username": "${hbase-env/hbase_user}"
+                "configuration": 
"hbase-site/phoenix.queryserver.kerberos.principal"
               },
               "keytab": {
-                "file": "${keytab_dir}/hbase.service.keytab",
-                "owner": {
-                  "name": "${hbase-env/hbase_user}",
-                  "access": "r"
-                },
-                "group": {
-                  "name": "${cluster-env/user_group}",
-                  "access": ""
-                },
                 "configuration": "hbase-site/phoenix.queryserver.keytab.file"
               }
             }
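
The new core-site entry leans on Ambari's variable replacement: when Kerberos is enabled, ${clusterHostInfo/phoenix_query_server_hosts} is expanded to the cluster's PQS host list. A rough sketch of the intended substitution (the real resolver lives in Ambari's Kerberos processing; the function below is illustrative only):

    import re

    def resolve(value, cluster_host_info):
        # Expand ${clusterHostInfo/<key>} to a comma-separated host list.
        def repl(match):
            return ",".join(cluster_host_info.get(match.group(1), []))
        return re.sub(r"\$\{clusterHostInfo/([^}]+)\}", repl, value)

    info = {"phoenix_query_server_hosts": ["c6401.ambari.apache.org", "c6402.ambari.apache.org"]}
    print(resolve("${clusterHostInfo/phoenix_query_server_hosts}", info))
    # c6401.ambari.apache.org,c6402.ambari.apache.org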

http://git-wip-us.apache.org/repos/asf/ambari/blob/24aca125/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 6e506a0..d2e0d11 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -115,6 +115,33 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         config[configType]["properties"][key] = str(value)
     return appendProperty
 
+  def updateProperty(self, config, configType, services=None):
+    userConfigs = {}
+    changedConfigs = []
+    # If the services parameter is given, prefer the values set by the user
+    if services:
+      if 'configurations' in services.keys():
+        userConfigs = services['configurations']
+      if 'changed-configurations' in services.keys():
+        changedConfigs = services["changed-configurations"]
+
+    if configType not in config:
+      config[configType] = {}
+    if "properties" not in config[configType]:
+      config[configType]["properties"] = {}
+    def updatePropertyWithCallback(key, value, callback):
+      # If the property exists in changed-configurations, do not override it; use the user-defined value
+      if self.__isPropertyInChangedConfigs(configType, key, changedConfigs):
+        config[configType]["properties"][key] = userConfigs[configType]['properties'][key]
+      else:
+        # Give the callback an empty string if the mapping doesn't exist
+        current_value = ""
+        if key in config[configType]["properties"]:
+          current_value = config[configType]["properties"][key]
+
+        config[configType]["properties"][key] = callback(current_value, value)
+    return updatePropertyWithCallback
+
   def __isPropertyInChangedConfigs(self, configType, propertyName, changedConfigs):
     for changedConfig in changedConfigs:
       if changedConfig['type']==configType and changedConfig['name']==propertyName:
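
The closure returned by updateProperty() mirrors the existing putProperty()/appendProperty() helpers: the caller supplies a merge callback that receives the property's current value and the proposed value, and user-changed configs always win. A stripped-down sketch of the calling pattern (update_property here is a hypothetical stand-in that skips the changed-configurations check):

    def update_property(config, config_type):
        # Simplified stand-in for HDP206StackAdvisor.updateProperty.
        config.setdefault(config_type, {}).setdefault("properties", {})
        def update_with_callback(key, value, callback):
            current = config[config_type]["properties"].get(key, "")
            config[config_type]["properties"][key] = callback(current, value)
        return update_with_callback

    config = {"core-site": {"properties": {"hadoop.proxyuser.HTTP.hosts": "c6401"}}}
    append = update_property(config, "core-site")
    append("hadoop.proxyuser.HTTP.hosts", "c6402",
           lambda cur, new: ",".join(sorted(set((cur + "," + new).strip(",").split(",")))))
    print(config["core-site"]["properties"]["hadoop.proxyuser.HTTP.hosts"])  # c6401,c6402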

http://git-wip-us.apache.org/repos/asf/ambari/blob/24aca125/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 47f27ec..4d1dd16 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -195,13 +195,43 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
   def recommendHBASEConfigurations(self, configurations, clusterData, services, hosts):
     super(HDP25StackAdvisor, self).recommendHBASEConfigurations(configurations, clusterData, services, hosts)
     putHbaseSiteProperty = self.putProperty(configurations, "hbase-site", services)
+    appendCoreSiteProperty = self.updateProperty(configurations, "core-site", services)
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
 
     if 'KERBEROS' in servicesList:
       putHbaseSiteProperty('hbase.master.ui.readonly', 'true')
+
+      phoenix_query_server_hosts = self.get_phoenix_query_server_hosts(services, hosts)
+      if phoenix_query_server_hosts:
+        # The PQS hosts we want to ensure are set
+        new_value = ','.join(phoenix_query_server_hosts)
+        # Compute the unique set of hosts for the property
+        def updateCallback(originalValue, newValue):
+          # Only update the original value if it's not whitespace only
+          if originalValue and not originalValue.isspace():
+            hosts = originalValue.split(',')
+            # Add in the new hosts if we have some
+            if newValue and not newValue.isspace():
+              hosts.extend(newValue.split(','))
+            # Return the combined (unique) list of hosts
+            return ','.join(set(hosts))
+          else:
+            return newValue
+        # Update the proxyuser setting, deferring to our callback to merge results together
+        appendCoreSiteProperty('hadoop.proxyuser.HTTP.hosts', new_value, updateCallback)
     else:
       putHbaseSiteProperty('hbase.master.ui.readonly', 'false')
 
+  """
+  Returns the list of Phoenix Query Server host names, or None.
+  """
+  def get_phoenix_query_server_hosts(self, services, hosts):
+    if len(hosts['items']) > 0:
+      phoenix_query_server_hosts = self.getHostsWithComponent("HBASE", "PHOENIX_QUERY_SERVER", services, hosts)
+      assert (phoenix_query_server_hosts is not None), "Information about PHOENIX_QUERY_SERVER not found in cluster."
+      return [host['Hosts']['host_name'] for host in phoenix_query_server_hosts]
+
   def recommendHIVEConfigurations(self, configurations, clusterData, services, hosts):
     super(HDP25StackAdvisor, self).recommendHIVEConfigurations(configurations, clusterData, services, hosts)
     putHiveInteractiveEnvProperty = self.putProperty(configurations, "hive-interactive-env", services)
@@ -551,7 +581,6 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
       assert (node_manager_hosts is not None), "Information about NODEMANAGER not found in cluster."
       return node_manager_hosts
 
-
   """
   Returns the current LLAP queue capacity percentage value. (llap_queue_capacity)
   """

http://git-wip-us.apache.org/repos/asf/ambari/blob/24aca125/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index 2490851..56e8a8a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -25,6 +25,7 @@ import junit.framework.Assert;
 
 import static org.easymock.EasyMock.*;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertNull;
 
@@ -84,10 +85,15 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.view.DefaultMasker;
 import org.apache.ambari.view.ClusterType;
@@ -482,6 +488,8 @@ public class UpgradeCatalog240Test {
     Method createRolePrincipals = UpgradeCatalog240.class.getDeclaredMethod("createRolePrincipals");
     Method updateHDFSWidget = UpgradeCatalog240.class.getDeclaredMethod("updateHDFSWidgetDefinition");
     Method upgradeCapSchedulerView = UpgradeCatalog240.class.getDeclaredMethod("upgradeCapSchedulerView");
+    Method updatePhoenixConfigs = UpgradeCatalog240.class.getDeclaredMethod("updatePhoenixConfigs");
+    Method updateKerberosDescriptorArtifacts = AbstractUpgradeCatalog.class.getDeclaredMethod("updateKerberosDescriptorArtifacts");
 
     Capture<String> capturedStatements = newCapture(CaptureType.ALL);
 
@@ -513,6 +521,8 @@ public class UpgradeCatalog240Test {
             .addMockedMethod(createRolePrincipals)
             .addMockedMethod(updateHDFSWidget)
             .addMockedMethod(upgradeCapSchedulerView)
+            .addMockedMethod(updatePhoenixConfigs)
+            .addMockedMethod(updateKerberosDescriptorArtifacts)
             .createMock();
 
     Field field = AbstractUpgradeCatalog.class.getDeclaredField("dbAccessor");
@@ -539,6 +549,8 @@ public class UpgradeCatalog240Test {
     upgradeCatalog240.updateClusterInheritedPermissionsConfig();
     upgradeCatalog240.updateHDFSWidgetDefinition();
     upgradeCatalog240.upgradeCapSchedulerView();
+    upgradeCatalog240.updatePhoenixConfigs();
+    upgradeCatalog240.updateKerberosDescriptorArtifacts();
 
     replay(upgradeCatalog240, dbAccessor);
 
@@ -1624,5 +1636,85 @@ public class UpgradeCatalog240Test {
 
     verify(clusterDAO, instanceDAO, instance1);
   }
+
+  @Test
+  public void testPhoenixQueryServerKerberosUpdateConfigs() throws Exception {
+    // Tests that we switch from the HBase service principal and keytab to the SPNEGO service principal and keytab.
+    final String spnegoPrincipal = "HTTP/_HOST@EXAMPLE.COM";
+    final String spnegoKeytab = "/etc/security/keytabs/spnego.service.keytab";
+    final Map<String, String> oldPqsProperties = new HashMap<>();
+    oldPqsProperties.put("phoenix.queryserver.kerberos.principal", "hbase/_HOST@EXAMPLE.COM");
+    oldPqsProperties.put("phoenix.queryserver.keytab.file", "/etc/security/keytabs/hbase.service.keytab");
+    final Map<String, String> newPqsProperties = new HashMap<String, String>();
+    newPqsProperties.put("phoenix.queryserver.kerberos.principal", spnegoPrincipal);
+    newPqsProperties.put("phoenix.queryserver.keytab.file", spnegoKeytab);
+
+    final EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+    // Set up all of the injected mocks to trigger the upgrade scenario
+    AmbariManagementController controller = easyMockSupport.createNiceMock(AmbariManagementController.class);
+    KerberosDescriptor kerberosDescriptor = easyMockSupport.createNiceMock(KerberosDescriptor.class);
+    KerberosIdentityDescriptor kerberosIdentityDescriptor = easyMockSupport.createNiceMock(KerberosIdentityDescriptor.class);
+    KerberosPrincipalDescriptor principalDescriptor = easyMockSupport.createNiceMock(KerberosPrincipalDescriptor.class);
+    KerberosKeytabDescriptor keytabDescriptor = easyMockSupport.createNiceMock(KerberosKeytabDescriptor.class);
+    Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+    final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+    Config mockHbaseSite = easyMockSupport.createNiceMock(Config.class);
+    // HBase and Kerberos are both "installed"
+    final Map<String, Service> mockServices = new HashMap<>();
+    mockServices.put("HBASE", null);
+    final StackId stackId = new StackId("HDP-2.5");
+
+    expect(controller.getClusters()).andReturn(clusters).once();
+    expect(clusters.getClusters()).andReturn(Collections.singletonMap("normal", cluster)).once();
+    expect(cluster.getCurrentStackVersion()).andReturn(stackId);
+    expect(cluster.getServices()).andReturn(mockServices).once();
+    expect(cluster.getSecurityType()).andReturn(SecurityType.KERBEROS).anyTimes();
+    expect(cluster.getDesiredConfigByType(UpgradeCatalog240.HBASE_SITE_CONFIG)).andReturn(mockHbaseSite).atLeastOnce();
+    expect(mockHbaseSite.getProperties()).andReturn(oldPqsProperties).anyTimes();
+
+    // Stub out the KerberosDescriptor down to the Principal and Keytab Descriptors
+    expect(kerberosDescriptor.getIdentity("spnego")).andReturn(kerberosIdentityDescriptor).once();
+    expect(kerberosIdentityDescriptor.getPrincipalDescriptor()).andReturn(principalDescriptor).anyTimes();
+    expect(kerberosIdentityDescriptor.getKeytabDescriptor()).andReturn(keytabDescriptor).anyTimes();
+    expect(principalDescriptor.getValue()).andReturn(spnegoPrincipal).anyTimes();
+    expect(keytabDescriptor.getFile()).andReturn(spnegoKeytab).anyTimes();
+
+    Injector injector = easyMockSupport.createNiceMock(Injector.class);
+    expect(injector.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+
+    easyMockSupport.replayAll();
+
+    UpgradeCatalog240 upgradeCatalog240 = createMockBuilder(UpgradeCatalog240.class)
+        .withConstructor(Injector.class)
+        .withArgs(injector)
+        .addMockedMethod("updateConfigurationProperties", String.class, Map.class, boolean.class, boolean.class)
+        .addMockedMethod("getKerberosDescriptor", Cluster.class)
+        .createMock();
+
+    expect(upgradeCatalog240.getKerberosDescriptor(cluster)).andReturn(kerberosDescriptor).once();
+
+    upgradeCatalog240.updateConfigurationProperties(UpgradeCatalog240.HBASE_SITE_CONFIG, newPqsProperties, true, false);
+    expectLastCall().once();
+
+    replay(upgradeCatalog240);
+
+    // Expected that we see the configuration updates fire
+    upgradeCatalog240.updatePhoenixConfigs();
+    easyMockSupport.verifyAll();
+  }
+
+  @Test
+  public void testStackIdVersion() {
+    final EasyMockSupport easyMockSupport = new EasyMockSupport();
+    Injector injector = easyMockSupport.createNiceMock(Injector.class);
+    UpgradeCatalog240 upgradeCatalog240 = new UpgradeCatalog240(injector);
+
+    assertFalse(upgradeCatalog240.isAtLeastHdp25(new StackId("HDP-2.3")));
+    assertFalse(upgradeCatalog240.isAtLeastHdp25(new StackId("HDP-2.4")));
+    assertTrue(upgradeCatalog240.isAtLeastHdp25(new StackId("HDP-2.5")));
+    assertTrue(upgradeCatalog240.isAtLeastHdp25(new StackId("HDP-2.6")));
+    assertFalse(upgradeCatalog240.isAtLeastHdp25(new StackId("SOMETHINGELSE-1.4")));
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/24aca125/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index 1e4ddec..ca4cb82 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -5224,7 +5224,589 @@ def test_recommendAtlasConfigurations(self):
     services['ambari-server-properties'] = {'java.home': '/usr/jdk64/jdk1.7.3_23'}
     self.stackAdvisor.recommendAtlasConfigurations(configurations, clusterData, services, hosts)
     self.assertEquals(configurations, expected)
-    
+
+  def test_phoenixQueryServerSecureConfigsAppendProxyuser(self):
+    self.maxDiff = None
+    phoenix_query_server_hosts = ["c6401.ambari.apache.org", 
"c6402.ambari.apache.org"]
+    # Starting configuration
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true"
+        }
+      },
+      "core-site": {
+        "properties": {
+          "hadoop.proxyuser.HTTP.hosts": "c6401.ambari.apache.org",
+        }
+      },
+      "hbase-site": {
+        "properties": {}
+      }
+    }
+    # Expected configuration after the recommendation
+    expected_configuration = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true"
+        }
+      },
+      "core-site": {
+        "properties": {
+          "hadoop.proxyuser.HTTP.hosts": 
"c6401.ambari.apache.org,c6402.ambari.apache.org",
+        }
+      },
+      "hbase-site": {
+        "properties": {
+          "hbase.master.ui.readonly": "true"
+        }
+      }
+    }
+
+    clusterData = {
+      "hbaseRam": 4096,
+    }
+    services = {
+      "services": [
+        {
+          "href": "/api/v1/stacks/HDP/versions/2.4/services/HBASE",
+          "StackServices": {
+            "service_name": "HBASE",
+          },
+          "Versions": {
+            "stack_version": "2.5"
+          },
+          "components": [
+            {
+              "StackServiceComponents": {
+                "component_name": "PHOENIX_QUERY_SERVER",
+                "hostnames": phoenix_query_server_hosts
+              }
+            }
+          ]
+        },
+      ],
+      "configurations": configurations,
+      "changed-configurations": [ ]
+
+    }
+    hosts = {
+      "items" : [
+        {
+          "href" : "/api/v1/hosts/c6401.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6401.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6401.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }, {
+          "href" : "/api/v1/hosts/c6402.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6402.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6402.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }, {
+          "href" : "/api/v1/hosts/c6403.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6403.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6403.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }
+      ]
+    }
+
+    self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, hosts)
+
+    self.assertTrue('core-site' in configurations)
+    self.assertTrue('properties' in configurations['core-site'])
+    # Avoid an unnecessary sort in the stack advisor, sort here for easy comparison
+    actualHosts = configurations['core-site']['properties']['hadoop.proxyuser.HTTP.hosts']
+    expectedHosts = expected_configuration['core-site']['properties']['hadoop.proxyuser.HTTP.hosts']
+    self.assertEquals(splitAndSort(actualHosts), splitAndSort(expectedHosts))
+    # Do a simple check for hbase-site
+    self.assertTrue('hbase-site' in configurations)
+    self.assertTrue('properties' in configurations['hbase-site'])
+    self.assertEquals(configurations['hbase-site']['properties']['hbase.master.ui.readonly'],
+        expected_configuration['hbase-site']['properties']['hbase.master.ui.readonly'])
+
+  def test_phoenixQueryServerSecureConfigsNoProxyuser(self):
+    self.maxDiff = None
+    phoenix_query_server_hosts = ["c6401.ambari.apache.org"]
+    # Starting configuration
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true"
+        }
+      },
+      "core-site": {
+        "properties": {}
+      },
+      "hbase-site": {
+        "properties": {}
+      }
+    }
+    # Expected configuration after the recommendation
+    expected_configuration = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true"
+        }
+      },
+      "core-site": {
+        "properties": {
+          "hadoop.proxyuser.HTTP.hosts": "c6401.ambari.apache.org",
+        }
+      },
+      "hbase-site": {
+        "properties": {
+          "hbase.master.ui.readonly": "true"
+        }
+      }
+    }
+
+    clusterData = {
+      "hbaseRam": 4096,
+    }
+    services = {
+      "services": [
+        {
+          "href": "/api/v1/stacks/HDP/versions/2.4/services/HBASE",
+          "StackServices": {
+            "service_name": "HBASE",
+          },
+          "Versions": {
+            "stack_version": "2.5"
+          },
+          "components": [
+            {
+              "StackServiceComponents": {
+                "component_name": "PHOENIX_QUERY_SERVER",
+                "hostnames": phoenix_query_server_hosts
+              }
+            }
+          ]
+        },
+      ],
+      "configurations": configurations,
+      "changed-configurations": [ ]
+
+    }
+    hosts = {
+      "items" : [
+        {
+          "href" : "/api/v1/hosts/c6401.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6401.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6401.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }, {
+          "href" : "/api/v1/hosts/c6402.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6402.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6402.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }, {
+          "href" : "/api/v1/hosts/c6403.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6403.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6403.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }
+      ]
+    }
+
+    self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, hosts)
+
+    self.assertTrue('core-site' in configurations)
+    self.assertTrue('properties' in configurations['core-site'])
+    # Avoid an unnecessary sort in the stack advisor, sort here for easy comparison
+    actualHosts = configurations['core-site']['properties']['hadoop.proxyuser.HTTP.hosts']
+    expectedHosts = expected_configuration['core-site']['properties']['hadoop.proxyuser.HTTP.hosts']
+    self.assertEquals(splitAndSort(actualHosts), splitAndSort(expectedHosts))
+    # Do a simple check for hbase-site
+    self.assertTrue('hbase-site' in configurations)
+    self.assertTrue('properties' in configurations['hbase-site'])
+    self.assertEquals(configurations['hbase-site']['properties']['hbase.master.ui.readonly'],
+        expected_configuration['hbase-site']['properties']['hbase.master.ui.readonly'])
+
+  def test_phoenixQueryServerSecureConfigsAppendProxyuserNewHost(self):
+    self.maxDiff = None
+    phoenix_query_server_hosts = ["c6402.ambari.apache.org"]
+    # Starting configuration
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true"
+        }
+      },
+      "core-site": {
+        "properties": {
+          "hadoop.proxyuser.HTTP.hosts": "c6401.ambari.apache.org",
+        }
+      },
+      "hbase-site": {
+        "properties": {}
+      }
+    }
+    # Expected configuration after the recommendation
+    expected_configuration = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true"
+        }
+      },
+      "core-site": {
+        "properties": {
+          "hadoop.proxyuser.HTTP.hosts": 
"c6401.ambari.apache.org,c6402.ambari.apache.org",
+        }
+      },
+      "hbase-site": {
+        "properties": {
+          "hbase.master.ui.readonly": "true"
+        }
+      }
+    }
+
+    clusterData = {
+      "hbaseRam": 4096,
+    }
+    services = {
+      "services": [
+        {
+          "href": "/api/v1/stacks/HDP/versions/2.4/services/HBASE",
+          "StackServices": {
+            "service_name": "HBASE",
+          },
+          "Versions": {
+            "stack_version": "2.5"
+          },
+          "components": [
+            {
+              "StackServiceComponents": {
+                "component_name": "PHOENIX_QUERY_SERVER",
+                "hostnames": phoenix_query_server_hosts
+              }
+            }
+          ]
+        },
+      ],
+      "configurations": configurations,
+      "changed-configurations": [ ]
+
+    }
+    hosts = {
+      "items" : [
+        {
+          "href" : "/api/v1/hosts/c6401.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6401.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6401.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }, {
+          "href" : "/api/v1/hosts/c6402.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6402.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6402.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }, {
+          "href" : "/api/v1/hosts/c6403.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6403.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6403.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }
+      ]
+    }
+
+    self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, hosts)
+
+    self.assertTrue('core-site' in configurations)
+    self.assertTrue('properties' in configurations['core-site'])
+    # Avoid an unnecessary sort in the stack advisor, sort here for easy comparison
+    actualHosts = configurations['core-site']['properties']['hadoop.proxyuser.HTTP.hosts']
+    expectedHosts = expected_configuration['core-site']['properties']['hadoop.proxyuser.HTTP.hosts']
+    self.assertEquals(splitAndSort(actualHosts), splitAndSort(expectedHosts))
+    # Do a simple check for hbase-site
+    self.assertTrue('hbase-site' in configurations)
+    self.assertTrue('properties' in configurations['hbase-site'])
+    self.assertEquals(configurations['hbase-site']['properties']['hbase.master.ui.readonly'],
+        expected_configuration['hbase-site']['properties']['hbase.master.ui.readonly'])
+
+  def test_phoenixQueryServerNoChangesWithUnsecure(self):
+    self.maxDiff = None
+    phoenix_query_server_hosts = ["c6402.ambari.apache.org"]
+    # Starting configuration
+    configurations = {
+      "cluster-env": {
+        "properties": {}
+      },
+      "core-site": {
+        "properties": {}
+      },
+      "hbase-site": {
+        "properties": {}
+      }
+    }
+
+    clusterData = {
+      "hbaseRam": 4096,
+    }
+    services = {
+      "services": [
+        {
+          "href": "/api/v1/stacks/HDP/versions/2.4/services/HBASE",
+          "StackServices": {
+            "service_name": "HBASE",
+          },
+          "Versions": {
+            "stack_version": "2.5"
+          },
+          "components": [
+            {
+              "StackServiceComponents": {
+                "component_name": "PHOENIX_QUERY_SERVER",
+                "hostnames": phoenix_query_server_hosts
+              }
+            }
+          ]
+        },
+      ],
+      "configurations": configurations,
+      "changed-configurations": [ ]
+
+    }
+    hosts = {
+      "items" : [
+        {
+          "href" : "/api/v1/hosts/c6401.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6401.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6401.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }, {
+          "href" : "/api/v1/hosts/c6402.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6402.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6402.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }, {
+          "href" : "/api/v1/hosts/c6403.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6403.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6403.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }
+      ]
+    }
+
+    self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, hosts)
+
+    self.assertTrue('core-site' in configurations)
+    self.assertTrue('properties' in configurations['core-site'])
+    # Should have no updates for core-site for unsecure
+    self.assertFalse('hadoop.proxyuser.HTTP.hosts' in configurations['core-site']['properties'])
+    # Should have no update to hbase-site for unsecure
+    self.assertTrue('hbase-site' in configurations)
+    self.assertTrue('properties' in configurations['hbase-site'])
+    self.assertFalse('hbase.master.ui.readonly' in configurations['hbase-site']['properties'])
+
+  def test_obtainPhoenixQueryServerHosts(self):
+    self.maxDiff = None
+    phoenix_query_server_hosts = ["c6402.ambari.apache.org"]
+    # Starting configuration
+    configurations = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true"
+        }
+      },
+      "core-site": {
+        "properties": {
+          "hadoop.proxyuser.HTTP.hosts": "c6401.ambari.apache.org",
+        }
+      },
+      "hbase-site": {
+        "properties": {}
+      }
+    }
+    # Expected configuration after the recommendation
+    expected_configuration = {
+      "cluster-env": {
+        "properties": {
+          "security_enabled": "true"
+        }
+      },
+      "core-site": {
+        "properties": {
+          "hadoop.proxyuser.HTTP.hosts": 
"c6401.ambari.apache.org,c6402.ambari.apache.org",
+        }
+      },
+      "hbase-site": {
+        "properties": {
+          "hbase.master.ui.readonly": "true"
+        }
+      }
+    }
+
+    clusterData = {
+      "hbaseRam": 4096,
+    }
+    services = {
+      "services": [
+        {
+          "href": "/api/v1/stacks/HDP/versions/2.4/services/HBASE",
+          "StackServices": {
+            "service_name": "HBASE",
+          },
+          "Versions": {
+            "stack_version": "2.5"
+          },
+          "components": [
+            {
+              "StackServiceComponents": {
+                "component_name": "PHOENIX_QUERY_SERVER",
+                "hostnames": phoenix_query_server_hosts
+              }
+            }
+          ]
+        },
+      ],
+      "configurations": configurations,
+      "changed-configurations": [ ]
+    }
+
+    hosts = {
+      "items" : [
+        {
+          "href" : "/api/v1/hosts/c6401.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6401.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6401.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }, {
+          "href" : "/api/v1/hosts/c6402.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6402.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6402.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }, {
+          "href" : "/api/v1/hosts/c6403.ambari.apache.org",
+          "Hosts" : {
+            "cpu_count" : 1,
+            "host_name" : "c6403.ambari.apache.org",
+            "os_arch" : "x86_64",
+            "os_type" : "centos6",
+            "ph_cpu_count" : 1,
+            "public_host_name" : "c6403.ambari.apache.org",
+            "rack_info" : "/default-rack",
+            "total_mem" : 1922680
+          }
+        }
+      ]
+    }
+
+    self.assertEquals(self.stackAdvisor.get_phoenix_query_server_hosts(services, hosts),
+        phoenix_query_server_hosts)
+
+    phoenix_query_server_hosts = []
+    services['services'][0]['components'][0]['StackServiceComponents']['hostnames'] = phoenix_query_server_hosts
+
+    self.assertEquals(self.stackAdvisor.get_phoenix_query_server_hosts(services, hosts),
+        phoenix_query_server_hosts)
+
+"""
+Given a comma-separated string, split the items, sort them, and re-join the elements
+back into a comma-separated string.
+"""
+def splitAndSort(s):
+  l = s.split(',')
+  l.sort()
+  return ','.join(l)
+
 """
 Helper method to convert string of key-values to dict.
 """
