AMBARI-19466. Sqlline-thin is unable to connect to phoenix (echekankiy via 
dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fdbe0939
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fdbe0939
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fdbe0939

Branch: refs/heads/trunk
Commit: fdbe0939e9e42c54a1e0d4fbd953bc851eb46028
Parents: 59a770f
Author: Lisnichenko Dmitro <[email protected]>
Authored: Thu Jan 12 16:25:28 2017 +0200
Committer: Lisnichenko Dmitro <[email protected]>
Committed: Thu Jan 12 16:25:28 2017 +0200

----------------------------------------------------------------------
 .../stacks/HDP/2.0.6/services/stack_advisor.py  | 53 ++++++++++++--------
 .../stacks/HDP/2.1/services/stack_advisor.py    |  2 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |  2 +-
 .../stacks/HDP/2.5/services/stack_advisor.py    |  2 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   |  4 +-
 5 files changed, 37 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fdbe0939/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 9b217b3..bcd61b5 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -248,12 +248,12 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
          ambari_user = ambari_user.split('@')[0]
     return ambari_user
 
-  def recommendAmbariProxyUsersForHDFS(self, services, servicesList, 
putCoreSiteProperty, putCoreSitePropertyAttribute):
+  def recommendAmbariProxyUsersForHDFS(self, services, configurations, 
servicesList, putCoreSiteProperty, putCoreSitePropertyAttribute):
     if "HDFS" in servicesList:
       ambari_user = self.getAmbariUser(services)
       ambariHostName = socket.getfqdn()
-      self.put_proxyuser_value(ambari_user, ambariHostName, services=services, 
put_function=putCoreSiteProperty)
-      self.put_proxyuser_value(ambari_user, "*", is_groups=True, 
services=services, put_function=putCoreSiteProperty)
+      self.put_proxyuser_value(ambari_user, ambariHostName, services=services, 
configurations=configurations, put_function=putCoreSiteProperty)
+      self.put_proxyuser_value(ambari_user, "*", is_groups=True, 
services=services, configurations=configurations, 
put_function=putCoreSiteProperty)
       old_ambari_user = self.getOldAmbariUser(services)
       if old_ambari_user is not None:
         
putCoreSitePropertyAttribute("hadoop.proxyuser.{0}.hosts".format(old_ambari_user),
 'delete', 'true')
@@ -274,7 +274,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
 
     return validationItems
 
-  def _getHadoopProxyUsersForService(self, serviceName, serviceUserComponents, 
services, hosts):
+  def _getHadoopProxyUsersForService(self, serviceName, serviceUserComponents, 
services, hosts, configurations):
     Logger.info("Calculating Hadoop Proxy User recommendations for {0} 
service.".format(serviceName))
     servicesList = self.get_services_list(services)
     resultUsers = {}
@@ -298,7 +298,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         for proxyPropertyName, hostSelector in hostSelectorMap.iteritems():
           componentHostNamesString = hostSelector if isinstance(hostSelector, 
basestring) else '*'
           if isinstance(hostSelector, (list, tuple)):
-            _, componentHostNames = self.get_data_for_proxyuser(user, 
services) # preserve old values
+            _, componentHostNames = self.get_data_for_proxyuser(user, 
services, configurations) # preserve old values
             for component in hostSelector:
               componentHosts = self.getHostsWithComponent(serviceName, 
component, services, hosts)
               if componentHosts is not None:
@@ -317,7 +317,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
 
     return resultUsers
 
-  def getHadoopProxyUsers(self, services, hosts):
+  def getHadoopProxyUsers(self, services, hosts, configurations):
     """
     Gets Hadoop Proxy User recommendations based on the configuration that is 
provided by
     getServiceHadoopProxyUsersConfigurationDict.
@@ -328,31 +328,42 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     users = {}
 
     for serviceName, serviceUserComponents in 
self.getServiceHadoopProxyUsersConfigurationDict().iteritems():
-      users.update(self._getHadoopProxyUsersForService(serviceName, 
serviceUserComponents, services, hosts))
+      users.update(self._getHadoopProxyUsersForService(serviceName, 
serviceUserComponents, services, hosts, configurations))
 
     return users
 
-  def get_data_for_proxyuser(self, user_name, services, groups=False):
+  def get_data_for_proxyuser(self, user_name, services, configurations, 
groups=False):
+    """
+    Returns values of proxyuser properties for given user. Properties can be
+    hadoop.proxyuser.username.groups or hadoop.proxyuser.username.hosts
+    :param user_name: name of the proxy user whose properties are read
+    :param services: services descriptor holding the current cluster configurations
+    :param configurations: recommended configurations; already-recommended values for the property are merged into the result
+    :param groups: if true, will return values for the groups property, not hosts
+    :return: tuple (wildcard_value, set[values]), where wildcard_value indicates if property value was *
+    """
     if "core-site" in services["configurations"]:
       coreSite = services["configurations"]["core-site"]['properties']
     else:
       coreSite = {}
-
     if groups:
-        property_name = "hadoop.proxyuser.{0}.groups".format(user_name)
+      property_name = "hadoop.proxyuser.{0}.groups".format(user_name)
     else:
       property_name = "hadoop.proxyuser.{0}.hosts".format(user_name)
-
     if property_name in coreSite:
       property_value = coreSite[property_name]
       if property_value == "*":
         return True, set()
       else:
-        return False, set(property_value.split(","))
+        result_values = set()
+        if "core-site" in configurations:
+          if property_name in configurations["core-site"]['properties']:
+            result_values = 
result_values.union(configurations["core-site"]['properties'][property_name].split(","))
+        result_values = result_values.union(property_value.split(","))
+        return False, result_values
     return False, set()
 
-  def put_proxyuser_value(self, user_name, value, is_groups=False, 
services=None, put_function=None):
-    is_wildcard_value, current_value = self.get_data_for_proxyuser(user_name, 
services, is_groups)
+  def put_proxyuser_value(self, user_name, value, is_groups=False, 
services=None, configurations=None, put_function=None):
+    is_wildcard_value, current_value = self.get_data_for_proxyuser(user_name, 
services, configurations, is_groups)
     result_value = "*"
     result_values_set = self.merge_proxyusers_values(current_value, value)
     if len(result_values_set) > 0:
@@ -430,7 +441,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     putCoreSiteProperty = self.putProperty(configurations, "core-site", 
services)
     putCoreSitePropertyAttribute = self.putPropertyAttribute(configurations, 
"core-site")
 
-    users = self.getHadoopProxyUsers(services, hosts)
+    users = self.getHadoopProxyUsers(services, hosts, configurations)
 
     # Force dependencies for HIVE
     if "HIVE" in servicesList:
@@ -441,10 +452,10 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     for user_name, user_properties in users.iteritems():
 
       # Add properties "hadoop.proxyuser.*.hosts", "hadoop.proxyuser.*.groups" 
to core-site for all users
-      self.put_proxyuser_value(user_name, user_properties["propertyHosts"], 
services=services, put_function=putCoreSiteProperty)
+      self.put_proxyuser_value(user_name, user_properties["propertyHosts"], 
services=services, configurations=configurations, 
put_function=putCoreSiteProperty)
       Logger.info("Updated hadoop.proxyuser.{0}.hosts as : 
{1}".format(user_name, user_properties["propertyHosts"]))
       if "propertyGroups" in user_properties:
-        self.put_proxyuser_value(user_name, user_properties["propertyGroups"], 
is_groups=True, services=services, put_function=putCoreSiteProperty)
+        self.put_proxyuser_value(user_name, user_properties["propertyGroups"], 
is_groups=True, services=services, configurations=configurations, 
put_function=putCoreSiteProperty)
 
       # Remove old properties if user was renamed
       userOldValue = getOldValue(self, services, user_properties["config"], 
user_properties["propertyName"])
@@ -458,11 +469,11 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
           services["forced-configurations"].append({"type" : "core-site", 
"name" : "hadoop.proxyuser.{0}.groups".format(userOldValue)})
           services["forced-configurations"].append({"type" : "core-site", 
"name" : "hadoop.proxyuser.{0}.groups".format(user_name)})
 
-    self.recommendAmbariProxyUsersForHDFS(services, servicesList, 
putCoreSiteProperty, putCoreSitePropertyAttribute)
+    self.recommendAmbariProxyUsersForHDFS(services, configurations, 
servicesList, putCoreSiteProperty, putCoreSitePropertyAttribute)
 
-  def getHadoopProxyUsersValidationItems(self, properties, services, hosts):
+  def getHadoopProxyUsersValidationItems(self, properties, services, hosts, 
configurations):
     validationItems = []
-    users = self.getHadoopProxyUsers(services, hosts)
+    users = self.getHadoopProxyUsers(services, hosts, configurations)
     for user_name, user_properties in users.iteritems():
       props = ["hadoop.proxyuser.{0}.hosts".format(user_name)]
       if "propertyGroups" in user_properties:
@@ -1740,7 +1751,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
 
   def validateHDFSConfigurationsCoreSite(self, properties, 
recommendedDefaults, configurations, services, hosts):
     validationItems = []
-    validationItems.extend(self.getHadoopProxyUsersValidationItems(properties, 
services, hosts))
+    validationItems.extend(self.getHadoopProxyUsersValidationItems(properties, 
services, hosts, configurations))
     
validationItems.extend(self.getAmbariProxyUsersForHDFSValidationItems(properties,
 services))
     return self.toConfigurationValidationProblems(validationItems, "core-site")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fdbe0939/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py 
b/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
index 6ae29cb..b275b00 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
@@ -198,7 +198,7 @@ class HDP21StackAdvisor(HDP206StackAdvisor):
 
       meta = self.get_service_component_meta("HIVE", "WEBHCAT_SERVER", 
services)
       if "hostnames" in meta:
-        self.put_proxyuser_value("HTTP", meta["hostnames"], services=services, 
put_function=putCoreSiteProperty)
+        self.put_proxyuser_value("HTTP", meta["hostnames"], services=services, 
configurations=configurations, put_function=putCoreSiteProperty)
 
   def recommendTezConfigurations(self, configurations, clusterData, services, 
hosts):
     putTezProperty = self.putProperty(configurations, "tez-site")

http://git-wip-us.apache.org/repos/asf/ambari/blob/fdbe0939/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py 
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index eb81655..eb7ebc1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -485,7 +485,7 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     if kmsEnvProperties and self.checkSiteProperties(kmsEnvProperties, 
'kms_user') and 'KERBEROS' in servicesList:
       kmsUser = kmsEnvProperties['kms_user']
       kmsUserOld = getOldValue(self, services, 'kms-env', 'kms_user')
-      self.put_proxyuser_value(kmsUser, '*', is_groups=True, 
services=services, put_function=putCoreSiteProperty)
+      self.put_proxyuser_value(kmsUser, '*', is_groups=True, 
services=services, configurations=configurations, 
put_function=putCoreSiteProperty)
       if kmsUserOld is not None and kmsUser != kmsUserOld:
         
putCoreSitePropertyAttribute("hadoop.proxyuser.{0}.groups".format(kmsUserOld), 
'delete', 'true')
         services["forced-configurations"].append({"type" : "core-site", "name" 
: "hadoop.proxyuser.{0}.groups".format(kmsUserOld)})

http://git-wip-us.apache.org/repos/asf/ambari/blob/fdbe0939/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py 
b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 45a5351..b08ce75 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -633,7 +633,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
         # The PQS hosts we want to ensure are set
         new_value = ','.join(phoenix_query_server_hosts)
        # Update the proxyuser setting, deferring to our callback to merge results together
-        self.put_proxyuser_value("HTTP", new_value, services=services, 
put_function=putCoreSiteProperty)
+        self.put_proxyuser_value("HTTP", new_value, services=services, 
configurations=configurations, put_function=putCoreSiteProperty)
       else:
         Logger.debug("No phoenix query server hosts to update")
     else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/fdbe0939/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py 
b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 355e211..4a936c8 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -2070,7 +2070,7 @@ class TestHDP206StackAdvisor(TestCase):
       'hdfs-user': {'propertyName': 'hdfs_user', 'config': 'hadoop-env', 
'propertyHosts': '*', 'propertyGroups': '*'},
       'yarn-user': {'propertyName': 'yarn_user', 'config': 'yarn-env', 
'propertyHosts': 'host1,host2'}
     }
-    res = self.stackAdvisor.getHadoopProxyUsers(services, hosts)
+    res = self.stackAdvisor.getHadoopProxyUsers(services, hosts, 
configurations)
     self.assertEquals(res, res_expected)
 
     # 2) test filter function: only one RESOURCE_MANAGER
@@ -2078,7 +2078,7 @@ class TestHDP206StackAdvisor(TestCase):
     res_expected = {
         'hdfs-user': {'propertyName': 'hdfs_user', 'config': 'hadoop-env', 
'propertyHosts': '*', 'propertyGroups': '*'}
     }
-    res = self.stackAdvisor.getHadoopProxyUsers(services, hosts)
+    res = self.stackAdvisor.getHadoopProxyUsers(services, hosts, 
configurations)
     self.assertEquals(res, res_expected)
 
   def test_validateOneDataDirPerPartition(self):

Reply via email to