Repository: ambari
Updated Branches:
  refs/heads/trunk fb32cd331 -> 434fb995c


AMBARI-12192: RU: fails with spnego-enabled cluster and HDFS HA (jluniya)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/434fb995
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/434fb995
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/434fb995

Branch: refs/heads/trunk
Commit: 434fb995c74e0f62a527f65f3ba85994d8963e11
Parents: fb32cd3
Author: Jayush Luniya <[email protected]>
Authored: Sun Jun 28 16:47:21 2015 -0700
Committer: Jayush Luniya <[email protected]>
Committed: Sun Jun 28 16:47:21 2015 -0700

----------------------------------------------------------------------
 .../resource_management/TestPackageResource.py  |  2 +-
 .../HDFS/2.1.0.2.0/package/files/checkWebUI.py  |  4 +-
 .../package/scripts/journalnode_upgrade.py      |  4 +-
 .../2.1.0.2.0/package/scripts/service_check.py  | 46 +++++++++++++-------
 .../HDFS/2.1.0.2.0/package/scripts/utils.py     | 29 +++++++-----
 .../stacks/2.0.6/HDFS/test_journalnode.py       | 15 +++----
 .../stacks/2.2/configs/journalnode-upgrade.json |  2 +-
 7 files changed, 60 insertions(+), 42 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/434fb995/ambari-agent/src/test/python/resource_management/TestPackageResource.py
----------------------------------------------------------------------
diff --git 
a/ambari-agent/src/test/python/resource_management/TestPackageResource.py 
b/ambari-agent/src/test/python/resource_management/TestPackageResource.py
index c67745d..18b2d00 100644
--- a/ambari-agent/src/test/python/resource_management/TestPackageResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestPackageResource.py
@@ -176,7 +176,7 @@ class TestPackageResource(TestCase):
               )
     self.assertEquals(shell_mock.call_args[0][0],
                       ['/usr/bin/yum', '-d', '0', '-e', '0', '-y', 'install',
-                       '--disablerepo=*',
+                       '--disablerepo=',
                        '--enablerepo=HDP-UTILS-2.2.0.1-885,HDP-2.2.0.1-885', 
'some_package'])
 
   @patch.object(shell, "call", new = MagicMock(return_value=(0, None)))

http://git-wip-us.apache.org/repos/asf/ambari/blob/434fb995/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/checkWebUI.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/checkWebUI.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/checkWebUI.py
index 8ae6d2e..aa60ffc 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/checkWebUI.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/checkWebUI.py
@@ -47,9 +47,7 @@ def main():
       httpCode = 404
 
     if httpCode != 200:
-      if not https:
-        print "Cannot access WEB UI on: http://" + host + ":" + port if not https.lower() == "true" else "Cannot access WEB UI on: https://" + host + ":" + port
-
+      print "Cannot access WEB UI on: http://" + host + ":" + port if not https.lower() == "true" else "Cannot access WEB UI on: https://" + host + ":" + port
       exit(1)
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/434fb995/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode_upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode_upgrade.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode_upgrade.py
index a37412c..5e54593 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode_upgrade.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode_upgrade.py
@@ -56,7 +56,7 @@ def post_upgrade_check():
   nn_address = namenode_ha.get_address(NAMENODE_STATE.ACTIVE)
 
   nn_data = get_jmx_data(nn_address, 
'org.apache.hadoop.hdfs.server.namenode.FSNamesystem', 'JournalTransactionInfo',
-                         namenode_ha.is_encrypted())
+                         namenode_ha.is_encrypted(), params.security_enabled)
   if not nn_data:
     raise Fail("Could not retrieve JournalTransactionInfo from JMX")
 
@@ -121,7 +121,7 @@ def ensure_jns_have_new_txn(nodes, last_txn_id):
         continue
 
       url = '%s://%s:%s' % (protocol, node, params.journalnode_port)
-      data = get_jmx_data(url, 'Journal-', 'LastWrittenTxId')
+      data = get_jmx_data(url, 'Journal-', 'LastWrittenTxId', 
params.https_only, params.security_enabled)
       if data:
         actual_txn_ids[node] = int(data)
         if actual_txn_ids[node] >= last_txn_id:

http://git-wip-us.apache.org/repos/asf/ambari/blob/434fb995/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
index d66d70c..6ec3996 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
@@ -21,7 +21,8 @@ from resource_management import *
 from resource_management.core.shell import as_user
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
-
+from resource_management.libraries.functions.curl_krb_request import 
curl_krb_request
+from resource_management.core.logger import Logger
 
 class HdfsServiceCheck(Script):
   pass
@@ -68,21 +69,34 @@ class HdfsServiceCheckDefault(HdfsServiceCheck):
     params.HdfsResource(None, action="execute")
 
     if params.has_journalnode_hosts:
-      journalnode_port = params.journalnode_port
-      checkWebUIFileName = "checkWebUI.py"
-      checkWebUIFilePath = format("{tmp_dir}/{checkWebUIFileName}")
-      comma_sep_jn_hosts = ",".join(params.journalnode_hosts)
-      checkWebUICmd = format("python {checkWebUIFilePath} -m 
{comma_sep_jn_hosts} -p {journalnode_port} -s {https_only}")
-      File(checkWebUIFilePath,
-           content=StaticFile(checkWebUIFileName),
-           mode=0775)
-
-      Execute(checkWebUICmd,
-              logoutput=True,
-              try_sleep=3,
-              tries=5,
-              user=params.smoke_user
-      )
+      if params.security_enabled:
+        for host in params.journalnode_hosts:
+          if params.https_only:
+            uri = format("https://{host}:{journalnode_port}")
+          else:
+            uri = format("http://{host}:{journalnode_port}")
+          response, errmsg, time_millis = curl_krb_request(params.tmp_dir, 
params.smoke_user_keytab,
+                                                           
params.smokeuser_principal, uri, "jn_service_check",
+                                                           
params.kinit_path_local, False, None, params.smoke_user)
+          if not response:
+            Logger.error("Cannot access WEB UI on: {0}. Error : {1}", uri, 
errmsg)
+            return 1
+      else:
+        journalnode_port = params.journalnode_port
+        checkWebUIFileName = "checkWebUI.py"
+        checkWebUIFilePath = format("{tmp_dir}/{checkWebUIFileName}")
+        comma_sep_jn_hosts = ",".join(params.journalnode_hosts)
+        checkWebUICmd = format("python {checkWebUIFilePath} -m 
{comma_sep_jn_hosts} -p {journalnode_port} -s {https_only}")
+        File(checkWebUIFilePath,
+             content=StaticFile(checkWebUIFileName),
+             mode=0775)
+
+        Execute(checkWebUICmd,
+                logoutput=True,
+                try_sleep=3,
+                tries=5,
+                user=params.smoke_user
+        )
 
     if params.is_namenode_master:
       if params.has_zkfc_hosts:

http://git-wip-us.apache.org/repos/asf/ambari/blob/434fb995/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
index 93c8c8c..263bdec 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
@@ -29,6 +29,7 @@ from resource_management.core import shell
 from resource_management.core.shell import as_user, as_sudo
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
+from resource_management.libraries.functions.curl_krb_request import 
curl_krb_request
 
 from zkfc_slave import ZkfcSlave
 
@@ -261,7 +262,7 @@ def service(action=None, name=None, user=None, options="", 
create_pid_dir=False,
          action="delete",
     )
 
-def get_jmx_data(nn_address, modeler_type, metric, encrypted=False):
+def get_jmx_data(nn_address, modeler_type, metric, encrypted=False, 
security_enabled=False):
   """
  :param nn_address: Namenode Address, e.g., host:port, ** MAY ** be preceded 
with "http://" or "https://" already.
   If not preceded, will use the encrypted param to determine.
@@ -281,17 +282,23 @@ def get_jmx_data(nn_address, modeler_type, metric, 
encrypted=False):
   nn_address = nn_address + "jmx"
   Logger.info("Retrieve modeler: %s, metric: %s from JMX endpoint %s" % 
(modeler_type, metric, nn_address))
 
-  data = urllib2.urlopen(nn_address).read()
-  data_dict = json.loads(data)
+  if security_enabled:
+    import params
+    data, error_msg, time_millis = curl_krb_request(params.tmp_dir, 
params.smoke_user_keytab, params.smokeuser_principal, nn_address,
+                            "jn_upgrade", params.kinit_path_local, False, 
None, params.smoke_user)
+  else:
+    data = urllib2.urlopen(nn_address).read()
   my_data = None
-  if data_dict:
-    for el in data_dict['beans']:
-      if el is not None and el['modelerType'] is not None and 
el['modelerType'].startswith(modeler_type):
-        if metric in el:
-          my_data = el[metric]
-          if my_data:
-            my_data = json.loads(str(my_data))
-            break
+  if data:
+    data_dict = json.loads(data)
+    if data_dict:
+      for el in data_dict['beans']:
+        if el is not None and el['modelerType'] is not None and 
el['modelerType'].startswith(modeler_type):
+          if metric in el:
+            my_data = el[metric]
+            if my_data:
+              my_data = json.loads(str(my_data))
+              break
   return my_data
 
 def get_port(address):

http://git-wip-us.apache.org/repos/asf/ambari/blob/434fb995/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index 2e45f59..b7e99c1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -261,7 +261,8 @@ class TestJournalnode(RMFTestCase):
 
   @patch('time.sleep')
   @patch("urllib2.urlopen")
-  def test_post_rolling_restart(self, urlopen_mock, time_mock):
+  @patch("utils.curl_krb_request")
+  def test_post_rolling_restart(self, curl_krb_request_mock, urlopen_mock, 
time_mock):
     # load the NN and JN JMX files so that the urllib2.urlopen mock has data
     # to return
     num_journalnodes = 3
@@ -300,9 +301,7 @@ class TestJournalnode(RMFTestCase):
     urlopen_mock.assert_called_with("http://c6407.ambari.apache.org:8480/jmx")
 
     url_stream_mock.reset_mock()
-    url_stream_mock.read.side_effect = (num_journalnodes * [namenode_jmx, 
journalnode_jmx])
-
-    urlopen_mock.return_value = url_stream_mock
+    curl_krb_request_mock.side_effect = (num_journalnodes * [(namenode_jmx, 
"", 1), (journalnode_jmx, "", 1)])
 
     # now try with HDFS on SSL
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/journalnode.py",
@@ -313,10 +312,10 @@ class TestJournalnode(RMFTestCase):
       target = RMFTestCase.TARGET_COMMON_SERVICES )
 
     # ensure that the mock was called with the http-style version of the URL
-    urlopen_mock.assert_called
-    urlopen_mock.assert_called_with("https://c6407.ambari.apache.org:8481/jmx")
-
-
+    curl_krb_request_mock.assert_called
+    curl_krb_request_mock.assert_called_with("/tmp", 
"/etc/security/keytabs/smokeuser.headless.keytab",
+                                             "[email protected]", 
"https://c6407.ambari.apache.org:8481/jmx",
+                                             "jn_upgrade", "/usr/bin/kinit", 
False, None, "ambari-qa")
 
   @patch('time.sleep')
   @patch("urllib2.urlopen")

http://git-wip-us.apache.org/repos/asf/ambari/blob/434fb995/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json 
b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
index 0419223..8b10691 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
@@ -1027,7 +1027,7 @@
             "content": "\n#\n#\n# Licensed to the Apache Software Foundation 
(ASF) under one\n# or more contributor license agreements.  See the NOTICE 
file\n# distributed with this work for additional information\n# regarding 
copyright ownership.  The ASF licenses this file\n# to you under the Apache 
License, Version 2.0 (the\n# \"License\"); you may not use this file except in 
compliance\n# with the License.  You may obtain a copy of the License at\n#\n#  
 http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable 
law or agreed to in writing,\n# software distributed under the License is 
distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 
ANY\n# KIND, either express or implied.  See the License for the\n# specific 
language governing permissions and limitations\n# under the 
License.\n#\n#\n#\n\n# ***** Set root logger level to DEBUG and its only 
appender to A.\nlog4j.logger.org.apache.pig=info, A\n\n# ***** A is set to be a 
ConsoleAppender.\
 nlog4j.appender.A=org.apache.log4j.ConsoleAppender\n# ***** A uses 
PatternLayout.\nlog4j.appender.A.layout=org.apache.log4j.PatternLayout\nlog4j.appender.A.layout.ConversionPattern=%-4r
 [%t] %-5p %c %x - %m%n"
         }, 
         "cluster-env": {
-            "security_enabled": "true",
+            "security_enabled": "false",
             "ignore_groupsusers_create": "false",
             "kerberos_domain": "EXAMPLE.COM",
             "user_group": "hadoop",

Reply via email to