http://git-wip-us.apache.org/repos/asf/ambari/blob/5a93dfd4/ambari-server/src/test/python/TestBootstrap.py.orig
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestBootstrap.py.orig b/ambari-server/src/test/python/TestBootstrap.py.orig
deleted file mode 100644
index 792d99d..0000000
--- a/ambari-server/src/test/python/TestBootstrap.py.orig
+++ /dev/null
@@ -1,879 +0,0 @@
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-from stacks.utils.RMFTestCase import *
-import bootstrap
-import time
-import subprocess
-import os
-import logging
-import tempfile
-import pprint
-
-from ambari_commons.os_check import OSCheck
-from bootstrap import PBootstrap, Bootstrap, BootstrapDefault, SharedState, HostLog, SCP, SSH
-from unittest import TestCase
-from subprocess import Popen
-from bootstrap import AMBARI_PASSPHRASE_VAR_NAME
-from mock.mock import MagicMock, call
-from mock.mock import patch
-from mock.mock import create_autospec
-from only_for_platform import not_for_platform, os_distro_value, PLATFORM_WINDOWS
-
-@not_for_platform(PLATFORM_WINDOWS)
-class TestBootstrap(TestCase):
-
-  def setUp(self):
-    logging.basicConfig(level=logging.ERROR)
-
-
-  def test_getRemoteName(self):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                      "setupAgentFile", "ambariServer", "centos6", None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    utime1 = 1234
-    utime2 = 12345
-    bootstrap_obj.getUtime = MagicMock(return_value=utime1)
-    remote1 = bootstrap_obj.getRemoteName("/tmp/setupAgent.sh")
-    self.assertEquals(remote1, "/tmp/setupAgent{0}.sh".format(utime1))
-
-    bootstrap_obj.getUtime.return_value=utime2
-    remote1 = bootstrap_obj.getRemoteName("/tmp/setupAgent.sh")
-    self.assertEquals(remote1, "/tmp/setupAgent{0}.sh".format(utime1))
-
-    remote2 = bootstrap_obj.getRemoteName("/tmp/host_pass")
-    self.assertEquals(remote2, "/tmp/host_pass{0}".format(utime2))
-
-
-  # TODO: Test bootstrap timeout
-
-  # TODO: test_return_error_message_for_missing_sudo_package
-
-  def test_getAmbariPort(self):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    self.assertEquals(bootstrap_obj.getAmbariPort(),"8440")
-    shared_state.server_port = None
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    self.assertEquals(bootstrap_obj.getAmbariPort(),"null")
-
-
-  @patch.object(subprocess, "Popen")
-  @patch("sys.stderr")
-  @patch("sys.exit")
-  @patch.object(PBootstrap, "run")
-  @patch("os.path.dirname")
-  @patch("os.path.realpath")
-  def test_bootstrap_main(self, dirname_mock, realpath_mock, run_mock, exit_mock, stderr_mock, subprocess_Popen_mock):
-    bootstrap.main(["bootstrap.py", "hostname,hostname2", "/tmp/bootstrap", 
"root", "sshkey_file", "setupAgent.py", "ambariServer", \
-                    "centos6", "1.1.1", "8440", "root", "passwordfile"])
-    self.assertTrue(run_mock.called)
-    run_mock.reset_mock()
-    bootstrap.main(["bootstrap.py", "hostname,hostname2", "/tmp/bootstrap", 
"root", "sshkey_file", "setupAgent.py", "ambariServer", \
-                    "centos6", "1.1.1", "8440", "root", None])
-    self.assertTrue(run_mock.called)
-    run_mock.reset_mock()
-    def side_effect(retcode):
-      raise Exception(retcode, "sys.exit")
-    exit_mock.side_effect = side_effect
-    try:
-      bootstrap.main(["bootstrap.py","hostname,hostname2", "/tmp/bootstrap"])
-      self.fail("sys.exit(2)")
-    except Exception:
-      # Expected
-      pass
-    self.assertTrue(exit_mock.called)
-
-
-  @patch("os.environ")
-  def test_getRunSetupWithPasswordCommand(self, environ_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    environ_mock.__getitem__.return_value = "TEST_PASSPHRASE"
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    utime = 1234
-    bootstrap_obj.getUtime = MagicMock(return_value=utime)
-    ret = bootstrap_obj.getRunSetupWithPasswordCommand("hostname")
-    expected = "sudo -S python /var/lib/ambari-agent/data/tmp/setupAgent{0}.py 
hostname TEST_PASSPHRASE " \
-               "ambariServer root  8440 < 
/var/lib/ambari-agent/data/tmp/host_pass{0}".format(utime)
-    self.assertEquals(ret, expected)
-
-
-  def test_generateRandomFileName(self):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    self.assertTrue(bootstrap_obj.generateRandomFileName(None) == bootstrap_obj.getUtime())
-
-
-
-  @patch.object(OSCheck, "is_redhat_family")
-  @patch.object(OSCheck, "is_suse_family")
-  def test_getRepoDir(self, is_suse_family, is_redhat_family):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    # Suse
-    is_redhat_family.return_value = False
-    is_suse_family.return_value = True
-    res = bootstrap_obj.getRepoDir()
-    self.assertEquals(res, "/etc/zypp/repos.d")
-    # non-Suse
-    is_suse_family.return_value = False
-    is_redhat_family.return_value = True
-    res = bootstrap_obj.getRepoDir()
-    self.assertEquals(res, "/etc/yum.repos.d")
-
-  def test_getSetupScript(self):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    self.assertEquals(bootstrap_obj.shared_state.script_dir, "scriptDir")
-
-
-  def test_run_setup_agent_command_ends_with_project_version(self):
-    os.environ[AMBARI_PASSPHRASE_VAR_NAME] = ""
-    version = "1.1.1"
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               version, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    runSetupCommand = bootstrap_obj.getRunSetupCommand("hostname")
-    self.assertTrue(runSetupCommand.endswith(version + " 8440"))
-
-
-  def test_agent_setup_command_without_project_version(self):
-    os.environ[AMBARI_PASSPHRASE_VAR_NAME] = ""
-    version = None
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               version, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    runSetupCommand = bootstrap_obj.getRunSetupCommand("hostname")
-    self.assertTrue(runSetupCommand.endswith(" 8440"))
-
-
-  # TODO: test_os_check_fail_fails_bootstrap_execution
-
-
-  def test_host_log(self):
-    tmp_file, tmp_filename = tempfile.mkstemp()
-    dummy_log = HostLog(tmp_filename)
-    # First write to log
-    dummy_log.write("a\nb\nc")
-    # Read it
-    with open(tmp_filename) as f:
-      s = f.read()
-      etalon = "a\nb\nc\n"
-      self.assertEquals(s, etalon)
-    # Next write
-    dummy_log.write("Yet another string")
-    # Read it
-    with open(tmp_filename) as f:
-      s = f.read()
-      etalon = "a\nb\nc\nYet another string\n"
-      self.assertEquals(s, etalon)
-    # Should not append line end if it already exists
-    dummy_log.write("line break->\n")
-    # Read it
-    with open(tmp_filename) as f:
-      s = f.read()
-      etalon = "a\nb\nc\nYet another string\nline break->\n"
-      self.assertEquals(s, etalon)
-    # Cleanup
-    os.unlink(tmp_filename)
-
-
-  @patch("subprocess.Popen")
-  def test_SCP(self, popenMock):
-    params = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                                  "setupAgentFile", "ambariServer", "centos6",
-                                  "1.2.1", "8440", "root")
-    host_log_mock = MagicMock()
-    log = {'text': ""}
-    def write_side_effect(text):
-      log['text'] = log['text'] + text
-
-    host_log_mock.write.side_effect = write_side_effect
-    scp = SCP(params.user, params.sshkey_file, "dummy-host", "src/file",
-              "dst/file", params.bootdir, host_log_mock)
-    log_sample = "log_sample"
-    error_sample = "error_sample"
-    # Successful run
-    process = MagicMock()
-    popenMock.return_value = process
-    process.communicate.return_value = (log_sample, error_sample)
-    process.returncode = 0
-
-    retcode = scp.run()
-
-    self.assertTrue(popenMock.called)
-    self.assertTrue(log_sample in log['text'])
-    self.assertTrue(error_sample in log['text'])
-    command_str = str(popenMock.call_args[0][0])
-    self.assertEquals(command_str, "['scp', '-r', '-o', 'ConnectTimeout=60', 
'-o', "
-        "'BatchMode=yes', '-o', 'StrictHostKeyChecking=no', '-i', 
'sshkey_file',"
-        " 'src/file', 'root@dummy-host:dst/file']")
-    self.assertEqual(retcode["exitstatus"], 0)
-
-    log['text'] = ""
-    # unsuccessful run
-    process.returncode = 1
-
-    retcode = scp.run()
-
-    self.assertTrue(log_sample in log['text'])
-    self.assertTrue(error_sample in log['text'])
-    self.assertEqual(retcode["exitstatus"], 1)
-
-
-  @patch("subprocess.Popen")
-  def test_SSH(self, popenMock):
-    params = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                                  "setupAgentFile", "ambariServer", "centos6",
-                                  "1.2.1", "8440", "root")
-    host_log_mock = MagicMock()
-    log = {'text': ""}
-    def write_side_effect(text):
-      log['text'] = log['text'] + text
-
-    host_log_mock.write.side_effect = write_side_effect
-    ssh = SSH(params.user, params.sshkey_file, "dummy-host", "dummy-command",
-              params.bootdir, host_log_mock)
-    log_sample = "log_sample"
-    error_sample = "error_sample"
-    # Successful run
-    process = MagicMock()
-    popenMock.return_value = process
-    process.communicate.return_value = (log_sample, error_sample)
-    process.returncode = 0
-
-    retcode = ssh.run()
-
-    self.assertTrue(popenMock.called)
-    self.assertTrue(log_sample in log['text'])
-    self.assertTrue(error_sample in log['text'])
-    command_str = str(popenMock.call_args[0][0])
-    self.assertEquals(command_str, "['ssh', '-o', 'ConnectTimeOut=60', '-o', "
-            "'StrictHostKeyChecking=no', '-o', 'BatchMode=yes', '-tt', '-i', "
-            "'sshkey_file', 'root@dummy-host', 'dummy-command']")
-    self.assertEqual(retcode["exitstatus"], 0)
-
-    log['text'] = ""
-    # unsuccessful run
-    process.returncode = 1
-
-    retcode = ssh.run()
-
-    self.assertTrue(log_sample in log['text'])
-    self.assertTrue(error_sample in log['text'])
-    self.assertEqual(retcode["exitstatus"], 1)
-
-    log['text'] = ""
-    # unsuccessful run with error message
-    process.returncode = 1
-
-    dummy_error_message = "dummy_error_message"
-    ssh = SSH(params.user, params.sshkey_file, "dummy-host", "dummy-command",
-              params.bootdir, host_log_mock, errorMessage=dummy_error_message)
-    retcode = ssh.run()
-
-    self.assertTrue(log_sample in log['text'])
-    self.assertTrue(error_sample in log['text'])
-    self.assertTrue(dummy_error_message in log['text'])
-    self.assertEqual(retcode["exitstatus"], 1)
-
-
-  def test_getOsCheckScript(self):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    ocs = bootstrap_obj.getOsCheckScript()
-    self.assertEquals(ocs, "scriptDir/os_check_type.py")
-
-
-  @patch.object(BootstrapDefault, "getRemoteName")
-  def test_getOsCheckScriptRemoteLocation(self, getRemoteName_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    v = "/tmp/os_check_type1374259902.py"
-    getRemoteName_mock.return_value = v
-    ocs = bootstrap_obj.getOsCheckScriptRemoteLocation()
-    self.assertEquals(ocs, v)
-
-
-  @patch.object(BootstrapDefault, "is_suse")
-  def test_getRepoFile(self, is_suse_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    is_suse_mock.return_value = False
-    rf = bootstrap_obj.getRepoFile()
-    self.assertEquals(rf, "/etc/yum.repos.d/ambari.repo")
-
-
-  @patch.object(SSH, "__init__")
-  @patch.object(SSH, "run")
-  @patch.object(HostLog, "write")
-  def test_createTargetDir(self, write_mock, run_mock,
-                            init_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    expected = 42
-    init_mock.return_value = None
-    run_mock.return_value = expected
-    res = bootstrap_obj.createTargetDir()
-    self.assertEquals(res, expected)
-    command = str(init_mock.call_args[0][3])
-    self.assertEqual(command,
-                     "sudo mkdir -p /var/lib/ambari-agent/data/tmp ; "
-                     "sudo chown -R root /var/lib/ambari-agent/data/tmp ; "
-                     "sudo chmod 755 /var/lib/ambari-agent ; "
-                     "sudo chmod 755 /var/lib/ambari-agent/data ; "
-                     "sudo chmod 777 /var/lib/ambari-agent/data/tmp")
-
-  @patch.object(BootstrapDefault, "getOsCheckScript")
-  @patch.object(BootstrapDefault, "getOsCheckScriptRemoteLocation")
-  @patch.object(SCP, "__init__")
-  @patch.object(SCP, "run")
-  @patch.object(HostLog, "write")
-  def test_copyOsCheckScript(self, write_mock, run_mock, init_mock,
-                    getOsCheckScriptRemoteLocation_mock, getOsCheckScript_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    getOsCheckScript_mock.return_value = "OsCheckScript"
-    getOsCheckScriptRemoteLocation_mock.return_value = "OsCheckScriptRemoteLocation"
-    expected = 42
-    init_mock.return_value = None
-    run_mock.return_value = expected
-    res = bootstrap_obj.copyOsCheckScript()
-    self.assertEquals(res, expected)
-    input_file = str(init_mock.call_args[0][3])
-    remote_file = str(init_mock.call_args[0][4])
-    self.assertEqual(input_file, "OsCheckScript")
-    self.assertEqual(remote_file, "OsCheckScriptRemoteLocation")
-
-
-  @patch.object(BootstrapDefault, "getRemoteName")
-  @patch.object(BootstrapDefault, "hasPassword")
-  @patch.object(OSCheck, "is_suse_family")
-  @patch.object(OSCheck, "is_ubuntu_family")
-  @patch.object(OSCheck, "is_redhat_family")
-  def test_getMoveRepoFileCommand(self, is_redhat_family, is_ubuntu_family, is_suse_family, hasPassword_mock, getRemoteName_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    is_redhat_family.return_value = True
-    is_ubuntu_family.return_value = False
-    is_suse_family.return_value = False
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    # Without password
-    hasPassword_mock.return_value = False
-    getRemoteName_mock.return_value = "RemoteName"
-    rf = bootstrap_obj.getMoveRepoFileCommand("target")
-    self.assertEquals(rf, "sudo mv RemoteName target/ambari.repo")
-    # With password
-    hasPassword_mock.return_value = True
-    getRemoteName_mock.return_value = "RemoteName"
-    rf = bootstrap_obj.getMoveRepoFileCommand("target")
-    self.assertEquals(rf, "sudo -S mv RemoteName target/ambari.repo < 
RemoteName")
-
-  @patch("os.path.exists")
-  @patch.object(OSCheck, "is_suse_family")
-  @patch.object(OSCheck, "is_ubuntu_family")
-  @patch.object(OSCheck, "is_redhat_family")
-  @patch.object(BootstrapDefault, "getMoveRepoFileCommand")
-  @patch.object(BootstrapDefault, "getRepoDir")
-  @patch.object(BootstrapDefault, "getRepoFile")
-  @patch.object(BootstrapDefault, "getRemoteName")
-  @patch.object(SCP, "__init__")
-  @patch.object(SCP, "run")
-  @patch.object(SSH, "__init__")
-  @patch.object(SSH, "run")
-  @patch.object(HostLog, "write")
-  def test_copyNeededFiles(self, write_mock, ssh_run_mock, ssh_init_mock,
-                           scp_run_mock, scp_init_mock,
-                           getRemoteName_mock, getRepoFile_mock, getRepoDir,
-                           getMoveRepoFileCommand, is_redhat_family, is_ubuntu_family, is_suse_family,
-                           os_path_exists_mock):
-    #
-    # Ambari repo file exists
-    #
-    def os_path_exists_side_effect(*args, **kwargs):
-      if args[0] == getRepoFile_mock():
-        return True
-      else:
-        return False
-
-    os_path_exists_mock.side_effect = os_path_exists_side_effect
-    os_path_exists_mock.return_value = None
-
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    is_redhat_family.return_value = True
-    is_ubuntu_family.return_value = False
-    is_suse_family.return_value = False
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    getMoveRepoFileCommand.return_value = "MoveRepoFileCommand"
-    getRepoDir.return_value  = "RepoDir"
-    getRemoteName_mock.return_value = "RemoteName"
-    getRepoFile_mock.return_value = "RepoFile"
-    expected1 = {"exitstatus": 42, "log": "log42", "errormsg": "errorMsg"}
-    expected2 = {"exitstatus": 17, "log": "log17", "errormsg": "errorMsg"}
-    expected3 = {"exitstatus": 1, "log": "log1", "errormsg": "errorMsg"}
-    expected4 = {"exitstatus": 17, "log": "log17", "errormsg": "errorMsg"}
-    scp_init_mock.return_value = None
-    ssh_init_mock.return_value = None
-    # Testing max retcode return
-    scp_run_mock.side_effect = [expected1, expected3]
-    ssh_run_mock.side_effect = [expected2, expected4]
-    res = bootstrap_obj.copyNeededFiles()
-    self.assertEquals(res, expected1["exitstatus"])
-    input_file = str(scp_init_mock.call_args[0][3])
-    remote_file = str(scp_init_mock.call_args[0][4])
-    self.assertEqual(input_file, "setupAgentFile")
-    self.assertEqual(remote_file, "RemoteName")
-    command = str(ssh_init_mock.call_args[0][3])
-    self.assertEqual(command, "sudo chmod 644 RepoFile")
-    # Another order
-    expected1 = {"exitstatus": 0, "log": "log0", "errormsg": "errorMsg"}
-    expected2 = {"exitstatus": 17, "log": "log17", "errormsg": "errorMsg"}
-    expected3 = {"exitstatus": 1, "log": "log1", "errormsg": "errorMsg"}
-    expected4 = {"exitstatus": 17, "log": "log17", "errormsg": "errorMsg"}
-    scp_run_mock.side_effect = [expected1, expected3]
-    ssh_run_mock.side_effect = [expected2, expected4]
-    res = bootstrap_obj.copyNeededFiles()
-    self.assertEquals(res, expected2["exitstatus"])
-    # yet another order
-    expected1 = {"exitstatus": 33, "log": "log33", "errormsg": "errorMsg"}
-    expected2 = {"exitstatus": 17, "log": "log17", "errormsg": "errorMsg"}
-    expected3 = {"exitstatus": 42, "log": "log42", "errormsg": "errorMsg"}
-    expected4 = {"exitstatus": 17, "log": "log17", "errormsg": "errorMsg"}
-    scp_run_mock.side_effect = [expected1, expected3]
-    ssh_run_mock.side_effect = [expected2, expected4]
-    res = bootstrap_obj.copyNeededFiles()
-    self.assertEquals(res, expected3["exitstatus"])
-
-    #
-    #Ambari repo file does not exist
-    #
-    os_path_exists_mock.side_effect = None
-    os_path_exists_mock.return_value = False
-
-    #Expectations:
-    # SSH will not be called at all
-    # SCP will be called once for copying the setup script file
-    scp_run_mock.reset_mock()
-    ssh_run_mock.reset_mock()
-    expectedResult = {"exitstatus": 33, "log": "log33", "errormsg": "errorMsg"}
-    scp_run_mock.side_effect = [expectedResult]
-    res = bootstrap_obj.copyNeededFiles()
-    self.assertFalse(ssh_run_mock.called)
-    self.assertEquals(res, expectedResult["exitstatus"])
-
-  @patch.object(BootstrapDefault, "getOsCheckScriptRemoteLocation")
-  @patch.object(SSH, "__init__")
-  @patch.object(SSH, "run")
-  @patch.object(HostLog, "write")
-  def test_runOsCheckScript(self, write_mock, run_mock,
-                            init_mock, getOsCheckScriptRemoteLocation_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    getOsCheckScriptRemoteLocation_mock.return_value = "OsCheckScriptRemoteLocation"
-    expected = 42
-    init_mock.return_value = None
-    run_mock.return_value = expected
-    res = bootstrap_obj.runOsCheckScript()
-    self.assertEquals(res, expected)
-    command = str(init_mock.call_args[0][3])
-    self.assertEqual(command,
-                     "chmod a+x OsCheckScriptRemoteLocation && "
-                     "env PYTHONPATH=$PYTHONPATH:/var/lib/ambari-agent/data/tmp OsCheckScriptRemoteLocation centos6")
-
-
-  @patch.object(SSH, "__init__")
-  @patch.object(BootstrapDefault, "getRunSetupCommand")
-  @patch.object(SSH, "run")
-  @patch.object(HostLog, "write")
-  def test_runSetupAgent(self, write_mock, run_mock,
-                         getRunSetupCommand_mock, init_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    getRunSetupCommand_mock.return_value = "RunSetupCommand"
-    expected = 42
-    init_mock.return_value = None
-    run_mock.return_value = expected
-    res = bootstrap_obj.runSetupAgent()
-    self.assertEquals(res, expected)
-    command = str(init_mock.call_args[0][3])
-    self.assertEqual(command, "RunSetupCommand")
-
-
-  @patch.object(BootstrapDefault, "hasPassword")
-  @patch.object(BootstrapDefault, "getRunSetupWithPasswordCommand")
-  @patch.object(BootstrapDefault, "getRunSetupWithoutPasswordCommand")
-  def test_getRunSetupCommand(self, getRunSetupWithoutPasswordCommand_mock,
-                              getRunSetupWithPasswordCommand_mock,
-                              hasPassword_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    # With password
-    hasPassword_mock.return_value = True
-    getRunSetupWithPasswordCommand_mock.return_value = "RunSetupWithPasswordCommand"
-    getRunSetupWithoutPasswordCommand_mock.return_value = "RunSetupWithoutPasswordCommand"
-    res = bootstrap_obj.getRunSetupCommand("dummy-host")
-    self.assertEqual(res, "RunSetupWithPasswordCommand")
-    # Without password
-    hasPassword_mock.return_value = False
-    res = bootstrap_obj.getRunSetupCommand("dummy-host")
-    self.assertEqual(res, "RunSetupWithoutPasswordCommand")
-
-
-  @patch.object(HostLog, "write")
-  def test_createDoneFile(self, write_mock):
-    tmp_dir = tempfile.gettempdir()
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", tmp_dir,
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    done_file = os.path.join(tmp_dir, "hostname.done")
-    expected = 42
-    bootstrap_obj.createDoneFile(expected)
-    with open(done_file) as df:
-      res = df.read()
-      self.assertEqual(res, str(expected))
-    os.unlink(done_file)
-
-  @patch.object(OSCheck, "is_suse_family")
-  @patch.object(OSCheck, "is_ubuntu_family")
-  @patch.object(OSCheck, "is_redhat_family")
-  @patch.object(SSH, "__init__")
-  @patch.object(SSH, "run")
-  @patch.object(HostLog, "write")
-  def test_checkSudoPackage(self, write_mock, run_mock, init_mock, is_redhat_family, is_ubuntu_family, is_suse_family):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    expected = 42
-    init_mock.return_value = None
-    run_mock.return_value = expected
-    is_redhat_family.return_value = True
-    is_ubuntu_family.return_value = False
-    is_suse_family.return_value = False
-    res = bootstrap_obj.checkSudoPackage()
-    self.assertEquals(res, expected)
-    command = str(init_mock.call_args[0][3])
-    self.assertEqual(command, "rpm -qa | grep -e '^sudo\-'")
-
-  @patch.object(OSCheck, "is_suse_family")
-  @patch.object(OSCheck, "is_ubuntu_family")
-  @patch.object(OSCheck, "is_redhat_family")
-  @patch.object(SSH, "__init__")
-  @patch.object(SSH, "run")
-  @patch.object(HostLog, "write")
-  def test_checkSudoPackageUbuntu(self, write_mock, run_mock, init_mock,
-                                  is_redhat_family, is_ubuntu_family, is_suse_family):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "ubuntu12",
-                               None, "8440", "root")
-    is_redhat_family.return_value = False
-    is_ubuntu_family.return_value = True
-    is_suse_family.return_value = False
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    expected = 42
-    init_mock.return_value = None
-    run_mock.return_value = expected
-    res = bootstrap_obj.checkSudoPackage()
-    self.assertEquals(res, expected)
-    command = str(init_mock.call_args[0][3])
-    self.assertEqual(command, "dpkg --get-selections|grep -e 
'^sudo\s*install'")
-
-
-  @patch.object(SSH, "__init__")
-  @patch.object(SSH, "run")
-  @patch.object(HostLog, "write")
-  @patch.object(BootstrapDefault, "getPasswordFile")
-  def test_deletePasswordFile(self, getPasswordFile_mock, write_mock, run_mock,
-                              init_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    expected = 42
-    getPasswordFile_mock.return_value = "PasswordFile"
-    init_mock.return_value = None
-    run_mock.return_value = expected
-    res = bootstrap_obj.deletePasswordFile()
-    self.assertEquals(res, expected)
-    command = str(init_mock.call_args[0][3])
-    self.assertEqual(command, "rm PasswordFile")
-
-
-  @patch.object(BootstrapDefault, "getPasswordFile")
-  @patch.object(SCP, "__init__")
-  @patch.object(SCP, "run")
-  @patch.object(SSH, "__init__")
-  @patch.object(SSH, "run")
-  @patch.object(HostLog, "write")
-  def test_copyPasswordFile(self, write_mock, ssh_run_mock,
-                            ssh_init_mock, scp_run_mock,
-                            scp_init_mock, getPasswordFile_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root", password_file="PasswordFile")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    getPasswordFile_mock.return_value = "PasswordFile"
-    # Testing max retcode return
-    expected1 = {"exitstatus": 42, "log": "log42", "errormsg": "errorMsg"}
-    expected2 = {"exitstatus": 17, "log": "log17", "errormsg": "errorMsg"}
-    scp_init_mock.return_value = None
-    scp_run_mock.return_value = expected1
-    ssh_init_mock.return_value = None
-    ssh_run_mock.return_value = expected2
-    res = bootstrap_obj.copyPasswordFile()
-    self.assertEquals(res, expected1["exitstatus"])
-    input_file = str(scp_init_mock.call_args[0][3])
-    remote_file = str(scp_init_mock.call_args[0][4])
-    self.assertEqual(input_file, "PasswordFile")
-    self.assertEqual(remote_file, "PasswordFile")
-    command = str(ssh_init_mock.call_args[0][3])
-    self.assertEqual(command, "chmod 600 PasswordFile")
-    # Another order
-    expected1 = {"exitstatus": 0, "log": "log0", "errormsg": "errorMsg"}
-    expected2 = {"exitstatus": 17, "log": "log17", "errormsg": "errorMsg"}
-    scp_run_mock.return_value = expected1
-    ssh_run_mock.return_value = expected2
-    res = bootstrap_obj.copyPasswordFile()
-    self.assertEquals(res, expected2["exitstatus"])
-
-
-  @patch.object(SSH, "__init__")
-  @patch.object(SSH, "run")
-  @patch.object(HostLog, "write")
-  @patch.object(BootstrapDefault, "getPasswordFile")
-  def test_changePasswordFileModeOnHost(self, getPasswordFile_mock, write_mock,
-                                        run_mock, init_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    expected = 42
-    getPasswordFile_mock.return_value = "PasswordFile"
-    init_mock.return_value = None
-    run_mock.return_value = expected
-    res = bootstrap_obj.changePasswordFileModeOnHost()
-    self.assertEquals(res, expected)
-    command = str(init_mock.call_args[0][3])
-    self.assertEqual(command, "chmod 600 PasswordFile")
-
-
-  @patch.object(HostLog, "write")
-  def test_try_to_execute(self, write_mock):
-    expected = 43
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    # Normal case
-    def act_normal_return_int():
-      return 43
-    ret = bootstrap_obj.try_to_execute(act_normal_return_int)
-    self.assertEqual(ret["exitstatus"], expected)
-    self.assertFalse(write_mock.called)
-    write_mock.reset_mock()
-    def act_normal_return():
-        return {"exitstatus": 43}
-    ret = bootstrap_obj.try_to_execute(act_normal_return)
-    self.assertEqual(ret["exitstatus"], expected)
-    self.assertFalse(write_mock.called)
-    write_mock.reset_mock()
-    # Exception scenario
-    def act():
-      raise IOError()
-    ret = bootstrap_obj.try_to_execute(act)
-    self.assertEqual(ret["exitstatus"], 177)
-    self.assertTrue(write_mock.called)
-
-
-  @patch.object(BootstrapDefault, "try_to_execute")
-  @patch.object(BootstrapDefault, "hasPassword")
-  @patch.object(BootstrapDefault, "createDoneFile")
-  @patch.object(HostLog, "write")
-  @patch("logging.warn")
-  @patch("logging.error")
-  def test_run(self, error_mock, warn_mock, write_mock, createDoneFile_mock,
-               hasPassword_mock, try_to_execute_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    # Testing workflow without password
-    bootstrap_obj.copied_password_file = False
-    hasPassword_mock.return_value = False
-    try_to_execute_mock.return_value = {"exitstatus": 0, "log":"log0", "errormsg":"errormsg0"}
-    bootstrap_obj.run()
-    self.assertEqual(try_to_execute_mock.call_count, 7) # <- Adjust if changed
-    self.assertTrue(createDoneFile_mock.called)
-    self.assertEqual(bootstrap_obj.getStatus()["return_code"], 0)
-
-    try_to_execute_mock.reset_mock()
-    createDoneFile_mock.reset_mock()
-    # Testing workflow with password
-    bootstrap_obj.copied_password_file = True
-    hasPassword_mock.return_value = True
-    try_to_execute_mock.return_value = {"exitstatus": 0, "log":"log0", "errormsg":"errormsg0"}
-    bootstrap_obj.run()
-    self.assertEqual(try_to_execute_mock.call_count, 10) # <- Adjust if changed
-    self.assertTrue(createDoneFile_mock.called)
-    self.assertEqual(bootstrap_obj.getStatus()["return_code"], 0)
-
-    error_mock.reset_mock()
-    write_mock.reset_mock()
-    try_to_execute_mock.reset_mock()
-    createDoneFile_mock.reset_mock()
-    # Testing workflow when some action failed before copying password
-    bootstrap_obj.copied_password_file = False
-    hasPassword_mock.return_value = False
-    try_to_execute_mock.side_effect = [{"exitstatus": 0, "log":"log0", "errormsg":"errormsg0"}, {"exitstatus": 1, "log":"log1", "errormsg":"errormsg1"}]
-    bootstrap_obj.run()
-    self.assertEqual(try_to_execute_mock.call_count, 2) # <- Adjust if changed
-    self.assertTrue("ERROR" in error_mock.call_args[0][0])
-    self.assertTrue("ERROR" in write_mock.call_args[0][0])
-    self.assertTrue(createDoneFile_mock.called)
-    self.assertEqual(bootstrap_obj.getStatus()["return_code"], 1)
-
-    try_to_execute_mock.reset_mock()
-    createDoneFile_mock.reset_mock()
-    # Testing workflow when some action failed after copying password
-    bootstrap_obj.copied_password_file = True
-    hasPassword_mock.return_value = True
-    try_to_execute_mock.side_effect = [{"exitstatus": 0, "log":"log0", "errormsg":"errormsg0"}, {"exitstatus": 42, "log":"log42", "errormsg":"errormsg42"}, {"exitstatus": 0, "log":"log0", "errormsg":"errormsg0"}]
-    bootstrap_obj.run()
-    self.assertEqual(try_to_execute_mock.call_count, 3) # <- Adjust if changed
-    self.assertTrue(createDoneFile_mock.called)
-    self.assertEqual(bootstrap_obj.getStatus()["return_code"], 42)
-
-    error_mock.reset_mock()
-    write_mock.reset_mock()
-    try_to_execute_mock.reset_mock()
-    createDoneFile_mock.reset_mock()
-    # Testing workflow when some action failed after copying password and
-    # removing password failed too
-    bootstrap_obj.copied_password_file = True
-    hasPassword_mock.return_value = True
-    try_to_execute_mock.side_effect = [{"exitstatus": 0, "log":"log0", "errormsg":"errormsg0"}, {"exitstatus": 17, "log":"log17", "errormsg":"errormsg17"}, {"exitstatus": 19, "log":"log19", "errormsg":"errormsg19"}]
-    bootstrap_obj.run()
-    self.assertEqual(try_to_execute_mock.call_count, 3) # <- Adjust if changed
-    self.assertTrue("ERROR" in write_mock.call_args_list[0][0][0])
-    self.assertTrue("ERROR" in error_mock.call_args[0][0])
-    self.assertTrue("WARNING" in write_mock.call_args_list[1][0][0])
-    self.assertTrue("WARNING" in warn_mock.call_args[0][0])
-    self.assertTrue(createDoneFile_mock.called)
-    self.assertEqual(bootstrap_obj.getStatus()["return_code"], 17)
-
-
-  @patch.object(BootstrapDefault, "createDoneFile")
-  @patch.object(HostLog, "write")
-  def test_interruptBootstrap(self, write_mock, createDoneFile_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    bootstrap_obj = Bootstrap("hostname", shared_state)
-    bootstrap_obj.interruptBootstrap()
-    self.assertTrue(createDoneFile_mock.called)
-
-
-  @patch("time.sleep")
-  @patch("time.time")
-  @patch("logging.warn")
-  @patch("logging.info")
-  @patch.object(BootstrapDefault, "start")
-  @patch.object(BootstrapDefault, "interruptBootstrap")
-  @patch.object(BootstrapDefault, "getStatus")
-  def test_PBootstrap(self, getStatus_mock, interruptBootstrap_mock, start_mock,
-                      info_mock, warn_mock, time_mock, sleep_mock):
-    shared_state = SharedState("root", "sshkey_file", "scriptDir", "bootdir",
-                               "setupAgentFile", "ambariServer", "centos6",
-                               None, "8440", "root")
-    n = 180
-    time = 100500
-    time_mock.return_value = time
-    hosts = []
-    for i in range(0, n):
-      hosts.append("host" + str(i))
-    # Testing normal case
-    getStatus_mock.return_value = {"return_code": 0,
-                                   "start_time": time + 999}
-    pbootstrap_obj = PBootstrap(hosts, shared_state)
-    pbootstrap_obj.run()
-    self.assertEqual(start_mock.call_count, n)
-    self.assertEqual(interruptBootstrap_mock.call_count, 0)
-
-    start_mock.reset_mock()
-    getStatus_mock.reset_mock()
-    # Testing case of timeout
-    def fake_return_code_generator():
-      call_number = 0
-      while True:
-        call_number += 1
-        if call_number % 5 != 0:   # ~80% of hosts finish successfully
-          yield 0
-        else:
-          yield None
-
-    def fake_start_time_generator():
-      while True:
-        yield time - bootstrap.HOST_BOOTSTRAP_TIMEOUT - 1
-
-    return_code_generator = fake_return_code_generator()
-    start_time_generator = fake_start_time_generator()
-
-    def status_get_item_mock(item):
-      if item == "return_code":
-        return return_code_generator.next()
-      elif item == "start_time":
-        return start_time_generator.next()
-
-    dict_mock = MagicMock()
-    dict_mock.__getitem__.side_effect = status_get_item_mock
-    getStatus_mock.return_value = dict_mock
-
-    pbootstrap_obj.run()
-    self.assertEqual(start_mock.call_count, n)
-    self.assertEqual(interruptBootstrap_mock.call_count, n / 5)
-

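The deleted tests above repeat one pattern: patch a collaborator's __init__ with a MagicMock that returns None, patch its run(), then assert on the command captured in init_mock.call_args. A minimal, self-contained sketch of that pattern using the stdlib unittest.mock (the deleted file used the external mock package); Remote and Runner are hypothetical stand-ins, not Ambari classes:

from unittest import TestCase, main
from unittest.mock import patch

class Remote(object):
    """Stand-in for bootstrap.SSH/SCP: holds a command, run() executes it."""
    def __init__(self, host, command):
        self.host = host
        self.command = command

    def run(self):
        raise RuntimeError("would touch the network")

class Runner(object):
    """Stand-in for Bootstrap: delegates remote work to Remote."""
    def run_on(self, host):
        return Remote(host, "uptime").run()

class TestRunner(TestCase):
    @patch.object(Remote, "run")
    @patch.object(Remote, "__init__")
    def test_run_on(self, init_mock, run_mock):  # bottom decorator binds first
        init_mock.return_value = None  # a mocked __init__ must return None
        run_mock.return_value = 0
        self.assertEqual(Runner().run_on("host1"), 0)
        # Constructor args land in call_args without self, which is why the
        # tests above read the command string at init_mock.call_args[0][3]
        self.assertEqual(str(init_mock.call_args[0][1]), "uptime")

if __name__ == "__main__":
    main()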
http://git-wip-us.apache.org/repos/asf/ambari/blob/5a93dfd4/ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json.orig
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json.orig b/ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json.orig
deleted file mode 100644
index 9dad8e1..0000000
--- a/ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json.orig
+++ /dev/null
@@ -1,374 +0,0 @@
-{
-    "configuration_attributes": {
-        "storm-site": {}, 
-        "hdfs-site": {
-            "final": {
-                "dfs.support.append": "true", 
-                "dfs.namenode.http-address": "true"
-            }
-        }, 
-        "storm-env": {}, 
-        "core-site": {
-            "final": {
-                "fs.defaultFS": "true"
-            }
-        }, 
-        "hadoop-policy": {}, 
-        "hdfs-log4j": {}, 
-        "hadoop-env": {}, 
-        "zookeeper-env": {}, 
-        "zookeeper-log4j": {}, 
-        "cluster-env": {}
-    }, 
-    "commandParams": {
-        "command_timeout": "600", 
-        "script": "scripts/nimbus.py", 
-        "script_type": "PYTHON", 
-        "service_package_folder": "HDP/2.1/services/STORM/package", 
-        "hooks_folder": "HDP/2.0.6/hooks"
-    }, 
-    "roleCommand": "START", 
-    "clusterName": "pacan", 
-    "hostname": "c6402.ambari.apache.org", 
-    "hostLevelParams": {
-        "jdk_location": "http://c6401.ambari.apache.org:8080/resources/";, 
-        "ambari_db_rca_password": "mapred", 
-        "java_home": "/usr/jdk64/jdk1.7.0_45", 
-        "ambari_db_rca_url": 
"jdbc:postgresql://c6401.ambari.apache.org/ambarirca", 
-        "jce_name": "UnlimitedJCEPolicyJDK7.zip", 
-        "oracle_jdbc_url": 
"http://c6401.ambari.apache.org:8080/resources//ojdbc6.jar";, 
-        "repo_info": 
"[{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.2\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0\"},{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.20\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\"}]";,
 
-        "group_list": "[\"hadoop\",\"users\"]", 
-        "package_list": "[{\"name\":\"storm_2_2_0_0_*\"}]", 
-        "stack_version": "2.2", 
-        "stack_name": "HDP", 
-        "db_name": "ambari", 
-        "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
-        "ambari_db_rca_username": "mapred", 
-        "db_driver_filename": "mysql-connector-java.jar", 
-        "user_list": "[\"ambari-qa\",\"zookeeper\",\"hdfs\",\"storm\"]", 
-        "mysql_jdbc_url": 
"http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar";
-    }, 
-    "commandType": "EXECUTION_COMMAND", 
-    "roleParams": {}, 
-    "serviceName": "STORM", 
-    "role": "NIMBUS", 
-    "forceRefreshConfigTags": [], 
-    "taskId": 54, 
-    "public_hostname": "c6402.ambari.apache.org", 
-    "configurations": {
-        "storm-site": {
-            "topology.tuple.serializer": 
"backtype.storm.serialization.types.ListDelegateSerializer", 
-            "topology.workers": "1", 
-            "drpc.worker.threads": "64", 
-            "storm.zookeeper.servers": "['c6402.ambari.apache.org']", 
-            "supervisor.heartbeat.frequency.secs": "5", 
-            "topology.executor.send.buffer.size": "1024", 
-            "drpc.childopts": "-Xmx768m", 
-            "nimbus.thrift.port": "6627", 
-            "storm.zookeeper.retry.intervalceiling.millis": "30000", 
-            "storm.local.dir": "/hadoop/storm", 
-            "topology.receiver.buffer.size": "8", 
-            "storm.messaging.netty.client_worker_threads": "1", 
-            "transactional.zookeeper.root": "/transactional", 
-            "topology.skip.missing.kryo.registrations": "false", 
-            "worker.heartbeat.frequency.secs": "1", 
-            "zmq.hwm": "0", 
-            "storm.zookeeper.connection.timeout": "15000", 
-            "_storm.thrift.secure.transport": "SECURED_TRANSPORT_CLASS", 
-            "storm.messaging.netty.server_worker_threads": "1", 
-            "supervisor.worker.start.timeout.secs": "120", 
-            "zmq.threads": "1", 
-            "topology.acker.executors": "null", 
-            "storm.local.mode.zmq": "false", 
-            "topology.max.task.parallelism": "null", 
-            "topology.max.error.report.per.interval": "5", 
-            "storm.zookeeper.port": "2181", 
-            "drpc.queue.size": "128", 
-            "worker.childopts": "-Xmx768m _JAAS_PLACEHOLDER 
-javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM",
 
-            "nimbus.childopts": "-Xmx1024m _JAAS_PLACEHOLDER 
-javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8649,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM",
 
-            "storm.zookeeper.retry.times": "5", 
-            "nimbus.monitor.freq.secs": "10", 
-            "storm.cluster.mode": "distributed", 
-            "dev.zookeeper.path": "/tmp/dev-storm-zookeeper", 
-            "drpc.invocations.port": "3773", 
-            "_storm.thrift.nonsecure.transport": 
"NON_SECURED_TRANSPORT_CLASS", 
-            "storm.zookeeper.root": "/storm", 
-            "logviewer.childopts": "-Xmx128m", 
-            "transactional.zookeeper.port": "null", 
-            "topology.worker.childopts": "null", 
-            "topology.max.spout.pending": "1000",
-            "nimbus.cleanup.inbox.freq.secs": "600", 
-            "storm.messaging.netty.min_wait_ms": "100", 
-            "nimbus.task.timeout.secs": "30", 
-            "nimbus.thrift.max_buffer_size": "1048576", 
-            "topology.sleep.spout.wait.strategy.time.ms": "1", 
-            "topology.optimize": "true", 
-            "nimbus.reassign": "true", 
-            "storm.messaging.transport": 
"backtype.storm.messaging.netty.Context", 
-            "logviewer.appender.name": "A1", 
-            "nimbus.host": "c6402.ambari.apache.org", 
-            "ui.port": "8744", 
-            "supervisor.slots.ports": "[6700, 6701]", 
-            "nimbus.file.copy.expiration.secs": "600", 
-            "supervisor.monitor.frequency.secs": "3", 
-            "ui.childopts": "-Xmx768m _JAAS_PLACEHOLDER", 
-            "transactional.zookeeper.servers": "null", 
-            "zmq.linger.millis": "5000", 
-            "topology.error.throttle.interval.secs": "10", 
-            "topology.worker.shared.thread.pool.size": "4", 
-            "java.library.path": 
"/usr/local/lib:/opt/local/lib:/usr/lib:/usr/hdp/current/storm/lib", 
-            "topology.spout.wait.strategy": 
"backtype.storm.spout.SleepSpoutWaitStrategy", 
-            "task.heartbeat.frequency.secs": "3", 
-            "topology.transfer.buffer.size": "1024", 
-            "storm.zookeeper.session.timeout": "20000", 
-            "topology.executor.receive.buffer.size": "1024", 
-            "topology.stats.sample.rate": "0.05", 
-            "topology.fall.back.on.java.serialization": "true", 
-            "supervisor.childopts": "-Xmx256m _JAAS_PLACEHOLDER 
-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false 
-Dcom.sun.management.jmxremote.authenticate=false 
-Dcom.sun.management.jmxremote.port=56431 
-javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM",
 
-            "topology.enable.message.timeouts": "true", 
-            "storm.messaging.netty.max_wait_ms": "1000", 
-            "nimbus.topology.validator": 
"backtype.storm.nimbus.DefaultTopologyValidator", 
-            "nimbus.supervisor.timeout.secs": "60", 
-            "topology.disruptor.wait.strategy": 
"com.lmax.disruptor.BlockingWaitStrategy", 
-            "nimbus.inbox.jar.expiration.secs": "3600", 
-            "drpc.port": "3772", 
-            "topology.kryo.factory": 
"backtype.storm.serialization.DefaultKryoFactory", 
-            "storm.zookeeper.retry.interval": "1000", 
-            "storm.messaging.netty.max_retries": "30", 
-            "topology.tick.tuple.freq.secs": "null", 
-            "drpc.request.timeout.secs": "600", 
-            "nimbus.task.launch.secs": "120", 
-            "task.refresh.poll.secs": "10", 
-            "topology.message.timeout.secs": "30", 
-            "storm.messaging.netty.buffer_size": "5242880", 
-            "topology.state.synchronization.timeout.secs": "60", 
-            "supervisor.worker.timeout.secs": "30", 
-            "topology.trident.batch.emit.interval.millis": "500", 
-            "topology.builtin.metrics.bucket.size.secs": "60", 
-            "logviewer.port": "8000", 
-            "topology.debug": "false"
-        }, 
-        "hdfs-site": {
-            "dfs.namenode.avoid.write.stale.datanode": "true", 
-            "dfs.namenode.kerberos.internal.spnego.principal": 
"${dfs.web.authentication.kerberos.principal}", 
-            "dfs.namenode.checkpoint.txns": "1000000", 
-            "dfs.block.access.token.enable": "true", 
-            "dfs.support.append": "true", 
-            "dfs.datanode.address": "0.0.0.0:1019", 
-            "dfs.cluster.administrators": " hdfs", 
-            "dfs.journalnode.kerberos.principal": "jn/[email protected]", 
-            "dfs.datanode.balance.bandwidthPerSec": "6250000", 
-            "dfs.namenode.safemode.threshold-pct": "1.0f", 
-            "dfs.namenode.checkpoint.edits.dir": 
"${dfs.namenode.checkpoint.dir}", 
-            "dfs.permissions.enabled": "true", 
-            "dfs.namenode.kerberos.principal": "nn/[email protected]", 
-            "dfs.namenode.checkpoint.dir": "/hadoop/hdfs/namesecondary", 
-            "dfs.https.port": "50470", 
-            "dfs.namenode.https-address": "c6402.ambari.apache.org:50470", 
-            "dfs.secondary.namenode.kerberos.https.principal": 
"HTTP/[email protected]", 
-            "dfs.blockreport.initialDelay": "120", 
-            "dfs.journalnode.edits.dir": "/grid/0/hdfs/journal", 
-            "dfs.blocksize": "134217728", 
-            "dfs.client.read.shortcircuit": "true", 
-            "dfs.datanode.max.transfer.threads": "1024", 
-            "dfs.heartbeat.interval": "3", 
-            "dfs.replication": "3", 
-            "dfs.namenode.handler.count": "40", 
-            "dfs.web.authentication.kerberos.keytab": 
"/etc/security/keytabs/spnego.service.keytab", 
-            "fs.permissions.umask-mode": "022", 
-            "dfs.namenode.stale.datanode.interval": "30000", 
-            "dfs.datanode.ipc.address": "0.0.0.0:8010", 
-            "dfs.namenode.name.dir": "/hadoop/hdfs/namenode", 
-            "dfs.secondary.namenode.kerberos.internal.spnego.principal": 
"${dfs.web.authentication.kerberos.principal}", 
-            "dfs.datanode.data.dir": "/hadoop/hdfs/data", 
-            "dfs.namenode.http-address": "c6402.ambari.apache.org:50070", 
-            "dfs.webhdfs.enabled": "true", 
-            "dfs.datanode.failed.volumes.tolerated": "0", 
-            "dfs.namenode.accesstime.precision": "0", 
-            "dfs.namenode.avoid.read.stale.datanode": "true", 
-            "dfs.namenode.secondary.http-address": 
"c6402.ambari.apache.org:50090", 
-            "dfs.datanode.kerberos.principal": "dn/[email protected]", 
-            "dfs.journalnode.keytab.file": 
"/etc/security/keytabs/jn.service.keytab", 
-            "dfs.datanode.http.address": "0.0.0.0:1022", 
-            "dfs.datanode.du.reserved": "1073741824", 
-            "dfs.client.read.shortcircuit.streams.cache.size": "4096", 
-            "dfs.namenode.kerberos.https.principal": "HTTP/[email protected]", 
-            "dfs.secondary.namenode.keytab.file": 
"/etc/security/keytabs/nn.service.keytab", 
-            "dfs.web.authentication.kerberos.principal": 
"HTTP/[email protected]", 
-            "dfs.secondary.namenode.kerberos.principal": 
"nn/[email protected]", 
-            "dfs.datanode.keytab.file": 
"/etc/security/keytabs/dn.service.keytab", 
-            "dfs.namenode.keytab.file": 
"/etc/security/keytabs/nn.service.keytab", 
-            "dfs.permissions.superusergroup": "hdfs", 
-            "dfs.journalnode.http-address": "0.0.0.0:8480", 
-            "dfs.journalnode.kerberos.internal.spnego.principal": 
"HTTP/[email protected]", 
-            "dfs.domain.socket.path": "/var/lib/hadoop-hdfs/dn_socket", 
-            "dfs.namenode.write.stale.datanode.ratio": "1.0f", 
-            "dfs.hosts.exclude": "/etc/hadoop/conf/dfs.exclude", 
-            "dfs.datanode.data.dir.perm": "750", 
-            "dfs.namenode.name.dir.restore": "true", 
-            "dfs.replication.max": "50", 
-            "dfs.namenode.checkpoint.period": "21600",
-            "dfs.http.policy": "HTTP_ONLY"
-        }, 
-        "storm-env": {
-            "storm_log_dir": "/var/log/storm", 
-            "storm_principal_name": "[email protected]", 
-            "storm_pid_dir": "/var/run/storm", 
-            "storm_user": "storm", 
-            "content": "\n#!/bin/bash\n\n# Set Storm specific environment 
variables here.\n\n# The java implementation to use.\nexport 
JAVA_HOME={{java64_home}}\n\n# export STORM_CONF_DIR=\"\"", 
-            "nimbus_principal_name": "nimbus/[email protected]", 
-            "storm_ui_keytab": 
"/etc/security/keytabs/http.storm.service.keytab",
-            "nimbus_keytab": "/etc/security/keytabs/nimbus.service.keytab", 
-            "storm_keytab": "/etc/security/keytabs/storm.headless.keytab",
-            "storm_ui_principal_name": "HTTP/_HOST"
-        }, 
-        "core-site": {
-            "io.serializations": 
"org.apache.hadoop.io.serializer.WritableSerialization", 
-            "fs.trash.interval": "360", 
-            "hadoop.security.authentication": "kerberos", 
-            "io.compression.codecs": 
"org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec",
 
-            "hadoop.proxyuser.falcon.hosts": "*", 
-            "mapreduce.jobtracker.webinterface.trusted": "false", 
-            "hadoop.security.authorization": "true", 
-            "fs.defaultFS": "hdfs://c6402.ambari.apache.org:8020", 
-            "ipc.server.tcpnodelay": "true", 
-            "ipc.client.connect.max.retries": "50", 
-            "ipc.client.idlethreshold": "8000", 
-            "io.file.buffer.size": "131072", 
-            "hadoop.security.auth_to_local": 
"RULE:[2:$1@$0](rm@.*EXAMPLE.COM)s/.*/yarn/\nRULE:[2:$1@$0](nm@.*EXAMPLE.COM)s/.*/yarn/\nRULE:[2:$1@$0](nn@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](dn@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](jn/_HOST@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](falcon@.*EXAMPLE.COM)s/.*/falcon/\nDEFAULT",
 
-            "ipc.client.connection.maxidletime": "30000", 
-            "hadoop.proxyuser.falcon.groups": "users"
-        }, 
-        "hadoop-policy": {
-            "security.job.client.protocol.acl": "*", 
-            "security.job.task.protocol.acl": "*", 
-            "security.datanode.protocol.acl": "*", 
-            "security.namenode.protocol.acl": "*", 
-            "security.client.datanode.protocol.acl": "*", 
-            "security.inter.tracker.protocol.acl": "*", 
-            "security.refresh.usertogroups.mappings.protocol.acl": "hadoop", 
-            "security.client.protocol.acl": "*", 
-            "security.refresh.policy.protocol.acl": "hadoop", 
-            "security.admin.operations.protocol.acl": "hadoop", 
-            "security.inter.datanode.protocol.acl": "*"
-        }, 
-        "hdfs-log4j": {
-            "content": "\n#\n# Licensed to the Apache Software Foundation 
(ASF) under one\n# or more contributor license agreements.  See the NOTICE 
file\n# distributed with this work for additional information\n# regarding 
copyright ownership.  The ASF licenses this file\n# to you under the Apache 
License, Version 2.0 (the\n# \"License\"); you may not use this file except in 
compliance\n# with the License.  You may obtain a copy of the License at\n#\n#  
http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable 
law or agreed to in writing,\n# software distributed under the License is 
distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 
ANY\n# KIND, either express or implied.  See the License for the\n# specific 
language governing permissions and limitations\n# under the License.\n#\n\n\n# 
Define some default values that can be overridden by system properties\n# To 
change daemon root logger use hadoop_root_logger in hadoop-env\nhadoop.root.lo
 gger=INFO,console\nhadoop.log.dir=.\nhadoop.log.file=hadoop.log\n\n\n# Define 
the root logger to the system property 
\"hadoop.root.logger\".\nlog4j.rootLogger=${hadoop.root.logger}, 
EventCounter\n\n# Logging Threshold\nlog4j.threshhold=ALL\n\n#\n# Daily Rolling 
File 
Appender\n#\n\nlog4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n#
 Rollver at midnight\nlog4j.appender.DRFA.DatePattern=.yyyy-MM-dd\n\n# 30-day 
backup\n#log4j.appender.DRFA.MaxBackupIndex=30\nlog4j.appender.DRFA.layout=org.apache.log4j.PatternLayout\n\n#
 Pattern format: Date LogLevel LoggerName 
LogMessage\nlog4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: 
%m%n\n# Debugging Pattern 
format\n#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n\n\n\n#\n# console\n# Add \"console\" to rootlogger above if 
you want to use 
this\n#\n\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd
 HH:mm:ss} %p %c{2}: %m%n\n\n#\n# TaskLog Appender\n#\n\n#Default 
values\nhadoop.tasklog.taskid=null\nhadoop.tasklog.iscleanup=false\nhadoop.tasklog.noKeepSplits=4\nhadoop.tasklog.totalLogFileSize=100\nhadoop.tasklog.purgeLogSplits=true\nhadoop.tasklog.logsRetainHours=12\n\nlog4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender\nlog4j.appender.TLA.taskId=${hadoop.tasklog.taskid}\nlog4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}\nlog4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}\n\nlog4j.appender.TLA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.TLA.layout.ConversionPattern=%d{ISO8601}
 %p %c: %m%n\n\n#\n#Security audit 
appender\n#\nhadoop.security.logger=INFO,console\nhadoop.security.log.maxfilesize=256MB\nhadoop.security.log.maxbackupindex=20\nlog4j.category.SecurityLogger=${hadoop.security.logger}\nhadoop.security.log.file=SecurityAuth.audit\nlog4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601}
 %p %c: 
%m%n\nlog4j.appender.DRFAS.DatePattern=.yyyy-MM-dd\n\nlog4j.appender.RFAS=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.RFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601}
 %p %c: 
%m%n\nlog4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}\nlog4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}\n\n#\n#
 hdfs audit 
logging\n#\nhdfs.audit.logger=INFO,console\nlog4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}\nlog4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false\nlog4j.appender.DRFAAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log\nlog4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601}
 %p %c{2}: %m%n\nlog4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n# 
mapred audit 
logging\n#\nmapred.audit.logger=INFO,console\nlog4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}\nlog4j.additivity.org.apache.hadoop.mapred.AuditLogger=false\nlog4j.appender.MRAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log\nlog4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601}
 %p %c{2}: %m%n\nlog4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n# Rolling 
File 
Appender\n#\n\nlog4j.appender.RFA=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n# Logfile size and and 30-day 
backups\nlog4j.appender.RFA.MaxFileSize=256MB\nlog4j.appender.RFA.MaxBackupIndex=10\n\nlog4j.appender.RFA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601}
 %-5p %c{2} - %m%n\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} 
%-5p %c{2} (%F:%M(%L)) - %m%n\n\n\n# Custom Logging 
levels\n\nhadoop.metrics.log.level=INFO\n#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG\n#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG\n#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG\nlog4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}\n\n#
 Jets3t 
library\nlog4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR\n\n#\n#
 Null Appender\n# Trap security logger on the hadoop client 
side\n#\nlog4j.appender.NullAppender=org.apache.log4j.varia.NullAppender\n\n#\n#
 Event Counter Appender\n# Sends counts of logging messages at different 
severity levels to Hadoop Metrics.\n#\nlog4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter\n\n# 
Removes \"deprecated\" 
messages\nlog4j.logger.org.apache.hadoop.conf.Configuration.deprecation=WARN"
-        }, 
-        "hadoop-env": {
-            "namenode_opt_maxnewsize": "200m", 
-            "hdfs_log_dir_prefix": "/var/log/hadoop", 
-            "namenode_heapsize": "1024m", 
-            "proxyuser_group": "users", 
-            "hdfs_user_keytab": "/etc/security/keytabs/hdfs.headless.keytab", 
-            "content": "\n# Set Hadoop-specific environment variables 
here.\n\n# The only required environment variable is JAVA_HOME.  All others 
are\n# optional.  When running a distributed configuration it is best to\n# set 
JAVA_HOME in this file, so that it is correctly defined on\n# remote 
nodes.\n\n# The java implementation to use.  Required.\nexport 
JAVA_HOME={{java_home}}\nexport HADOOP_HOME_WARN_SUPPRESS=1\n\n# Hadoop home 
directory\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n# Hadoop 
Configuration Directory\n#TODO: if env var set that can cause problems\nexport 
HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n{# this is different 
for HDP1 #}\n# Path to jsvc required by secure HDP 2.0 datanode\nexport 
JSVC_HOME={{jsvc_path}}\n\n\n# The maximum amount of heap to use, in MB. 
Default is 1000.\nexport HADOOP_HEAPSIZE=\"{{hadoop_heapsize}}\"\n\nexport 
HADOOP_NAMENODE_INIT_HEAPSIZE=\"-Xms{{namenode_heapsize}}\"\n\n# Extra Java 
runtime options.  Empty by default.\nexport HADOOP_OPTS=\"-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}\"\n\n# 
Command specific options appended to HADOOP_OPTS when specified\nexport 
HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} 
-XX:PermSize={{namenode_opt_permsize}} 
-XX:MaxPermSize={{namenode_opt_maxpermsize}} 
-Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc 
-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps 
-Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} 
-Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT 
${HADOOP_NAMENODE_OPTS}\"\nHADOOP_JOBTRACKER_OPTS=\"-server 
-XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} 
-Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails 
-XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} 
-Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT 
-Dhadoop.mapreduce.jobsummary.logger=INFO,JSA 
${HADOOP_JOBTRACKER_OPTS}\"\n\nHADOOP_TASKTRACKER_OPTS=\"-server 
-Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console 
-Dmapred.audit.logger=ERROR,console 
${HADOOP_TASKTRACKER_OPTS}\"\nHADOOP_DATANODE_OPTS=\"-Xmx{{dtnode_heapsize}} 
-Dhadoop.security.logger=ERROR,DRFAS 
${HADOOP_DATANODE_OPTS}\"\nHADOOP_BALANCER_OPTS=\"-server 
-Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}\"\n\nexport 
HADOOP_SECONDARYNAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 
-XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} 
-XX:PermSize={{namenode_opt_permsize}} 
-XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails 
-XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps ${HADOOP_NAMENODE_INIT_HEAPSIZE} 
-Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS 
-Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_SECONDARYNAMENODE_OPTS}\"\n\n# The 
following applies to multiple commands (fs, dfs, fsck, distcp etc)\nexport 
HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n# On secure 
datanodes, user to run the datanode as after dropping privileges\nexport 
HADOOP_SECURE_DN_USER={{hdfs_user}}\n\n# Extra ssh options.  Empty by 
default.\nexport HADOOP_SSH_OPTS=\"-o ConnectTimeout=5 -o 
SendEnv=HADOOP_CONF_DIR\"\n\n# Where log files are stored.  $HADOOP_HOME/logs 
by default.\nexport HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# History 
server logs\nexport HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER\n\n# 
Where log files are stored in the secure data environment.\nexport 
HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# File naming remote slave hosts.  
$HADOOP_HOME/conf/slaves by default.\n# export 
HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves\n\n# host:path where hadoop code 
should be rsync'd from.  Unset by default.\n# export 
HADOOP_MASTER=master:/home/$USER/src/hadoop\n\n# Seconds to sleep between slave 
commands.  Unset by default.  This\n# can be useful in large clusters, where, 
e.g., slave rsyncs can\n# otherwise arrive faster than the master can service 
them.\n# export HADOOP_SLAVE_SLEEP=0.1\n\n# The directory where pid files are 
stored. /tmp by default.\nexport 
HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER\nexport 
HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# 
History server pid\nexport 
HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER\n\nYARN_RESOURCEMANAGER_OPTS=\"-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\"\n\n#
 A string representing this instance of hadoop. $USER by default.\nexport 
HADOOP_IDENT_STRING=$USER\n\n# The scheduling priority for daemon processes.  See 'man nice'.\n\n# export 
HADOOP_NICENESS=10\n\n# Use libraries from standard 
classpath\nJAVA_JDBC_LIBS=\"\"\n#Add libraries required by mysql connector\nfor 
jarFile in `ls /usr/share/java/*mysql* 2>/dev/null`\ndo\n  
JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add libraries required by 
oracle connector\nfor jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`\ndo\n 
 JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add libraries required by 
nodemanager\nMAPREDUCE_LIBS={{mapreduce_libs_path}}\nexport 
HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}\n\nif [ 
-d \"/usr/lib/tez\" ]; then\n  export 
HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/usr/lib/tez/*:/usr/lib/tez/lib/*:/etc/tez/conf\nfi\n\n#
 Setting path to hdfs command line\nexport 
HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\n\n#Mostly required for hadoop 
2.0\nexport 
JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64\n\n#Hadoop logging options\nexport HADOOP_ROOT_LOGGER={{hadoop_root_logger}}", 
-            "hdfs_user": "hdfs", 
-            "namenode_opt_newsize": "200m",
-            "namenode_opt_permsize" : "128m",
-            "namenode_opt_maxpermsize" : "256m", 
-            "dtnode_heapsize": "1024m", 
-            "hadoop_root_logger": "INFO,RFA", 
-            "hadoop_heapsize": "1024", 
-            "hadoop_pid_dir_prefix": "/var/run/hadoop", 
-            "hdfs_principal_name": "hdfs"
-        }, 
-        "zookeeper-env": {
-            "clientPort": "2181", 
-            "zookeeper_keytab_path": 
"/etc/security/keytabs/zk.service.keytab", 
-            "zk_user": "zookeeper", 
-            "zk_log_dir": "/var/log/zookeeper", 
-            "syncLimit": "5", 
-            "content": "\nexport JAVA_HOME={{java64_home}}\nexport 
ZOO_LOG_DIR={{zk_log_dir}}\nexport ZOOPIDFILE={{zk_pid_file}}\nexport 
SERVER_JVMFLAGS={{zk_server_heapsize}}\nexport JAVA=$JAVA_HOME/bin/java\nexport 
CLASSPATH=$CLASSPATH:/usr/share/zookeeper/*\n\n{% if security_enabled 
%}\nexport SERVER_JVMFLAGS=\"$SERVER_JVMFLAGS 
-Djava.security.auth.login.config={{zk_server_jaas_file}}\"\nexport 
CLIENT_JVMFLAGS=\"$CLIENT_JVMFLAGS 
-Djava.security.auth.login.config={{zk_client_jaas_file}}\"\n{% endif %}", 
-            "initLimit": "10", 
-            "zk_pid_dir": "/var/run/zookeeper", 
-            "zk_data_dir": "/hadoop/zookeeper", 
-            "zookeeper_principal_name": "zookeeper/[email protected]", 
-            "tickTime": "2000"
-        }, 
-        "zookeeper-log4j": {
-            "content": "\n#\n#\n# Licensed to the Apache Software Foundation 
(ASF) under one\n# or more contributor license agreements.  See the NOTICE 
file\n# distributed with this work for additional information\n# regarding 
copyright ownership.  The ASF licenses this file\n# to you under the Apache 
License, Version 2.0 (the\n# \"License\"); you may not use this file except in 
compliance\n# with the License.  You may obtain a copy of the License at\n#\n#  
 http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable 
law or agreed to in writing,\n# software distributed under the License is 
distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 
ANY\n# KIND, either express or implied.  See the License for the\n# specific 
language governing permissions and limitations\n# under the 
License.\n#\n#\n#\n\n#\n# ZooKeeper Logging Configuration\n#\n\n# DEFAULT: 
console appender only\nlog4j.rootLogger=INFO, CONSOLE\n\n# Example with rolling 
log file\n#log4j.rootLogger=DEBUG, CONSOLE, ROLLINGFILE\n\n# Example with rolling log file 
and tracing\n#log4j.rootLogger=TRACE, CONSOLE, ROLLINGFILE, TRACEFILE\n\n#\n# 
Log INFO level and above messages to the 
console\n#\nlog4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender\nlog4j.appender.CONSOLE.Threshold=INFO\nlog4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout\nlog4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601}
 - %-5p [%t:%C{1}@%L] - %m%n\n\n#\n# Add ROLLINGFILE to rootLogger to get log 
file output\n#    Log DEBUG level and above messages to a log 
file\nlog4j.appender.ROLLINGFILE=org.apache.log4j.RollingFileAppender\nlog4j.appender.ROLLINGFILE.Threshold=DEBUG\nlog4j.appender.ROLLINGFILE.File=zookeeper.log\n\n#
 Max log file size of 10MB\nlog4j.appender.ROLLINGFILE.MaxFileSize=10MB\n# 
uncomment the next line to limit number of backup 
files\n#log4j.appender.ROLLINGFILE.MaxBackupIndex=10\n\nlog4j.appender.ROLLINGFILE.layout=org.apache.log4j.PatternLayout\nlog4j.appender.ROLLINGFILE.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - 
%m%n\n\n\n#\n# Add TRACEFILE to rootLogger to get log file output\n#    Log 
DEBUG level and above messages to a log 
file\nlog4j.appender.TRACEFILE=org.apache.log4j.FileAppender\nlog4j.appender.TRACEFILE.Threshold=TRACE\nlog4j.appender.TRACEFILE.File=zookeeper_trace.log\n\nlog4j.appender.TRACEFILE.layout=org.apache.log4j.PatternLayout\n###
 Notice we are including log4j's NDC here 
(%x)\nlog4j.appender.TRACEFILE.layout.ConversionPattern=%d{ISO8601} - %-5p 
[%t:%C{1}@%L][%x] - %m%n"
-        }, 
-        "cluster-env": {
-            "security_enabled": "true", 
-            "ignore_groupsusers_create": "false", 
-            "smokeuser_keytab": 
"/etc/security/keytabs/smokeuser.headless.keytab", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "kinit_path_local": "/usr/bin", 
-            "user_group": "hadoop", 
-            "smokeuser": "ambari-qa"
-        }
-    }, 
-    "configurationTags": {
-        "storm-site": {
-            "tag": "version1412001710682"
-        }, 
-        "hdfs-site": {
-            "tag": "version1412001710682"
-        }, 
-        "storm-env": {
-            "tag": "version1412001710682"
-        }, 
-        "core-site": {
-            "tag": "version1412001710682"
-        }, 
-        "hadoop-policy": {
-            "tag": "version1411996371868"
-        }, 
-        "hdfs-log4j": {
-            "tag": "version1411996371868"
-        }, 
-        "hadoop-env": {
-            "tag": "version1412001710682"
-        }, 
-        "zookeeper-env": {
-            "tag": "version1412001710682"
-        }, 
-        "zookeeper-log4j": {
-            "tag": "version1"
-        }, 
-        "cluster-env": {
-            "tag": "version1412001710681"
-        }
-    }, 
-    "commandId": "12-1", 
-    "clusterHostInfo": {
-        "snamenode_host": [
-            "c6402.ambari.apache.org"
-        ], 
-        "drpc_server_hosts": [
-            "c6402.ambari.apache.org"
-        ], 
-        "nimbus_hosts": [
-            "c6402.ambari.apache.org"
-        ], 
-        "all_ping_ports": [
-            "8670"
-        ], 
-        "all_hosts": [
-            "c6402.ambari.apache.org"
-        ], 
-        "slave_hosts": [
-            "c6402.ambari.apache.org"
-        ], 
-        "namenode_host": [
-            "c6402.ambari.apache.org"
-        ], 
-        "storm_ui_server_hosts": [
-            "c6402.ambari.apache.org"
-        ], 
-        "storm_rest_api_hosts": [
-            "c6402.ambari.apache.org"
-        ], 
-        "ambari_server_host": [
-            "c6401.ambari.apache.org"
-        ], 
-        "zookeeper_hosts": [
-            "c6402.ambari.apache.org"
-        ], 
-        "supervisor_hosts": [
-            "c6402.ambari.apache.org"
-        ]
-    }
-}
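
A note on the payload above: it is the tail of a deleted Ambari test fixture
describing an execution command -- per-service dictionaries under what Ambari
command JSON keeps in "configurations", matching version tags under
"configurationTags", and host-to-component assignments under "clusterHostInfo".
A minimal sketch of how a test could load and sanity-check such a fixture (the
file name secured.json and the specific checks are illustrative, not taken
from this commit):

import json

# Hypothetical fixture path; the resource deleted here is not named in this
# excerpt.
with open("secured.json") as f:
    command = json.load(f)

configs = command["configurations"]
tags = command["configurationTags"]

# Every configuration section should carry a version tag.
assert set(configs) <= set(tags)

# Spot-check values quoted in the payload above.
assert command["commandId"] == "12-1"
assert configs["cluster-env"]["security_enabled"] == "true"
assert configs["core-site"]["hadoop.security.authentication"] == "kerberos"
assert command["clusterHostInfo"]["ambari_server_host"] == ["c6401.ambari.apache.org"]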

http://git-wip-us.apache.org/repos/asf/ambari/blob/5a93dfd4/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test.xml 
b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test.xml
index 827348a..33234bc 100644
--- 
a/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test.xml
+++ 
b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test.xml
@@ -17,7 +17,7 @@
 -->
 <upgrade xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
   <target>2.2.*.*</target>
-  <target-stack>HDP-2.1.1</target-stack>
+  <target-stack>HDP-2.2.0</target-stack>
   <type>ROLLING</type>
   
   <order>
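
The one-line fix above aligns <target-stack> with the <target> version
pattern: a pack that targets 2.2.*.* should name HDP-2.2.0, not the source
stack HDP-2.1.1, as the stack it upgrades to. A quick consistency check one
could run over such upgrade packs (illustrative, not part of this commit):

import xml.etree.ElementTree as ET

# Parse the upgrade pack patched above and compare the fixed fields of
# <target> ("2.2.*.*") against the version part of <target-stack>.
root = ET.parse("upgrade_test.xml").getroot()
target = root.findtext("target")              # "2.2.*.*"
target_stack = root.findtext("target-stack")  # "HDP-2.2.0"

fixed_fields = [f for f in target.split(".") if f != "*"]    # ["2", "2"]
stack_version = target_stack.split("-", 1)[1]                # "2.2.0"
stack_fields = stack_version.split(".")[:len(fixed_fields)]  # ["2", "2"]

# "HDP-2.2.0" passes; the old value "HDP-2.1.1" would have failed here.
assert stack_fields == fixed_fields, (target, target_stack)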

http://git-wip-us.apache.org/repos/asf/ambari/blob/5a93dfd4/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java.orig
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java.orig
 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java.orig
deleted file mode 100644
index 55919a7..0000000
--- 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java.orig
+++ /dev/null
@@ -1,282 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.browser;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.ViewResourceHandler;
-import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.apache.ambari.view.hive.client.Cursor;
-import org.apache.ambari.view.hive.client.IConnectionFactory;
-import org.apache.ambari.view.hive.resources.jobs.ResultsPaginationController;
-import org.apache.ambari.view.hive.utils.BadRequestFormattedException;
-import org.apache.ambari.view.hive.utils.ServiceFormattedException;
-import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
-import org.apache.commons.collections4.map.PassiveExpiringMap;
-import org.apache.hive.service.cli.thrift.TSessionHandle;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.inject.Inject;
-import javax.ws.rs.*;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Callable;
-
-/**
- * Database access resource
- */
-public class HiveBrowserService {
-  @Inject
-  ViewResourceHandler handler;
-  @Inject
-  protected ViewContext context;
-
-  protected final static Logger LOG =
-      LoggerFactory.getLogger(HiveBrowserService.class);
-
-  private static final long EXPIRING_TIME = 10*60*1000;  // 10 minutes
-  private static Map<String, Cursor> resultsCache;
-  private IConnectionFactory connectionFactory;
-
-  public static Map<String, Cursor> getResultsCache() {
-    if (resultsCache == null) {
-      PassiveExpiringMap<String, Cursor> resultsCacheExpiringMap =
-          new PassiveExpiringMap<String, Cursor>(EXPIRING_TIME);
-      resultsCache = Collections.synchronizedMap(resultsCacheExpiringMap);
-    }
-    return resultsCache;
-  }
-
-  private IConnectionFactory getConnectionFactory() {
-    if (connectionFactory == null)
-      connectionFactory = new SharedObjectsFactory(context);
-    return new SharedObjectsFactory(context);
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response databases(@QueryParam("like")String like,
-                            @QueryParam("first") String fromBeginning,
-                            @QueryParam("count") Integer count,
-                            @QueryParam("columns") final String 
requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-    String curl = null;
-    try {
-      JSONObject response = new JSONObject();
-      TSessionHandle session = 
getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
-      List<String> tables = 
getConnectionFactory().getHiveConnection().ddl().getDBList(session, like);
-      response.put("databases", tables);
-      return Response.ok(response).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database.page")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response databasesPaginated(@QueryParam("like")String like,
-                            @QueryParam("first") String fromBeginning,
-                            @QueryParam("count") Integer count,
-                            @QueryParam("searchId") String searchId,
-                            @QueryParam("format") String format,
-                            @QueryParam("columns") final String 
requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-    String curl = null;
-    try {
-      final String finalLike = like;
-      return ResultsPaginationController.getInstance(context)
-          .request("databases", searchId, false, fromBeginning, count, format,
-                  new Callable<Cursor>() {
-                    @Override
-                    public Cursor call() throws Exception {
-                      TSessionHandle session = 
getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
-                      return 
getConnectionFactory().getHiveConnection().ddl().getDBListCursor(session, 
finalLike);
-                    }
-                  }).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response tablesInDatabase(@PathParam("db") String db,
-                                   @QueryParam("like")String like,
-                                   @QueryParam("first") String fromBeginning,
-                                   @QueryParam("count") Integer count,
-                                   @QueryParam("columns") final String 
requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-    String curl = null;
-    try {
-      JSONObject response = new JSONObject();
-      TSessionHandle session = 
getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
-      List<String> tables = 
getConnectionFactory().getHiveConnection().ddl().getTableList(session, db, 
like);
-      response.put("tables", tables);
-      response.put("database", db);
-      return Response.ok(response).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table.page")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response tablesInDatabasePaginated(@PathParam("db") final String db,
-                                   @QueryParam("like")String like,
-                                   @QueryParam("first") String fromBeginning,
-                                   @QueryParam("count") Integer count,
-                                   @QueryParam("searchId") String searchId,
-                                   @QueryParam("format") String format,
-                                   @QueryParam("columns") final String 
requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-    String curl = null;
-    try {
-      final String finalLike = like;
-      return ResultsPaginationController.getInstance(context)
-          .request(db + ":tables", searchId, false, fromBeginning, count, 
format,
-                  new Callable<Cursor>() {
-                    @Override
-                    public Cursor call() throws Exception {
-                      TSessionHandle session = 
getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
-                      Cursor cursor = 
getConnectionFactory().getHiveConnection().ddl().getTableListCursor(session, 
db, finalLike);
-                      cursor.selectColumns(requestedColumns);
-                      return cursor;
-                    }
-                  }).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table/{table}")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response describeTable(@PathParam("db") String db,
-                                @PathParam("table") String table,
-                                @QueryParam("like") String like,
-                                @QueryParam("columns") String requestedColumns,
-                                @QueryParam("extended") String extended) {
-    boolean extendedTableDescription = (extended != null && 
extended.equals("true"));
-    String curl = null;
-    try {
-      JSONObject response = new JSONObject();
-      TSessionHandle session = 
getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
-      List<ColumnDescription> columnDescriptions = 
getConnectionFactory().getHiveConnection().ddl()
-          .getTableDescription(session, db, table, like, 
extendedTableDescription);
-      response.put("columns", columnDescriptions);
-      response.put("database", db);
-      response.put("table", table);
-      return Response.ok(response).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table/{table}.page")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response describeTablePaginated(@PathParam("db") final String db,
-                                         @PathParam("table") final String 
table,
-                                         @QueryParam("like") final String like,
-                                         @QueryParam("first") String 
fromBeginning,
-                                         @QueryParam("searchId") String 
searchId,
-                                         @QueryParam("count") Integer count,
-                                         @QueryParam("format") String format,
-                                         @QueryParam("columns") final String 
requestedColumns) {
-    String curl = null;
-    try {
-      return ResultsPaginationController.getInstance(context)
-          .request(db + ":tables:" + table + ":columns", searchId, false, 
fromBeginning, count, format,
-              new Callable<Cursor>() {
-                @Override
-                public Cursor call() throws Exception {
-                  TSessionHandle session = 
getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
-                  Cursor cursor = 
getConnectionFactory().getHiveConnection().ddl().
-                      getTableDescriptionCursor(session, db, table, like);
-                  cursor.selectColumns(requestedColumns);
-                  return cursor;
-                }
-              }).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-}
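
Two nits preserved in the deleted .orig copy above: every endpoint's Javadoc
reads "Returns list of databases" even where tables or columns are returned,
and getConnectionFactory() assigns the memoized connectionFactory field but
then returns a fresh SharedObjectsFactory on each call, so the cached instance
is never reused. A minimal sketch of the lazy initialization the field was
presumably meant to provide (Python stand-in; names mirror the Java):

class SharedObjectsFactory:
    """Stand-in for org.apache.ambari.view.hive.utils.SharedObjectsFactory."""
    def __init__(self, context):
        self.context = context

class HiveBrowserService:
    def __init__(self, context):
        self.context = context
        self._connection_factory = None

    def get_connection_factory(self):
        # Create the factory once, then return the cached instance --
        # the deleted Java returned "new SharedObjectsFactory(context)"
        # every time despite assigning the field.
        if self._connection_factory is None:
            self._connection_factory = SharedObjectsFactory(self.context)
        return self._connection_factory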
