AMBARI-18393. Hive Server Interactive (HSI) fails to start with 'Permission denied' for User Hive, if HSI starts before HS2.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/565685d3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/565685d3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/565685d3

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 565685d326e5a877f8e4479736e75f59f728c9ec
Parents: f5ad1de
Author: Swapan Shridhar <sshrid...@hortonworks.com>
Authored: Wed Sep 14 15:37:51 2016 -0700
Committer: Swapan Shridhar <sshrid...@hortonworks.com>
Committed: Wed Sep 14 15:37:51 2016 -0700

----------------------------------------------------------------------
 .../0.12.0.2.0/package/scripts/hive_interactive.py |  8 ++++++++
 .../python/stacks/2.5/HIVE/test_hive_server_int.py | 17 ++++++++++++++++-
 2 files changed, 24 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/565685d3/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
index 1bcb954..6511d0e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
@@ -58,6 +58,14 @@ Sets up the configs, jdbc connection and tarball copy to HDFS for Hive Server In
 def hive_interactive(name=None):
   import params
 
+  # Create Hive User Dir
+  params.HdfsResource(params.hive_hdfs_user_dir,
+                      type="directory",
+                      action="create_on_execute",
+                      owner=params.hive_user,
+                      mode=params.hive_hdfs_user_mode
+                      )
+
   # list of properties that should be excluded from the config
   # this approach is a compromise against adding a dedicated config
   # type for hive_server_interactive or needed config groups on a

http://git-wip-us.apache.org/repos/asf/ambari/blob/565685d3/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
index b97c377..7477457 100644
--- a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
+++ b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
@@ -224,7 +224,22 @@ class TestHiveServerInteractive(RMFTestCase):
     self.assertNoMoreResources()
 
 
-  def assert_configure_default(self, no_tmp=False, default_fs_default='hdfs://c6401.ambari.apache.org:8020'):
+  def assert_configure_default(self, no_tmp=False, default_fs_default=u'hdfs://c6401.ambari.apache.org:8020'):
+
+    self.assertResourceCalled('HdfsResource', '/user/hive',
+                              immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+                              security_enabled = False,
+                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              keytab = UnknownConfigurationMock(),
+                              kinit_path_local = '/usr/bin/kinit',
+                              user = 'hdfs',
+                              dfs_type = '',
+                              owner = 'hive',
+                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              type = 'directory',
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+                              mode = 0755,
+                              )
 
     self.assertResourceCalled('Directory', '/etc/hive2',
                               mode=0755,

Reply via email to