AMBARI-22243. Apache Hive 2 LLAP cluster doesn't have the metastore warehouse directory. (stoader)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/43fb5976
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/43fb5976
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/43fb5976

Branch: refs/heads/branch-feature-AMBARI-22008
Commit: 43fb59761595431b2a8832bafe1bfcb2fa85a1fa
Parents: 8c017c1
Author: Toader, Sebastian <stoa...@hortonworks.com>
Authored: Sat Oct 14 07:45:22 2017 +0200
Committer: Toader, Sebastian <stoa...@hortonworks.com>
Committed: Sun Oct 15 07:43:51 2017 +0200

----------------------------------------------------------------------
 .../0.12.0.2.0/package/scripts/hive_interactive.py  | 13 +++++++++++++
 .../2.1.0.3.0/package/scripts/hive_interactive.py   | 13 +++++++++++++
 .../python/stacks/2.5/HIVE/test_hive_server_int.py  | 16 ++++++++++++++++
 3 files changed, 42 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/43fb5976/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
index 2ed3e3a..89060be 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
@@ -62,6 +62,19 @@ def hive_interactive(name=None):
   import params
   MB_TO_BYTES = 1048576
 
+  # if warehouse directory is in DFS
+  if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
+    # Create Hive Metastore Warehouse Dir
+    params.HdfsResource(params.hive_apps_whs_dir,
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.hive_user,
+                        group=params.user_group,
+                        mode=params.hive_apps_whs_mode
+                        )
+  else:
+    Logger.info(format("Not creating warehouse directory '{hive_apps_whs_dir}', as the location is not in DFS."))
+
   # Create Hive User Dir
   params.HdfsResource(params.hive_hdfs_user_dir,
                       type="directory",
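
A minimal sketch of the scheme check added above, for context: it assumes params.whs_dir_protocol holds the scheme parsed from the configured warehouse location and params.default_fs holds the cluster's default filesystem URI (fs.defaultFS); the helper name and the example values are hypothetical.

try:
  from urllib.parse import urlparse  # Python 3
except ImportError:
  from urlparse import urlparse      # Python 2, as used by the Ambari agent scripts

def should_create_warehouse_dir(whs_dir_protocol, default_fs):
  # Create the directory only when the warehouse location has no explicit scheme,
  # or its scheme matches the scheme of the default filesystem.
  return not whs_dir_protocol or whs_dir_protocol == urlparse(default_fs).scheme

print(should_create_warehouse_dir(None, 'hdfs://c6401.ambari.apache.org:8020'))    # True  -> warehouse dir is created
print(should_create_warehouse_dir('hdfs', 'hdfs://c6401.ambari.apache.org:8020'))  # True  -> warehouse dir is created
print(should_create_warehouse_dir('s3a', 'hdfs://c6401.ambari.apache.org:8020'))   # False -> only the skip message is logged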

http://git-wip-us.apache.org/repos/asf/ambari/blob/43fb5976/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py
index 2ed3e3a..89060be 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py
@@ -62,6 +62,19 @@ def hive_interactive(name=None):
   import params
   MB_TO_BYTES = 1048576
 
+  # if warehouse directory is in DFS
+  if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
+    # Create Hive Metastore Warehouse Dir
+    params.HdfsResource(params.hive_apps_whs_dir,
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.hive_user,
+                        group=params.user_group,
+                        mode=params.hive_apps_whs_mode
+                        )
+  else:
+    Logger.info(format("Not creating warehouse directory '{hive_apps_whs_dir}', as the location is not in DFS."))
+
   # Create Hive User Dir
   params.HdfsResource(params.hive_hdfs_user_dir,
                       type="directory",

http://git-wip-us.apache.org/repos/asf/ambari/blob/43fb5976/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
index 3dc78ab..4951c7e 100644
--- a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
+++ b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
@@ -418,6 +418,22 @@ class TestHiveServerInteractive(RMFTestCase):
 
   def assert_configure_default(self, no_tmp=False, default_fs_default=u'hdfs://c6401.ambari.apache.org:8020', with_cs_enabled=False):
 
+    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+                              immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+                              security_enabled = False,
+                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              keytab = UnknownConfigurationMock(),
+                              kinit_path_local = '/usr/bin/kinit',
+                              user = 'hdfs',
+                              dfs_type = '',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              type = 'directory',
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+                              mode = 0777,
+    )
+
     self.assertResourceCalled('HdfsResource', '/user/hive',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
