#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

"""
from resource_management.core.logger import Logger


def setup_ranger_hive(upgrade_type=None):
  """Set up the Ranger authorization plugin for HiveServer2.

  When a Ranger admin is present this creates the HDFS audit directories
  (if HDFS auditing is enabled) and registers/configures the Hive plugin
  against the Ranger admin, choosing the XML-configuration code path when
  the stack supports it.

  :param upgrade_type: non-None while a stack upgrade is in progress; in
                       that case ``params.version`` is used as the stack
                       version to configure against.
  """
  import params

  if not params.has_ranger_admin:
    Logger.info('Ranger admin not installed')
    return

  # During an upgrade, configure the plugin against the target stack version.
  stack_version = None
  if upgrade_type is not None:
    stack_version = params.version

  # FIX: original message said "command retry enables"; wording is now
  # consistent with setup_ranger_hive_interactive.
  if params.retryAble:
    Logger.info("Hive: Setup ranger: command retry enabled thus retrying if ranger admin is down !")
  else:
    Logger.info("Hive: Setup ranger: command retry not enabled thus skipping if ranger admin is down !")

  if params.xml_configurations_supported and params.enable_ranger_hive and params.xa_audit_hdfs_is_enabled:
    # The audit root belongs to the hdfs user; the hiveServer2 sub-directory
    # is private (0700) to the hive user.
    params.HdfsResource("/ranger/audit",
                        type="directory",
                        action="create_on_execute",
                        owner=params.hdfs_user,
                        group=params.hdfs_user,
                        mode=0o755,
                        recursive_chmod=True
    )
    params.HdfsResource("/ranger/audit/hiveServer2",
                        type="directory",
                        action="create_on_execute",
                        owner=params.hive_user,
                        group=params.hive_user,
                        mode=0o700,
                        recursive_chmod=True
    )
    params.HdfsResource(None, action="execute")

  if params.xml_configurations_supported:
    # v2 of the Ranger REST API is required for Kerberos-aware plugins.
    api_version = None
    if params.stack_supports_ranger_kerberos:
      api_version = 'v2'
    from resource_management.libraries.functions.setup_ranger_plugin_xml import setup_ranger_plugin
    setup_ranger_plugin('hive-server2', 'hive', params.ranger_previous_jdbc_jar,
                        params.ranger_downloaded_custom_connector, params.ranger_driver_curl_source,
                        params.ranger_driver_curl_target, params.java64_home,
                        params.repo_name, params.hive_ranger_plugin_repo,
                        params.ranger_env, params.ranger_plugin_properties,
                        params.policy_user, params.policymgr_mgr_url,
                        params.enable_ranger_hive, conf_dict=params.hive_server_conf_dir,
                        component_user=params.hive_user, component_group=params.user_group, cache_service_list=['hiveServer2'],
                        plugin_audit_properties=params.config['configurations']['ranger-hive-audit'], plugin_audit_attributes=params.config['configuration_attributes']['ranger-hive-audit'],
                        plugin_security_properties=params.config['configurations']['ranger-hive-security'], plugin_security_attributes=params.config['configuration_attributes']['ranger-hive-security'],
                        plugin_policymgr_ssl_properties=params.config['configurations']['ranger-hive-policymgr-ssl'], plugin_policymgr_ssl_attributes=params.config['configuration_attributes']['ranger-hive-policymgr-ssl'],
                        component_list=['hive-client', 'hive-metastore', 'hive-server2'], audit_db_is_enabled=params.xa_audit_db_is_enabled,
                        credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password,
                        ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
                        stack_version_override=stack_version, skip_if_rangeradmin_down=not params.retryAble, api_version=api_version,
                        is_security_enabled=params.security_enabled,
                        is_stack_supports_ranger_kerberos=params.stack_supports_ranger_kerberos,
                        component_user_principal=params.hive_principal if params.security_enabled else None,
                        component_user_keytab=params.hive_server2_keytab if params.security_enabled else None)
  else:
    # Legacy (non-XML) plugin setup path for older stacks.
    from resource_management.libraries.functions.setup_ranger_plugin import setup_ranger_plugin
    setup_ranger_plugin('hive-server2', 'hive', params.ranger_previous_jdbc_jar,
                        params.ranger_downloaded_custom_connector, params.ranger_driver_curl_source,
                        params.ranger_driver_curl_target, params.java64_home,
                        params.repo_name, params.hive_ranger_plugin_repo,
                        params.ranger_env, params.ranger_plugin_properties,
                        params.policy_user, params.policymgr_mgr_url,
                        params.enable_ranger_hive, conf_dict=params.hive_server_conf_dir,
                        component_user=params.hive_user, component_group=params.user_group, cache_service_list=['hiveServer2'],
                        plugin_audit_properties=params.config['configurations']['ranger-hive-audit'], plugin_audit_attributes=params.config['configuration_attributes']['ranger-hive-audit'],
                        plugin_security_properties=params.config['configurations']['ranger-hive-security'], plugin_security_attributes=params.config['configuration_attributes']['ranger-hive-security'],
                        plugin_policymgr_ssl_properties=params.config['configurations']['ranger-hive-policymgr-ssl'], plugin_policymgr_ssl_attributes=params.config['configuration_attributes']['ranger-hive-policymgr-ssl'],
                        component_list=['hive-client', 'hive-metastore', 'hive-server2'], audit_db_is_enabled=params.xa_audit_db_is_enabled,
                        credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password,
                        ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
                        stack_version_override=stack_version, skip_if_rangeradmin_down=not params.retryAble)
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

"""
from resource_management.core.logger import Logger


def setup_ranger_hive_interactive(upgrade_type=None):
  """Set up the Ranger authorization plugin for Hive Server Interactive (Hive2).

  Creates the HDFS audit directories when HDFS auditing is enabled, then
  registers/configures the hive-server2-hive2 plugin with the Ranger admin
  via the XML-configuration setup path (API v2).

  :param upgrade_type: non-None during a stack upgrade; selects
                       ``params.version`` as the stack version override.
  """
  import params

  # Nothing to do on clusters without a Ranger admin.
  if not params.has_ranger_admin:
    Logger.info('Ranger admin not installed')
    return

  stack_version = params.version if upgrade_type is not None else None

  if params.retryAble:
    Logger.info("Hive2: Setup ranger: command retry enabled thus retrying if ranger admin is down !")
  else:
    Logger.info("Hive2: Setup ranger: command retry not enabled thus skipping if ranger admin is down !")

  audit_to_hdfs = (params.xml_configurations_supported
                   and params.enable_ranger_hive
                   and params.xa_audit_hdfs_is_enabled)
  if audit_to_hdfs:
    # Audit root owned by hdfs; the hive2 sub-directory is private to hive.
    params.HdfsResource("/ranger/audit",
                        type="directory",
                        action="create_on_execute",
                        owner=params.hdfs_user,
                        group=params.hdfs_user,
                        mode=0o755,
                        recursive_chmod=True)
    params.HdfsResource("/ranger/audit/hive2",
                        type="directory",
                        action="create_on_execute",
                        owner=params.hive_user,
                        group=params.hive_user,
                        mode=0o700,
                        recursive_chmod=True)
    params.HdfsResource(None, action="execute")

  from resource_management.libraries.functions.setup_ranger_plugin_xml import setup_ranger_plugin
  configurations = params.config['configurations']
  attributes = params.config['configuration_attributes']
  setup_ranger_plugin('hive-server2-hive2', 'hive', params.ranger_previous_jdbc_jar,
                      params.ranger_downloaded_custom_connector, params.ranger_driver_curl_source,
                      params.ranger_driver_curl_target, params.java64_home,
                      params.repo_name, params.hive_ranger_plugin_repo,
                      params.ranger_env, params.ranger_plugin_properties,
                      params.policy_user, params.policymgr_mgr_url,
                      params.enable_ranger_hive,
                      conf_dict=params.hive_server_interactive_conf_dir,
                      component_user=params.hive_user,
                      component_group=params.user_group,
                      cache_service_list=['hive-server2-hive2'],
                      plugin_audit_properties=configurations['ranger-hive-audit'],
                      plugin_audit_attributes=attributes['ranger-hive-audit'],
                      plugin_security_properties=configurations['ranger-hive-security'],
                      plugin_security_attributes=attributes['ranger-hive-security'],
                      plugin_policymgr_ssl_properties=configurations['ranger-hive-policymgr-ssl'],
                      plugin_policymgr_ssl_attributes=attributes['ranger-hive-policymgr-ssl'],
                      component_list=['hive-client', 'hive-metastore', 'hive-server2', 'hive-server2-hive2'],
                      # Hive2 audits only to HDFS/Solr; the audit DB path is disabled.
                      audit_db_is_enabled=False,
                      credential_file=params.credential_file,
                      xa_audit_db_password=None,
                      ssl_truststore_password=params.ssl_truststore_password,
                      ssl_keystore_password=params.ssl_keystore_password,
                      stack_version_override=stack_version,
                      skip_if_rangeradmin_down=not params.retryAble,
                      api_version='v2',
                      is_security_enabled=params.security_enabled,
                      is_stack_supports_ranger_kerberos=params.stack_supports_ranger_kerberos,
                      component_user_principal=params.hive_principal if params.security_enabled else None,
                      component_user_keytab=params.hive_server2_keytab if params.security_enabled else None)
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

"""

from ambari_commons import OSCheck

from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import format
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script


# Maps each Ambari role to its component directory name under
# <stack-root>/current/<component>.
SERVER_ROLE_DIRECTORY_MAP = {
  'HIVE_METASTORE': 'hive-metastore',
  'HIVE_SERVER': 'hive-server2',
  'WEBHCAT_SERVER': 'hive-webhcat',
  'HIVE_CLIENT': 'hive-client',
  'HCAT': 'hive-client',
  'HIVE_SERVER_INTERACTIVE': 'hive-server2-hive2'
}


# Role of the current command: one of HIVE_METASTORE, HIVE_SERVER,
# WEBHCAT_SERVER, HIVE_CLIENT, HCAT, HIVE_SERVER_INTERACTIVE (or None).
role = default("/role", None)
# HIVE_CLIENT / HIVE_SERVER_INTERACTIVE are the fallbacks when the current
# role is not present in the map.
component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "HIVE_CLIENT")
component_directory_interactive = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "HIVE_SERVER_INTERACTIVE")

config = Script.get_config()

stack_root = Script.get_stack_root()
stack_version_unformatted = config['hostLevelParams']['stack_version']
stack_version_formatted_major = format_stack_version(stack_version_unformatted)

if OSCheck.is_windows_family():
  # On Windows only the service names are needed for status checks.
  hive_metastore_win_service_name = "metastore"
  hive_client_win_service_name = "hwi"
  hive_server_win_service_name = "hiveserver2"
  webhcat_server_win_service_name = "templeton"
else:
  # PID files used by the *nix status checks.
  hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
  hive_pid = 'hive-server.pid'
  hive_interactive_pid = 'hive-interactive.pid'
  hive_metastore_pid = 'hive.pid'

  hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']  # hcat_pid_dir
  webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')

  # MySQL process/daemon names differ between OS families.
  process_name = 'mysqld'
  if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
    daemon_name = 'mysql'
  else:
    daemon_name = 'mysqld'

  # Security related/required params
  hostname = config['hostname']
  security_enabled = config['configurations']['cluster-env']['security_enabled']
  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
  tmp_dir = Script.get_tmp_dir()
  hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
  hive_user = config['configurations']['hive-env']['hive_user']
  webhcat_user = config['configurations']['hive-env']['webhcat_user']

  # Default configuration directories; several are refined below when the
  # stack advertises the relevant feature.
  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
  hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
  hive_etc_dir_prefix = "/etc/hive"
  hive_interactive_etc_dir_prefix = "/etc/hive2"

  hive_server_conf_dir = "/etc/hive/conf.server"
  hive_server_interactive_conf_dir = "/etc/hive2/conf.server"

  webhcat_conf_dir = format("{stack_root}/current/hive-webhcat/conf")
  hive_home_dir = format("{stack_root}/current/{component_directory}")
  hive_conf_dir = format("{stack_root}/current/{component_directory}/conf")
  hive_client_conf_dir = format("{stack_root}/current/{component_directory}/conf")

  if check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version_formatted_major):
    hive_server_conf_dir = format("{stack_root}/current/{component_directory}/conf/conf.server")
    hive_conf_dir = hive_server_conf_dir

  if check_stack_feature(StackFeature.HIVE_WEBHCAT_SPECIFIC_CONFIGS, stack_version_formatted_major):
    # this is NOT a typo. Configs for hcatalog/webhcat point to a
    # specific directory which is NOT called 'conf'
    webhcat_conf_dir = format("{stack_root}/current/hive-webhcat/etc/webhcat")

  # if stack version supports hive serve interactive
  if check_stack_feature(StackFeature.HIVE_SERVER_INTERACTIVE, stack_version_formatted_major):
    hive_server_interactive_conf_dir = format("{stack_root}/current/{component_directory_interactive}/conf/conf.server")

  # Server-side roles use the server conf dir; clients use the client one.
  hive_config_dir = hive_client_conf_dir

  if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE", "HIVE_SERVER_INTERACTIVE"]:
    hive_config_dir = hive_server_conf_dir

stack_name = default("/hostLevelParams/stack_name", None)
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Ambari Agent

"""
import sys
import os.path
from resource_management import *
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster, setup_atlas_hook
from ambari_commons import OSConst
from ambari_commons.constants import SERVICE


@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def webhcat():
  """Configure WebHCat on Windows: write webhcat-site.xml and fix the
  service logon user/password set by the installation package."""
  import params
  XmlConfig("webhcat-site.xml",
            conf_dir=params.hcat_config_dir,
            configurations=params.config['configurations']['webhcat-site']
  )
  # Manually overriding service logon user & password set by the installation package
  ServiceConfig(params.webhcat_server_win_service_name,
                action="change_user",
                username=params.hcat_user,
                password=Script.get_password(params.hcat_user))


@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def webhcat():
  """Configure WebHCat on Linux/Unix.

  Creates the pid/log/config directories, writes webhcat-site.xml (with
  _HOST substituted in Kerberos-sensitive properties), regenerates
  hive-site/yarn-site during a secure upgrade, writes webhcat-env.sh and
  webhcat-log4j.properties, and installs the Atlas hook when Atlas is in
  the cluster.
  """
  import params

  Directory(params.templeton_pid_dir,
            owner=params.webhcat_user,
            mode=0o755,
            group=params.user_group,
            create_parents=True)

  Directory(params.templeton_log_dir,
            owner=params.webhcat_user,
            mode=0o755,
            group=params.user_group,
            create_parents=True)

  Directory(params.config_dir,
            create_parents=True,
            owner=params.webhcat_user,
            group=params.user_group,
            cd_access="a")

  # Obtain a Kerberos ticket as hdfs before touching HDFS on secure clusters.
  if params.security_enabled:
    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
  else:
    kinit_if_needed = ""

  if kinit_if_needed:
    Execute(kinit_if_needed,
            user=params.webhcat_user,
            path='/bin'
    )

  # Replace _HOST with hostname in relevant principal-related properties
  webhcat_site = params.config['configurations']['webhcat-site'].copy()
  for prop_name in ['templeton.hive.properties', 'templeton.kerberos.principal']:
    if prop_name in webhcat_site:
      webhcat_site[prop_name] = webhcat_site[prop_name].replace("_HOST", params.hostname)

  XmlConfig("webhcat-site.xml",
            conf_dir=params.config_dir,
            configurations=webhcat_site,
            configuration_attributes=params.config['configuration_attributes']['webhcat-site'],
            owner=params.webhcat_user,
            group=params.user_group,
            )

  # if we're in an upgrade of a secure cluster, make sure hive-site and yarn-site are created
  if params.stack_version_formatted_major and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version_formatted_major) and \
      params.version and params.stack_root:
    XmlConfig("hive-site.xml",
              conf_dir=format("{stack_root}/{version}/hive/conf"),
              configurations=params.config['configurations']['hive-site'],
              configuration_attributes=params.config['configuration_attributes']['hive-site'],
              owner=params.hive_user,
              group=params.user_group,
              )

    XmlConfig("yarn-site.xml",
              conf_dir=format("{stack_root}/{version}/hadoop/conf"),
              configurations=params.config['configurations']['yarn-site'],
              configuration_attributes=params.config['configuration_attributes']['yarn-site'],
              owner=params.yarn_user,
              group=params.user_group,
              )

  File(format("{config_dir}/webhcat-env.sh"),
       owner=params.webhcat_user,
       group=params.user_group,
       content=InlineTemplate(params.webhcat_env_sh_template)
       )

  Directory(params.webhcat_conf_dir,
            cd_access='a',
            create_parents=True
            )

  # Prefer explicit log4j properties from the config; otherwise fall back
  # to the packaged template if one exists.
  log4j_webhcat_filename = 'webhcat-log4j.properties'
  if params.log4j_webhcat_props is not None:
    File(format("{config_dir}/{log4j_webhcat_filename}"),
         mode=0o644,
         group=params.user_group,
         owner=params.webhcat_user,
         content=params.log4j_webhcat_props
         )
  # FIX: the original passed the raw "{config_dir}/..." string to
  # os.path.exists without format(), so the literal brace path was tested
  # and this branch could never fire.
  elif os.path.exists(format("{config_dir}/{log4j_webhcat_filename}.template")):
    File(format("{config_dir}/{log4j_webhcat_filename}"),
         mode=0o644,
         group=params.user_group,
         owner=params.webhcat_user,
         content=StaticFile(format("{config_dir}/{log4j_webhcat_filename}.template"))
         )

  # Generate atlas-application.properties.xml file
  if has_atlas_in_cluster():
    # WebHCat uses a different config dir than the rest of the daemons in Hive.
    atlas_hook_filepath = os.path.join(params.config_dir, params.atlas_hook_filename)
    setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group)
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Ambari Agent

"""
from resource_management import *
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.security_commons import build_expectations, \
  cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
  FILE_TYPE_XML
from webhcat import webhcat
from webhcat_service import webhcat_service
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyImpl


class WebHCatServer(Script):
  """Ambari command handler for the WebHCat (Templeton) server component."""

  def install(self, env):
    """Install the WebHCat packages."""
    import params
    self.install_packages(env)

  def start(self, env, upgrade_type=None):
    """Configure (for security) and start the WebHCat server."""
    import params
    env.set_params(params)
    self.configure(env)  # FOR SECURITY
    webhcat_service(action='start', upgrade_type=upgrade_type)

  def stop(self, env, upgrade_type=None):
    """Stop the WebHCat server."""
    import params
    env.set_params(params)
    webhcat_service(action='stop')

  def configure(self, env):
    """Write the WebHCat configuration files."""
    import params
    env.set_params(params)
    webhcat()


@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class WebHCatServerWindows(WebHCatServer):
  def status(self, env):
    """Report status via the Windows service manager."""
    import status_params
    env.set_params(status_params)
    check_windows_service_status(status_params.webhcat_server_win_service_name)


@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class WebHCatServerDefault(WebHCatServer):
  def get_component_name(self):
    return "hive-webhcat"

  def status(self, env):
    """Report status by checking the webhcat pid file."""
    import status_params
    env.set_params(status_params)
    check_process_status(status_params.webhcat_pid_file)

  def pre_upgrade_restart(self, env, upgrade_type=None):
    """Point conf/stack symlinks at the target version before restart."""
    Logger.info("Executing WebHCat Stack Upgrade pre-restart")
    import params
    env.set_params(params)

    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
      # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
      conf_select.select(params.stack_name, "hive-hcatalog", params.version)
      conf_select.select(params.stack_name, "hadoop", params.version)
      stack_select.select("hive-webhcat", params.version)

  def security_status(self, env):
    """Validate Kerberos configuration and attempt a kinit as the webhcat
    user; reports SECURED_KERBEROS / UNSECURED / ERROR via structured out."""
    import status_params
    env.set_params(status_params)

    if not status_params.security_enabled:
      self.put_structured_out({"securityState": "UNSECURED"})
      return

    # Expected secure properties for webhcat-site and hive-site.
    expectations = {}
    expectations.update(
      build_expectations(
        'webhcat-site',
        {
          "templeton.kerberos.secret": "secret"
        },
        [
          "templeton.kerberos.keytab",
          "templeton.kerberos.principal"
        ],
        [
          "templeton.kerberos.keytab"
        ]
      )
    )
    expectations.update(
      build_expectations(
        'hive-site',
        {
          "hive.server2.authentication": "KERBEROS",
          "hive.metastore.sasl.enabled": "true",
          "hive.security.authorization.enabled": "true"
        },
        None,
        None
      )
    )

    security_params = {}
    security_params.update(get_params_from_filesystem(status_params.hive_conf_dir,
                                                      {'hive-site.xml': FILE_TYPE_XML}))
    security_params.update(get_params_from_filesystem(status_params.webhcat_conf_dir,
                                                      {'webhcat-site.xml': FILE_TYPE_XML}))
    result_issues = validate_security_config_properties(security_params, expectations)
    if not result_issues:  # If all validations passed successfully
      try:
        # Double check the dict before calling execute
        if 'webhcat-site' not in security_params \
            or 'templeton.kerberos.keytab' not in security_params['webhcat-site'] \
            or 'templeton.kerberos.principal' not in security_params['webhcat-site']:
          self.put_structured_out({"securityState": "UNSECURED"})
          # FIX: message typo — was "are not set property."
          self.put_structured_out({"securityIssuesFound": "Keytab file or principal are not set properly."})
          return

        cached_kinit_executor(status_params.kinit_path_local,
                              status_params.webhcat_user,
                              security_params['webhcat-site']['templeton.kerberos.keytab'],
                              security_params['webhcat-site']['templeton.kerberos.principal'],
                              status_params.hostname,
                              status_params.tmp_dir)
        self.put_structured_out({"securityState": "SECURED_KERBEROS"})
      except Exception as e:
        self.put_structured_out({"securityState": "ERROR"})
        self.put_structured_out({"securityStateErrorInfo": str(e)})
    else:
      issues = []
      for cf in result_issues:
        issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
      self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
      self.put_structured_out({"securityState": "UNSECURED"})

  def get_log_folder(self):
    import params
    return params.hcat_log_dir

  def get_user(self):
    import params
    return params.webhcat_user


if __name__ == "__main__":
  WebHCatServer().execute()
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Ambari Agent

"""
from resource_management import *
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
from resource_management.core.shell import as_user
from resource_management.core.logger import Logger
import traceback


@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def webhcat_service(action='start', rolling_restart=False, upgrade_type=None):
  """Start/stop the WebHCat Windows service.

  FIX: callers (webhcat_server.start) invoke this with the keyword
  ``upgrade_type=``, which the original Windows signature did not accept
  and would raise TypeError; ``upgrade_type`` is accepted (and ignored)
  here, and ``rolling_restart`` is kept for backward compatibility.
  """
  import params
  if action == 'start' or action == 'stop':
    Service(params.webhcat_server_win_service_name, action=action)


@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def webhcat_service(action='start', upgrade_type=None):
  """Start or stop the WebHCat server on Linux/Unix.

  start: runs webhcat_server.sh unless the pid file points at a live
  process. stop: attempts a graceful stop, escalates to kill -9 if the
  process survives, verifies it is gone, then removes the pid file.

  :param action: 'start' or 'stop'.
  :param upgrade_type: non-None during a stack upgrade; HADOOP_HOME is then
                       pointed at the target version.
  """
  import params

  environ = {
    'HADOOP_HOME': params.hadoop_home
  }

  cmd = format('{webhcat_bin_dir}/webhcat_server.sh')

  if action == 'start':
    if upgrade_type is not None and params.version and params.stack_root:
      environ['HADOOP_HOME'] = format("{stack_root}/{version}/hadoop")

    daemon_cmd = format('cd {hcat_pid_dir} ; {cmd} start')
    # Skip the start when the pid file already points at a running process.
    no_op_test = as_user(format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p `cat {webhcat_pid_file}` >/dev/null 2>&1'), user=params.webhcat_user)
    try:
      Execute(daemon_cmd,
              user=params.webhcat_user,
              not_if=no_op_test,
              environment=environ)
    except Exception:  # was a bare except; still surfaces logs then re-raises
      show_logs(params.hcat_log_dir, params.webhcat_user)
      raise
  elif action == 'stop':
    # Attempt a graceful stop first; a failure is only logged because the
    # hard-kill below is the fallback.
    try:
      graceful_stop(cmd, environ)
    except Fail:
      show_logs(params.hcat_log_dir, params.webhcat_user)
      Logger.info(traceback.format_exc())

    pid_expression = "`" + as_user(format("cat {webhcat_pid_file}"), user=params.webhcat_user) + "`"
    process_id_exists_command = format("ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p {pid_expression} >/dev/null 2>&1")
    daemon_hard_kill_cmd = format("{sudo} kill -9 {pid_expression}")
    wait_time = 10
    # Hard-kill only if the process is still alive after the grace period.
    Execute(daemon_hard_kill_cmd,
            not_if=format("! ({process_id_exists_command}) || ( sleep {wait_time} && ! ({process_id_exists_command}) )"),
            ignore_failures=True
            )

    try:
      # check if stopped the process, else fail the task
      Execute(format("! ({process_id_exists_command})"),
              tries=20,
              try_sleep=3,
              )
    except Exception:  # was a bare except; still surfaces logs then re-raises
      show_logs(params.hcat_log_dir, params.webhcat_user)
      raise

    File(params.webhcat_pid_file,
         action="delete",
         )


def graceful_stop(cmd, environ):
  """Run 'webhcat_server.sh stop' as the webhcat user with the given env."""
  import params
  daemon_cmd = format('{cmd} stop')

  Execute(daemon_cmd,
          user=params.webhcat_user,
          environment=environ)
The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" +import urllib2 + +from resource_management import * +from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl +from ambari_commons import OSConst +import time + +@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY) +def webhcat_service_check(): + Logger.info("Webhcat smoke test - service status") + + import params + # AMBARI-11633 [WinTP2] Webhcat service check fails + # Hive doesn't pass the environment variables correctly to child processes, which fails the smoke test. + # Reducing the amount of URLs checked to the minimum required. 
+ #smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd") + #service = "WEBHCAT" + #Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hcat_user, logoutput=True) + + url_tests = [ + "status", + #These are the failing ones: + #"ddl/database?user.name=hadoop", + #"ddl/database/default/table?user.name=hadoop" + ] + + + import socket + + url_host = socket.getfqdn() + url_port = params.config["configurations"]["webhcat-site"]["templeton.port"] + + for url_test in url_tests: + url_request = "http://{0}:{1}/templeton/v1/{2}".format(url_host, url_port, url_test) + url_response = None + + try: + # execute the query for the JSON that includes WebHCat status + url_response = urllib2.urlopen(url_request, timeout=30) + + status = url_response.getcode() + response = url_response.read() + + if status != 200: + Logger.warning("Webhcat service check status: {0}".format(status)) + Logger.info("Webhcat service check response: {0}".format(response)) + except urllib2.HTTPError as he: + raise Fail("Webhcat check {0} failed: {1}".format(url_request, he.msg)) + finally: + if url_response is not None: + try: + url_response.close() + except: + pass + + +@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT) +def webhcat_service_check(): + import params + File(format("{tmp_dir}/templetonSmoke.sh"), + content= StaticFile('templetonSmoke.sh'), + mode=0755 + ) + + if params.security_enabled: + smokeuser_keytab=params.smoke_user_keytab + smoke_user_principal=params.smokeuser_principal + else: + smokeuser_keytab= "no_keytab" + smoke_user_principal="no_principal" + + unique_name = format("{smokeuser}.{timestamp}", timestamp = time.time()) + templeton_test_script = format("idtest.{unique_name}.pig") + templeton_test_input = format("/tmp/idtest.{unique_name}.in") + templeton_test_output = format("/tmp/idtest.{unique_name}.out") + + File(format("{tmp_dir}/{templeton_test_script}"), + content = Template("templeton_smoke.pig.j2", templeton_test_input=templeton_test_input, 
templeton_test_output=templeton_test_output), + owner=params.hdfs_user + ) + + params.HdfsResource(format("/tmp/{templeton_test_script}"), + action = "create_on_execute", + type = "file", + source = format("{tmp_dir}/{templeton_test_script}"), + owner = params.smokeuser + ) + + params.HdfsResource(templeton_test_input, + action = "create_on_execute", + type = "file", + source = "/etc/passwd", + owner = params.smokeuser + ) + + params.HdfsResource(None, action = "execute") + + cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {templeton_port} {templeton_test_script} {smokeuser_keytab}" + " {security_param} {kinit_path_local} {smoke_user_principal}" + " {tmp_dir}") + + Execute(cmd, + tries=3, + try_sleep=5, + path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin', + logoutput=True) + + + http://git-wip-us.apache.org/repos/asf/bigtop/blob/4522c959/bigtop-packages/src/common/ambari/ODPi/1.0/services/HIVE/package/templates/hadoop-metrics2-hivemetastore.properties.j2 ---------------------------------------------------------------------- diff --git a/bigtop-packages/src/common/ambari/ODPi/1.0/services/HIVE/package/templates/hadoop-metrics2-hivemetastore.properties.j2 b/bigtop-packages/src/common/ambari/ODPi/1.0/services/HIVE/package/templates/hadoop-metrics2-hivemetastore.properties.j2 new file mode 100755 index 0000000..e4d88bc --- /dev/null +++ b/bigtop-packages/src/common/ambari/ODPi/1.0/services/HIVE/package/templates/hadoop-metrics2-hivemetastore.properties.j2 @@ -0,0 +1,54 @@ +{# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#}

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# syntax: [prefix].[source|sink|jmx].[instance].[options]
# See package.html for org.apache.hadoop.metrics2 for details

{% if has_metric_collector %}

*.period={{metrics_collection_period}}
*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period={{metrics_collection_period}}
*.sink.timeline.sendInterval={{metrics_report_interval}}000
*.sink.timeline.slave.host.name = {{hostname}}

# HTTPS properties
*.sink.timeline.truststore.path = {{metric_truststore_path}}
*.sink.timeline.truststore.type = {{metric_truststore_type}}
*.sink.timeline.truststore.password = {{metric_truststore_password}}

hivemetastore.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}

{% endif %}

# ==== new file (mode 100755): .../HIVE/package/templates/hadoop-metrics2-hiveserver2.properties.j2 ====
{#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.
The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#}

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# syntax: [prefix].[source|sink|jmx].[instance].[options]
# See package.html for org.apache.hadoop.metrics2 for details

{% if has_metric_collector %}

*.period={{metrics_collection_period}}
*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period={{metrics_collection_period}}
*.sink.timeline.sendInterval={{metrics_report_interval}}000
*.sink.timeline.slave.host.name = {{hostname}}

# HTTPS properties
*.sink.timeline.truststore.path = {{metric_truststore_path}}
*.sink.timeline.truststore.type = {{metric_truststore_type}}
*.sink.timeline.truststore.password = {{metric_truststore_password}}

hiveserver2.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}

{% endif %}

# ==== new file (mode 100755): .../HIVE/package/templates/hadoop-metrics2-llapdaemon.j2 ====
{#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.
You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#}

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# syntax: [prefix].[source|sink|jmx].[instance].[options]
# See package.html for org.apache.hadoop.metrics2 for details

{% if has_metric_collector %}

*.period={{metrics_collection_period}}
*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period={{metrics_collection_period}}
*.sink.timeline.sendInterval={{metrics_report_interval}}000

# HTTPS properties
*.sink.timeline.truststore.path = {{metric_truststore_path}}
*.sink.timeline.truststore.type = {{metric_truststore_type}}
*.sink.timeline.truststore.password = {{metric_truststore_password}}

llapdaemon.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}

{% endif %}

# ==== new file (mode 100755): .../HIVE/package/templates/hadoop-metrics2-llaptaskscheduler.j2 ====
{#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.
You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#}

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# syntax: [prefix].[source|sink|jmx].[instance].[options]
# See package.html for org.apache.hadoop.metrics2 for details

{% if has_metric_collector %}

*.period={{metrics_collection_period}}
*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period={{metrics_collection_period}}
*.sink.timeline.sendInterval={{metrics_report_interval}}000

# HTTPS properties
*.sink.timeline.truststore.path = {{metric_truststore_path}}
*.sink.timeline.truststore.type = {{metric_truststore_type}}
*.sink.timeline.truststore.password = {{metric_truststore_password}}

llaptaskscheduler.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}

{% endif %}

# ==== new file (mode 100755): .../HIVE/package/templates/hive.conf.j2 ====
{#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.
You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#}

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# /etc/security/limits.d entries for the Hive service user.
{{hive_user}} - nofile {{hive_user_nofile_limit}}
{{hive_user}} - nproc {{hive_user_nproc_limit}}

# ==== new file (mode 100755): .../HIVE/package/templates/startHiveserver2.sh.j2 ====
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
#

# Launches HiveServer2 in the background and records its pid.
# Positional args: $1 stdout log, $2 stderr log, $3 pid file,
#                  $4 conf dir, $5 hive log dir.
HIVE_SERVER2_OPTS=" -hiveconf hive.log.file=hiveserver2.log -hiveconf hive.log.dir=$5"
HIVE_CONF_DIR=$4 {{hive_bin}}/hiveserver2 -hiveconf hive.metastore.uris=" " ${HIVE_SERVER2_OPTS} > $1 2> $2 &
echo $!|cat>$3

# ==== new file (mode 100755): .../HIVE/package/templates/startHiveserver2Interactive.sh.j2 ====
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
#

# Launches HiveServer2 Interactive (LLAP) in the background and records its pid.
# Positional args: $1 stdout log, $2 stderr log, $3 pid file,
#                  $4 conf dir, $5 hive log dir.
HIVE_SERVER2_INTERACTIVE_OPTS=" -hiveconf hive.log.file=hiveserver2Interactive.log -hiveconf hive.log.dir=$5"
HIVE_INTERACTIVE_CONF_DIR=$4 {{hive_interactive_bin}}/hiveserver2 -hiveconf hive.metastore.uris=" " ${HIVE_SERVER2_INTERACTIVE_OPTS} > $1 2> $2 &
echo $!|cat>$3

# ==== new file (mode 100755): .../HIVE/package/templates/templeton_smoke.pig.j2 ====
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
#

A = load '{{templeton_test_input}}' using PigStorage(':');
B = foreach A generate \$0 as id;
store B into '{{templeton_test_output}}';
