http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/ams.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/ams.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/ams.py deleted file mode 100755 index 7b1a824..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/ams.py +++ /dev/null @@ -1,388 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -from resource_management import * -from ambari_commons import OSConst -from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl -from ambari_commons.str_utils import compress_backslashes -import glob -import os - -@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY) -def ams(name=None): - import params - if name == 'collector': - if not check_windows_service_exists(params.ams_collector_win_service_name): - Execute(format("cmd /C cd {ams_collector_home_dir} & ambari-metrics-collector.cmd setup")) - - Directory(params.ams_collector_conf_dir, - owner=params.ams_user, - create_parents = True - ) - - Directory(params.ams_checkpoint_dir, - owner=params.ams_user, - create_parents = True - ) - - XmlConfig("ams-site.xml", - conf_dir=params.ams_collector_conf_dir, - configurations=params.config['configurations']['ams-site'], - configuration_attributes=params.config['configuration_attributes']['ams-site'], - owner=params.ams_user, - ) - - merged_ams_hbase_site = {} - merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-site']) - if params.security_enabled: - merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-security-site']) - - XmlConfig( "hbase-site.xml", - conf_dir = params.ams_collector_conf_dir, - configurations = merged_ams_hbase_site, - configuration_attributes=params.config['configuration_attributes']['ams-hbase-site'], - owner = params.ams_user, - ) - - if (params.log4j_props != None): - File(os.path.join(params.ams_collector_conf_dir, "log4j.properties"), - owner=params.ams_user, - content=params.log4j_props - ) - - File(os.path.join(params.ams_collector_conf_dir, "ams-env.cmd"), - owner=params.ams_user, - content=InlineTemplate(params.ams_env_sh_template) - ) - - ServiceConfig(params.ams_collector_win_service_name, - action="change_user", - username = params.ams_user, - password = Script.get_password(params.ams_user)) - - if not params.is_local_fs_rootdir: - # Configuration needed to support NN HA - 
XmlConfig("hdfs-site.xml", - conf_dir=params.ams_collector_conf_dir, - configurations=params.config['configurations']['hdfs-site'], - configuration_attributes=params.config['configuration_attributes']['hdfs-site'], - owner=params.ams_user, - group=params.user_group, - mode=0644 - ) - - XmlConfig("hdfs-site.xml", - conf_dir=params.hbase_conf_dir, - configurations=params.config['configurations']['hdfs-site'], - configuration_attributes=params.config['configuration_attributes']['hdfs-site'], - owner=params.ams_user, - group=params.user_group, - mode=0644 - ) - - XmlConfig("core-site.xml", - conf_dir=params.ams_collector_conf_dir, - configurations=params.config['configurations']['core-site'], - configuration_attributes=params.config['configuration_attributes']['core-site'], - owner=params.ams_user, - group=params.user_group, - mode=0644 - ) - - XmlConfig("core-site.xml", - conf_dir=params.hbase_conf_dir, - configurations=params.config['configurations']['core-site'], - configuration_attributes=params.config['configuration_attributes']['core-site'], - owner=params.ams_user, - group=params.user_group, - mode=0644 - ) - - else: - ServiceConfig(params.ams_embedded_hbase_win_service_name, - action="change_user", - username = params.ams_user, - password = Script.get_password(params.ams_user)) - # creating symbolic links on ams jars to make them available to services - links_pairs = [ - ("%COLLECTOR_HOME%\\hbase\\lib\\ambari-metrics-hadoop-sink-with-common.jar", - "%SINK_HOME%\\hadoop-sink\\ambari-metrics-hadoop-sink-with-common-*.jar"), - ] - for link_pair in links_pairs: - link, target = link_pair - real_link = os.path.expandvars(link) - target = compress_backslashes(glob.glob(os.path.expandvars(target))[0]) - if not os.path.exists(real_link): - #TODO check the symlink destination too. Broken in Python 2.x on Windows. 
- Execute('cmd /c mklink "{0}" "{1}"'.format(real_link, target)) - pass - - elif name == 'monitor': - if not check_windows_service_exists(params.ams_monitor_win_service_name): - Execute(format("cmd /C cd {ams_monitor_home_dir} & ambari-metrics-monitor.cmd setup")) - - # creating symbolic links on ams jars to make them available to services - links_pairs = [ - ("%HADOOP_HOME%\\share\\hadoop\\common\\lib\\ambari-metrics-hadoop-sink-with-common.jar", - "%SINK_HOME%\\hadoop-sink\\ambari-metrics-hadoop-sink-with-common-*.jar"), - ("%HBASE_HOME%\\lib\\ambari-metrics-hadoop-sink-with-common.jar", - "%SINK_HOME%\\hadoop-sink\\ambari-metrics-hadoop-sink-with-common-*.jar"), - ] - for link_pair in links_pairs: - link, target = link_pair - real_link = os.path.expandvars(link) - target = compress_backslashes(glob.glob(os.path.expandvars(target))[0]) - if not os.path.exists(real_link): - #TODO check the symlink destination too. Broken in Python 2.x on Windows. - Execute('cmd /c mklink "{0}" "{1}"'.format(real_link, target)) - - Directory(params.ams_monitor_conf_dir, - owner=params.ams_user, - create_parents = True - ) - - TemplateConfig( - os.path.join(params.ams_monitor_conf_dir, "metric_monitor.ini"), - owner=params.ams_user, - template_tag=None - ) - - TemplateConfig( - os.path.join(params.ams_monitor_conf_dir, "metric_groups.conf"), - owner=params.ams_user, - template_tag=None - ) - - ServiceConfig(params.ams_monitor_win_service_name, - action="change_user", - username = params.ams_user, - password = Script.get_password(params.ams_user)) - - -@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT) -def ams(name=None): - import params - - if name == 'collector': - Directory(params.ams_collector_conf_dir, - owner=params.ams_user, - group=params.user_group, - create_parents = True - ) - - Execute(('chown', '-R', params.ams_user, params.ams_collector_conf_dir), - sudo=True - ) - - Directory(params.ams_checkpoint_dir, - owner=params.ams_user, - group=params.user_group, - 
cd_access="a", - create_parents = True - ) - - Execute(('chown', '-R', params.ams_user, params.ams_checkpoint_dir), - sudo=True - ) - - XmlConfig("ams-site.xml", - conf_dir=params.ams_collector_conf_dir, - configurations=params.config['configurations']['ams-site'], - configuration_attributes=params.config['configuration_attributes']['ams-site'], - owner=params.ams_user, - group=params.user_group - ) - - merged_ams_hbase_site = {} - merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-site']) - if params.security_enabled: - merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-security-site']) - - # Add phoenix client side overrides - merged_ams_hbase_site['phoenix.query.maxGlobalMemoryPercentage'] = str(params.phoenix_max_global_mem_percent) - merged_ams_hbase_site['phoenix.spool.directory'] = params.phoenix_client_spool_dir - - XmlConfig( "hbase-site.xml", - conf_dir = params.ams_collector_conf_dir, - configurations = merged_ams_hbase_site, - configuration_attributes=params.config['configuration_attributes']['ams-hbase-site'], - owner = params.ams_user, - group = params.user_group - ) - - if params.security_enabled: - TemplateConfig(os.path.join(params.hbase_conf_dir, "ams_collector_jaas.conf"), - owner = params.ams_user, - template_tag = None) - - if (params.log4j_props != None): - File(format("{params.ams_collector_conf_dir}/log4j.properties"), - mode=0644, - group=params.user_group, - owner=params.ams_user, - content=params.log4j_props - ) - - File(format("{ams_collector_conf_dir}/ams-env.sh"), - owner=params.ams_user, - content=InlineTemplate(params.ams_env_sh_template) - ) - - Directory(params.ams_collector_log_dir, - owner=params.ams_user, - group=params.user_group, - cd_access="a", - create_parents = True, - mode=0755, - ) - - Directory(params.ams_collector_pid_dir, - owner=params.ams_user, - group=params.user_group, - cd_access="a", - create_parents = True, - mode=0755, - ) - - # Hack to allow native HBase libs to be 
included for embedded hbase - File(os.path.join(params.ams_hbase_home_dir, "bin", "hadoop"), - owner=params.ams_user, - mode=0755 - ) - - # On some OS this folder could be not exists, so we will create it before pushing there files - Directory(params.limits_conf_dir, - create_parents = True, - owner='root', - group='root' - ) - - # Setting up security limits - File(os.path.join(params.limits_conf_dir, 'ams.conf'), - owner='root', - group='root', - mode=0644, - content=Template("ams.conf.j2") - ) - - # Phoenix spool file dir if not /tmp - if not os.path.exists(params.phoenix_client_spool_dir): - Directory(params.phoenix_client_spool_dir, - owner=params.ams_user, - mode = 0755, - group=params.user_group, - cd_access="a", - create_parents = True - ) - pass - - if not params.is_local_fs_rootdir and params.is_ams_distributed: - # Configuration needed to support NN HA - XmlConfig("hdfs-site.xml", - conf_dir=params.ams_collector_conf_dir, - configurations=params.config['configurations']['hdfs-site'], - configuration_attributes=params.config['configuration_attributes']['hdfs-site'], - owner=params.ams_user, - group=params.user_group, - mode=0644 - ) - - XmlConfig("hdfs-site.xml", - conf_dir=params.hbase_conf_dir, - configurations=params.config['configurations']['hdfs-site'], - configuration_attributes=params.config['configuration_attributes']['hdfs-site'], - owner=params.ams_user, - group=params.user_group, - mode=0644 - ) - - XmlConfig("core-site.xml", - conf_dir=params.ams_collector_conf_dir, - configurations=params.config['configurations']['core-site'], - configuration_attributes=params.config['configuration_attributes']['core-site'], - owner=params.ams_user, - group=params.user_group, - mode=0644 - ) - - XmlConfig("core-site.xml", - conf_dir=params.hbase_conf_dir, - configurations=params.config['configurations']['core-site'], - configuration_attributes=params.config['configuration_attributes']['core-site'], - owner=params.ams_user, - group=params.user_group, - 
mode=0644 - ) - - pass - - elif name == 'monitor': - Directory(params.ams_monitor_conf_dir, - owner=params.ams_user, - group=params.user_group, - create_parents = True - ) - - Directory(params.ams_monitor_log_dir, - owner=params.ams_user, - group=params.user_group, - mode=0755, - create_parents = True - ) - - Directory(params.ams_monitor_pid_dir, - owner=params.ams_user, - group=params.user_group, - mode=0755, - create_parents = True - ) - - Directory(format("{ams_monitor_dir}/psutil/build"), - owner=params.ams_user, - group=params.user_group, - cd_access="a", - create_parents = True) - - Execute(format("{sudo} chown -R {ams_user}:{user_group} {ams_monitor_dir}") - ) - - TemplateConfig( - format("{ams_monitor_conf_dir}/metric_monitor.ini"), - owner=params.ams_user, - group=params.user_group, - template_tag=None - ) - - TemplateConfig( - format("{ams_monitor_conf_dir}/metric_groups.conf"), - owner=params.ams_user, - group=params.user_group, - template_tag=None - ) - - File(format("{ams_monitor_conf_dir}/ams-env.sh"), - owner=params.ams_user, - content=InlineTemplate(params.ams_env_sh_template) - ) - - # TODO - pass - - pass
http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/ams_service.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/ams_service.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/ams_service.py deleted file mode 100755 index 0726802..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/ams_service.py +++ /dev/null @@ -1,103 +0,0 @@ -# !/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -from resource_management import * -from ambari_commons import OSConst -from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl -from hbase_service import hbase_service -import os - -@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY) -def ams_service(name, action): - import params - if name == 'collector': - Service(params.ams_embedded_hbase_win_service_name, action=action) - Service(params.ams_collector_win_service_name, action=action) - elif name == 'monitor': - Service(params.ams_monitor_win_service_name, action=action) - -@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT) -def ams_service(name, action): - import params - - if name == 'collector': - cmd = format("{ams_collector_script} --config {ams_collector_conf_dir}") - pid_file = format("{ams_collector_pid_dir}/ambari-metrics-collector.pid") - #no_op_test should be much more complex to work with cumulative status of collector - #removing as startup script handle it also - #no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1") - - if params.is_hbase_distributed: - hbase_service('zookeeper', action=action) - hbase_service('master', action=action) - hbase_service('regionserver', action=action) - cmd = format("{cmd} --distributed") - - if action == 'start': - Execute(format("{sudo} rm -rf {hbase_tmp_dir}/*.tmp") - ) - - if not params.is_hbase_distributed and os.path.exists(format("{zookeeper_data_dir}")): - Directory(params.zookeeper_data_dir, - action='delete' - ) - - - if params.security_enabled: - kinit_cmd = format("{kinit_path_local} -kt {ams_collector_keytab_path} {ams_collector_jaas_princ};") - daemon_cmd = format("{kinit_cmd} {cmd} start") - else: - daemon_cmd = format("{cmd} start") - - Execute(daemon_cmd, - user=params.ams_user - ) - - pass - elif action == 'stop': - daemon_cmd = format("{cmd} stop") - Execute(daemon_cmd, - user=params.ams_user - ) - - pass - pass - elif name == 'monitor': - cmd = format("{ams_monitor_script} --config 
{ams_monitor_conf_dir}") - pid_file = format("{ams_monitor_pid_dir}/ambari-metrics-monitor.pid") - no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1") - - if action == 'start': - daemon_cmd = format("{cmd} start") - Execute(daemon_cmd, - user=params.ams_user - ) - - pass - elif action == 'stop': - - daemon_cmd = format("{cmd} stop") - Execute(daemon_cmd, - user=params.ams_user - ) - - pass - pass - pass http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/functions.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/functions.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/functions.py deleted file mode 100755 index 140c24c..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/functions.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -import os -import re -import math -import datetime - -from resource_management.core.shell import checked_call - -def calc_xmn_from_xms(heapsize_str, xmn_percent, xmn_max): - """ - @param heapsize: str (e.g 1000m) - @param xmn_percent: float (e.g 0.2) - @param xmn_max: integer (e.g 512) - """ - heapsize = int(re.search('\d+', str(heapsize_str)).group(0)) - heapsize_unit = re.search('\D+', str(heapsize_str)).group(0) - - xmn_val = int(math.floor(heapsize*xmn_percent)) - xmn_val -= xmn_val % 8 - - result_xmn_val = xmn_max if xmn_val > xmn_max else xmn_val - return str(result_xmn_val) + heapsize_unit - -def trim_heap_property(property, m_suffix = "m"): - if property and property.endswith(m_suffix): - property = property[:-1] - return property - -def check_append_heap_property(property, m_suffix = "m"): - if property and not property.endswith(m_suffix): - property += m_suffix - return property \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase.py deleted file mode 100755 index 16d741f..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase.py +++ /dev/null @@ -1,267 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. 
You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -""" -import os -from ambari_commons import OSConst -from resource_management import * -from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl - -@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY) -def hbase(name=None, action = None): - import params - Directory(params.hbase_conf_dir, - owner = params.hadoop_user, - create_parents = True - ) - Directory(params.hbase_tmp_dir, - create_parents = True, - owner = params.hadoop_user - ) - - Directory (os.path.join(params.local_dir, "jars"), - owner = params.hadoop_user, - create_parents = True - ) - - XmlConfig("hbase-site.xml", - conf_dir = params.hbase_conf_dir, - configurations = params.config['configurations']['ams-hbase-site'], - configuration_attributes=params.config['configuration_attributes']['ams-hbase-site'], - owner = params.hadoop_user - ) - - if 'ams-hbase-policy' in params.config['configurations']: - XmlConfig("hbase-policy.xml", - conf_dir = params.hbase_conf_dir, - configurations = params.config['configurations']['ams-hbase-policy'], - configuration_attributes=params.config['configuration_attributes']['ams-hbase-policy'], - owner = params.hadoop_user - ) - # Manually overriding ownership of file installed by hadoop package - else: - File(os.path.join(params.hbase_conf_dir, "hbase-policy.xml"), - owner = params.hadoop_user - ) - - # Metrics properties - File(os.path.join(params.hbase_conf_dir, "hadoop-metrics2-hbase.properties"), - owner = params.hbase_user, - content=Template("hadoop-metrics2-hbase.properties.j2") - ) - - hbase_TemplateConfig('regionservers', user=params.hadoop_user) - - if 
params.security_enabled: - hbase_TemplateConfig(format("hbase_{name}_jaas.conf"), user=params.hadoop_user) - - if name != "client": - Directory (params.hbase_log_dir, - owner = params.hadoop_user, - create_parents = True - ) - - if (params.hbase_log4j_props != None): - File(os.path.join(params.hbase_conf_dir, "log4j.properties"), - owner=params.hadoop_user, - content=params.hbase_log4j_props - ) - elif (os.path.exists(os.path.join(params.hbase_conf_dir,"log4j.properties"))): - File(os.path.join(params.hbase_conf_dir,"log4j.properties"), - owner=params.hadoop_user - ) - -@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT) -def hbase(name=None # 'master' or 'regionserver' or 'client' - , action=None): - import params - - Directory(params.hbase_conf_dir, - owner = params.hbase_user, - group = params.user_group, - create_parents = True - ) - - Execute(('chown', '-R', params.hbase_user, params.hbase_conf_dir), - sudo=True - ) - - Directory (params.hbase_tmp_dir, - owner = params.hbase_user, - cd_access="a", - create_parents = True - ) - - Execute(('chown', '-R', params.hbase_user, params.hbase_tmp_dir), - sudo=True - ) - - Directory (os.path.join(params.local_dir, "jars"), - owner = params.hbase_user, - group = params.user_group, - cd_access="a", - mode=0775, - create_parents = True - ) - - merged_ams_hbase_site = {} - merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-site']) - if params.security_enabled: - merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-security-site']) - - XmlConfig("hbase-site.xml", - conf_dir = params.hbase_conf_dir, - configurations = merged_ams_hbase_site, - configuration_attributes=params.config['configuration_attributes']['ams-hbase-site'], - owner = params.hbase_user, - group = params.user_group - ) - - # Phoenix spool file dir if not /tmp - if not os.path.exists(params.phoenix_server_spool_dir): - Directory(params.phoenix_server_spool_dir, - owner=params.ams_user, - mode = 0755, - 
group=params.user_group, - cd_access="a", - create_parents=True - ) - pass - - if 'ams-hbase-policy' in params.config['configurations']: - XmlConfig("hbase-policy.xml", - conf_dir = params.hbase_conf_dir, - configurations = params.config['configurations']['ams-hbase-policy'], - configuration_attributes=params.config['configuration_attributes']['ams-hbase-policy'], - owner = params.hbase_user, - group = params.user_group - ) - # Manually overriding ownership of file installed by hadoop package - else: - File( format("{params.hbase_conf_dir}/hbase-policy.xml"), - owner = params.hbase_user, - group = params.user_group - ) - - File(format("{hbase_conf_dir}/hbase-env.sh"), - owner = params.hbase_user, - content=InlineTemplate(params.hbase_env_sh_template) - ) - - # Metrics properties - File(os.path.join(params.hbase_conf_dir, "hadoop-metrics2-hbase.properties"), - owner = params.hbase_user, - group = params.user_group, - content=Template("hadoop-metrics2-hbase.properties.j2") - ) - - # hbase_TemplateConfig( params.metric_prop_file_name, - # tag = 'GANGLIA-MASTER' if name == 'master' else 'GANGLIA-RS' - # ) - - hbase_TemplateConfig('regionservers', user=params.hbase_user) - - if params.security_enabled: - hbase_TemplateConfig( format("hbase_{name}_jaas.conf"), user=params.hbase_user) - hbase_TemplateConfig( format("hbase_client_jaas.conf"), user=params.hbase_user) - hbase_TemplateConfig( format("ams_zookeeper_jaas.conf"), user=params.hbase_user) - - if name != "client": - Directory( params.hbase_pid_dir, - owner = params.hbase_user, - create_parents = True, - cd_access = "a", - mode = 0755, - ) - - Directory (params.hbase_log_dir, - owner = params.hbase_user, - create_parents = True, - cd_access = "a", - mode = 0755, - ) - - if name == "master": - - if not params.is_local_fs_rootdir: - # If executing Stop All, HDFS is probably down - if action != 'stop': - - params.HdfsResource(params.hbase_root_dir, - type="directory", - action="create_on_execute", - 
owner=params.hbase_user, - mode=0775 - ) - - params.HdfsResource(params.hbase_staging_dir, - type="directory", - action="create_on_execute", - owner=params.hbase_user, - mode=0711 - ) - - params.HdfsResource(None, action="execute") - - if params.is_hbase_distributed: - #Workaround for status commands not aware of operating mode - File(format("{params.hbase_pid_dir}/distributed_mode"), action="create", mode=0644, owner=params.hbase_user) - - pass - - else: - - local_root_dir = params.hbase_root_dir - #cut protocol name - if local_root_dir.startswith("file://"): - local_root_dir = local_root_dir[7:] - #otherwise assume dir name is provided as is - - Directory(local_root_dir, - owner = params.hbase_user, - cd_access="a", - create_parents = True - ) - - Execute(('chown', '-R', params.hbase_user, local_root_dir), - sudo=True - ) - - File(format("{params.hbase_pid_dir}/distributed_mode"), action="delete", owner=params.hbase_user) - - if params.hbase_log4j_props is not None: - File(format("{params.hbase_conf_dir}/log4j.properties"), - mode=0644, - group=params.user_group, - owner=params.hbase_user, - content=params.hbase_log4j_props - ) - elif os.path.exists(format("{params.hbase_conf_dir}/log4j.properties")): - File(format("{params.hbase_conf_dir}/log4j.properties"), - mode=0644, - group=params.user_group, - owner=params.hbase_user - ) - -def hbase_TemplateConfig(name, tag=None, user=None): - import params - - TemplateConfig( os.path.join(params.hbase_conf_dir, name), - owner = user, - template_tag = tag - ) http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_master.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_master.py 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_master.py deleted file mode 100755 index b769a0d..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_master.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -import sys -from resource_management import * - -from hbase import hbase -from hbase_service import hbase_service -from hbase_decommission import hbase_decommission - - -class HbaseMaster(Script): - def install(self, env): - self.install_packages(env) - - def configure(self, env, action = None): - import params - env.set_params(params) - - hbase('master', action) - - def start(self, env): - import params - env.set_params(params) - self.configure(env, action = 'start') # for security - - hbase_service( 'master', - action = 'start' - ) - - def stop(self, env): - import params - env.set_params(params) - - hbase_service( 'master', - action = 'stop' - ) - - def status(self, env): - import status_params - env.set_params(status_params) - pid_file = format("{pid_dir}/hbase-{hbase_user}-master.pid") - check_process_status(pid_file) - - def decommission(self, env): - import params - env.set_params(params) - - hbase_decommission(env) - - -if __name__ == "__main__": - HbaseMaster().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_regionserver.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_regionserver.py deleted file mode 100755 index cf0efef..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_regionserver.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. 
The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -""" - -import sys -from resource_management import * - -from hbase import hbase -from hbase_service import hbase_service - - -class HbaseRegionServer(Script): - def install(self, env): - self.install_packages(env) - - def configure(self, env, action = None): - import params - env.set_params(params) - - hbase('regionserver', action) - - def start(self, env): - import params - env.set_params(params) - self.configure(env, action = 'start') # for security - - hbase_service( 'regionserver', - action = 'start' - ) - - def stop(self, env): - import params - env.set_params(params) - - hbase_service( 'regionserver', - action = 'stop' - ) - - def status(self, env): - import status_params - env.set_params(status_params) - pid_file = format("{pid_dir}/hbase-{hbase_user}-regionserver.pid") - check_process_status(pid_file) - - def decommission(self, env): - print "Decommission not yet implemented!" 
- - -if __name__ == "__main__": - HbaseRegionServer().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_service.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_service.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_service.py deleted file mode 100755 index 5f03ca0..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/hbase_service.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -from resource_management import * - -def hbase_service( - name, - action = 'start'): # 'start' or 'stop' or 'status' - - import params - - role = name - cmd = format("{daemon_script} --config {hbase_conf_dir}") - pid_file = format("{hbase_pid_dir}/hbase-{hbase_user}-{role}.pid") - no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1") - - if action == 'start': - daemon_cmd = format("{cmd} start {role}") - - Execute ( daemon_cmd, - not_if = no_op_test, - user = params.hbase_user - ) - elif action == 'stop': - daemon_cmd = format("{cmd} stop {role}") - - Execute ( daemon_cmd, - user = params.hbase_user, - # BUGFIX: hbase regionserver sometimes hangs when nn is in safemode - timeout = 30, - on_timeout = format("{no_op_test} && kill -9 `cat {pid_file}`") - ) - - File(pid_file, - action = "delete", - ) http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/metrics_collector.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/metrics_collector.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/metrics_collector.py deleted file mode 100755 index cf498ec..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/metrics_collector.py +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. 
You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -""" - -from resource_management import * -from resource_management.libraries.functions.security_commons import build_expectations, \ - cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \ - FILE_TYPE_XML -from ams import ams -from ams_service import ams_service -from hbase import hbase -from status import check_service_status -from ambari_commons import OSConst -from ambari_commons.os_family_impl import OsFamilyImpl - -class AmsCollector(Script): - def install(self, env): - self.install_packages(env) - - def configure(self, env, action = None): - import params - env.set_params(params) - hbase('master', action) - hbase('regionserver', action) - ams(name='collector') - - def start(self, env): - self.configure(env, action = 'start') # for security - # stop hanging components before start - ams_service('collector', action = 'stop') - ams_service('collector', action = 'start') - - def stop(self, env): - import params - env.set_params(params) - # Sometimes, stop() may be called before start(), in case restart() is initiated right after installation - self.configure(env, action = 'stop') # for security - ams_service('collector', action = 'stop') - - def status(self, env): - import status_params - env.set_params(status_params) - check_service_status(name='collector') - - -@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT) -class AmsCollectorDefault(AmsCollector): - def security_status(self, env): - import status_params - - env.set_params(status_params) - props_value_check = {"hbase.security.authentication": "kerberos", - 
"hbase.security.authorization": "true"} - - props_empty_check = ["hbase.zookeeper.property.authProvider.1", - "hbase.master.keytab.file", - "hbase.master.kerberos.principal", - "hbase.regionserver.keytab.file", - "hbase.regionserver.kerberos.principal" - ] - props_read_check = ['hbase.master.keytab.file', 'hbase.regionserver.keytab.file'] - ams_hbase_site_expectations = build_expectations('hbase-site', props_value_check, - props_empty_check, - props_read_check) - - expectations = {} - expectations.update(ams_hbase_site_expectations) - - security_params = get_params_from_filesystem(status_params.ams_hbase_conf_dir, - {'hbase-site.xml': FILE_TYPE_XML}) - - is_hbase_distributed = security_params['hbase-site']['hbase.cluster.distributed'] - # for embedded mode, when HBase is backed by file, security state is SECURED_KERBEROS by definition when cluster is secured - if status_params.security_enabled and not is_hbase_distributed: - self.put_structured_out({"securityState": "SECURED_KERBEROS"}) - return - - result_issues = validate_security_config_properties(security_params, expectations) - - if not result_issues: # If all validations passed successfully - try: - # Double check the dict before calling execute - if ('hbase-site' not in security_params or - 'hbase.master.keytab.file' not in security_params['hbase-site'] or - 'hbase.master.kerberos.principal' not in security_params['hbase-site']): - self.put_structured_out({"securityState": "UNSECURED"}) - self.put_structured_out( - {"securityIssuesFound": "Keytab file or principal are not set property."}) - return - - cached_kinit_executor(status_params.kinit_path_local, - status_params.hbase_user, - security_params['hbase-site']['hbase.master.keytab.file'], - security_params['hbase-site']['hbase.master.kerberos.principal'], - status_params.hostname, - status_params.tmp_dir) - self.put_structured_out({"securityState": "SECURED_KERBEROS"}) - except Exception as e: - self.put_structured_out({"securityState": "ERROR"}) - 
self.put_structured_out({"securityStateErrorInfo": str(e)}) - else: - issues = [] - for cf in result_issues: - issues.append("Configuration file %s did not pass the validation. Reason: %s" % ( - cf, result_issues[cf])) - self.put_structured_out({"securityIssuesFound": ". ".join(issues)}) - self.put_structured_out({"securityState": "UNSECURED"}) - - -@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY) -class AmsCollectorWindows(AmsCollector): - def install(self, env): - self.install_packages(env) - self.configure(env) # for security - -if __name__ == "__main__": - AmsCollector().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/metrics_monitor.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/metrics_monitor.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/metrics_monitor.py deleted file mode 100755 index f6cce8f..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/metrics_monitor.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. - -""" - -from resource_management import * -from ams import ams -from ams_service import ams_service -from status import check_service_status - -class AmsMonitor(Script): - def install(self, env): - self.install_packages(env) - self.configure(env) # for security - - def configure(self, env): - import params - env.set_params(params) - ams(name='monitor') - - def start(self, env): - self.configure(env) # for security - - ams_service( 'monitor', - action = 'start' - ) - - def stop(self, env): - import params - env.set_params(params) - - ams_service( 'monitor', - action = 'stop' - ) - - def status(self, env): - import status_params - env.set_params(status_params) - check_service_status(name='monitor') - - -if __name__ == "__main__": - AmsMonitor().execute() - http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params.py deleted file mode 100755 index b0a2b75..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params.py +++ /dev/null @@ -1,257 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. 
You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -""" - -from functions import calc_xmn_from_xms -from functions import check_append_heap_property -from functions import trim_heap_property - -from resource_management import * -import status_params -from ambari_commons import OSCheck - - -if OSCheck.is_windows_family(): - from params_windows import * -else: - from params_linux import * -# server configurations -config = Script.get_config() -exec_tmp_dir = Script.get_tmp_dir() - -def get_combined_memory_mb(value1, value2): - try: - part1 = int(value1.strip()[:-1]) if value1.lower().strip()[-1:] == 'm' else int(value1) - part2 = int(value2.strip()[:-1]) if value2.lower().strip()[-1:] == 'm' else int(value2) - return str(part1 + part2) + 'm' - except: - return None -pass - -#AMBARI_METRICS data -ams_pid_dir = status_params.ams_collector_pid_dir - -ams_collector_script = "/usr/sbin/ambari-metrics-collector" -ams_collector_pid_dir = status_params.ams_collector_pid_dir -ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", []) -if 'cluster-env' in config['configurations'] and \ - 'metrics_collector_vip_host' in config['configurations']['cluster-env']: - metric_collector_host = config['configurations']['cluster-env']['metrics_collector_vip_host'] -else: - metric_collector_host = ams_collector_hosts[0] -if 'cluster-env' in config['configurations'] and \ - 'metrics_collector_vip_port' in config['configurations']['cluster-env']: - metric_collector_port = config['configurations']['cluster-env']['metrics_collector_vip_port'] -else: - metric_collector_web_address = 
default("/configurations/ams-site/timeline.metrics.service.webapp.address", "0.0.0.0:6188") - if metric_collector_web_address.find(':') != -1: - metric_collector_port = metric_collector_web_address.split(':')[1] - else: - metric_collector_port = '6188' - -ams_collector_log_dir = config['configurations']['ams-env']['metrics_collector_log_dir'] -ams_monitor_log_dir = config['configurations']['ams-env']['metrics_monitor_log_dir'] - -ams_monitor_dir = "/usr/lib/python2.6/site-packages/resource_monitoring" -ams_monitor_pid_dir = status_params.ams_monitor_pid_dir -ams_monitor_script = "/usr/sbin/ambari-metrics-monitor" - -ams_hbase_home_dir = "/usr/lib/ams-hbase/" - -ams_hbase_normalizer_enabled = default("/configurations/ams-hbase-site/hbase.normalizer.enabled", None) -ams_hbase_fifo_compaction_enabled = default("/configurations/ams-site/timeline.metrics.hbase.fifo.compaction.enabled", None) - -#hadoop params - -hbase_excluded_hosts = config['commandParams']['excluded_hosts'] -hbase_drain_only = config['commandParams']['mark_draining_only'] -hbase_included_hosts = config['commandParams']['included_hosts'] - -hbase_user = status_params.hbase_user -smokeuser = config['configurations']['cluster-env']['smokeuser'] -hbase_root_dir = config['configurations']['ams-hbase-site']['hbase.rootdir'] -hbase_pid_dir = status_params.hbase_pid_dir - -is_hbase_distributed = config['configurations']['ams-hbase-site']['hbase.cluster.distributed'] -is_local_fs_rootdir = hbase_root_dir.startswith('file://') -is_ams_distributed = config['configurations']['ams-site']['timeline.metrics.service.operation.mode'] == 'distributed' - -# security is disabled for embedded mode, when HBase is backed by file -security_enabled = False if not is_hbase_distributed else config['configurations']['cluster-env']['security_enabled'] - -# this is "hadoop-metrics.properties" for 1.x stacks -metric_prop_file_name = "hadoop-metrics2-hbase.properties" - -# not supporting 32 bit jdk. 
-java64_home = config['hostLevelParams']['java_home'] -java_version = int(config['hostLevelParams']['java_version']) - -metrics_collector_heapsize = default('/configurations/ams-env/metrics_collector_heapsize', "512") -host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False) -metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60) -metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 60) - -hbase_log_dir = config['configurations']['ams-hbase-env']['hbase_log_dir'] -hbase_classpath_additional = default("/configurations/ams-hbase-env/hbase_classpath_additional", None) -master_heapsize = config['configurations']['ams-hbase-env']['hbase_master_heapsize'] -regionserver_heapsize = config['configurations']['ams-hbase-env']['hbase_regionserver_heapsize'] - -# Check if hbase java options already have appended "m". If Yes, remove the trailing m. -metrics_collector_heapsize = check_append_heap_property(str(metrics_collector_heapsize), "m") -master_heapsize = check_append_heap_property(str(master_heapsize), "m") -regionserver_heapsize = check_append_heap_property(str(regionserver_heapsize), "m") - -regionserver_xmn_max = default('/configurations/ams-hbase-env/hbase_regionserver_xmn_max', None) -if regionserver_xmn_max: - regionserver_xmn_max = int(trim_heap_property(str(regionserver_xmn_max), "m")) - regionserver_xmn_percent = config['configurations']['ams-hbase-env']['hbase_regionserver_xmn_ratio'] - regionserver_xmn_size = calc_xmn_from_xms(regionserver_heapsize, regionserver_xmn_percent, regionserver_xmn_max) -else: - regionserver_xmn_size = config['configurations']['ams-hbase-env']['regionserver_xmn_size'] -pass - -hbase_master_xmn_size = config['configurations']['ams-hbase-env']['hbase_master_xmn_size'] -hbase_master_maxperm_size = config['configurations']['ams-hbase-env']['hbase_master_maxperm_size'] - -# Check if hbase java options already have appended "m". 
If Yes, remove the trailing m. -hbase_master_maxperm_size = check_append_heap_property(str(hbase_master_maxperm_size), "m") -hbase_master_xmn_size = check_append_heap_property(str(hbase_master_xmn_size), "m") -regionserver_xmn_size = check_append_heap_property(str(regionserver_xmn_size), "m") - -# Choose heap size for embedded mode as sum of master + regionserver -if not is_hbase_distributed: - hbase_heapsize = get_combined_memory_mb(master_heapsize, regionserver_heapsize) - if hbase_heapsize is None: - hbase_heapsize = master_heapsize -else: - hbase_heapsize = master_heapsize - -max_open_files_limit = default("/configurations/ams-hbase-env/max_open_files_limit", "32768") - -if not is_hbase_distributed: - zookeeper_quorum_hosts = 'localhost' - zookeeper_clientPort = '61181' -else: - zookeeper_quorum_hosts = default("/hostname", 'localhost') - if 'zoo.cfg' in config['configurations'] and 'clientPort' in config['configurations']['zoo.cfg']: - zookeeper_clientPort = config['configurations']['zoo.cfg']['clientPort'] - else: - zookeeper_clientPort = '2181' - -ams_checkpoint_dir = config['configurations']['ams-site']['timeline.metrics.aggregator.checkpoint.dir'] -hbase_pid_dir = status_params.hbase_pid_dir -_hbase_tmp_dir = config['configurations']['ams-hbase-site']['hbase.tmp.dir'] -hbase_tmp_dir = substitute_vars(_hbase_tmp_dir, config['configurations']['ams-hbase-site']) -_zookeeper_data_dir = config['configurations']['ams-hbase-site']['hbase.zookeeper.property.dataDir'] -zookeeper_data_dir = substitute_vars(_zookeeper_data_dir, config['configurations']['ams-hbase-site']) -# TODO UPGRADE default, update site during upgrade -_local_dir_conf = default('/configurations/ams-hbase-site/hbase.local.dir', "${hbase.tmp.dir}/local") -local_dir = substitute_vars(_local_dir_conf, config['configurations']['ams-hbase-site']) - -phoenix_max_global_mem_percent = default('/configurations/ams-site/phoenix.query.maxGlobalMemoryPercentage', '20') -phoenix_client_spool_dir = 
default('/configurations/ams-site/phoenix.spool.directory', '/tmp') -phoenix_server_spool_dir = default('/configurations/ams-hbase-site/phoenix.spool.directory', '/tmp') -# Substitute vars if present -phoenix_client_spool_dir = substitute_vars(phoenix_client_spool_dir, config['configurations']['ams-hbase-site']) -phoenix_server_spool_dir = substitute_vars(phoenix_server_spool_dir, config['configurations']['ams-hbase-site']) - -client_jaas_config_file = format("{hbase_conf_dir}/hbase_client_jaas.conf") -master_jaas_config_file = format("{hbase_conf_dir}/hbase_master_jaas.conf") -regionserver_jaas_config_file = format("{hbase_conf_dir}/hbase_regionserver_jaas.conf") - -rs_hosts = ["localhost"] - -smoke_test_user = config['configurations']['cluster-env']['smokeuser'] -smokeuser_permissions = "RWXCA" -service_check_data = functions.get_unique_id_and_date() -user_group = config['configurations']['cluster-env']["user_group"] -hadoop_user = "hadoop" - -kinit_cmd = "" - -if security_enabled: - _hostname_lowercase = config['hostname'].lower() - client_jaas_config_file = format("{hbase_conf_dir}/hbase_client_jaas.conf") - smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab'] - hbase_user_keytab = config['configurations']['ams-hbase-env']['hbase_user_keytab'] - - ams_collector_jaas_config_file = format("{hbase_conf_dir}/ams_collector_jaas.conf") - ams_collector_keytab_path = config['configurations']['ams-hbase-security-site']['hbase.myclient.keytab'] - ams_collector_jaas_princ = config['configurations']['ams-hbase-security-site']['hbase.myclient.principal'].replace('_HOST',_hostname_lowercase) - - ams_zookeeper_jaas_config_file = format("{hbase_conf_dir}/ams_zookeeper_jaas.conf") - ams_zookeeper_keytab = config['configurations']['ams-hbase-security-site']['ams.zookeeper.keytab'] - ams_zookeeper_principal_name = config['configurations']['ams-hbase-security-site']['ams.zookeeper.principal'].replace('_HOST',_hostname_lowercase) - - 
master_jaas_config_file = format("{hbase_conf_dir}/hbase_master_jaas.conf") - master_keytab_path = config['configurations']['ams-hbase-security-site']['hbase.master.keytab.file'] - master_jaas_princ = config['configurations']['ams-hbase-security-site']['hbase.master.kerberos.principal'].replace('_HOST',_hostname_lowercase) - - regionserver_jaas_config_file = format("{hbase_conf_dir}/hbase_regionserver_jaas.conf") - regionserver_keytab_path = config['configurations']['ams-hbase-security-site']['hbase.regionserver.keytab.file'] - regionserver_jaas_princ = config['configurations']['ams-hbase-security-site']['hbase.regionserver.kerberos.principal'].replace('_HOST',_hostname_lowercase) - - zk_servicename = ams_zookeeper_principal_name.rpartition('/')[0] - -#log4j.properties -if (('ams-hbase-log4j' in config['configurations']) and ('content' in config['configurations']['ams-hbase-log4j'])): - hbase_log4j_props = config['configurations']['ams-hbase-log4j']['content'] -else: - hbase_log4j_props = None - -if (('ams-log4j' in config['configurations']) and ('content' in config['configurations']['ams-log4j'])): - log4j_props = config['configurations']['ams-log4j']['content'] -else: - log4j_props = None - -hbase_env_sh_template = config['configurations']['ams-hbase-env']['content'] -ams_env_sh_template = config['configurations']['ams-env']['content'] - -hbase_staging_dir = default("/configurations/ams-hbase-site/hbase.bulkload.staging.dir", "/amshbase/staging") - -#for create_hdfs_directory -hostname = config["hostname"] -hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] -hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] -hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] -kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) - - - -hdfs_site = config['configurations']['hdfs-site'] -default_fs = 
config['configurations']['core-site']['fs.defaultFS'] - -import functools -#create partial functions with common arguments for every HdfsResource call -#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code -HdfsResource = functools.partial( - HdfsResource, - user=hdfs_user, - security_enabled = security_enabled, - keytab = hdfs_user_keytab, - kinit_path_local = kinit_path_local, - hadoop_bin_dir = hadoop_bin_dir, - hadoop_conf_dir = hadoop_conf_dir, - principal_name = hdfs_principal_name, - hdfs_site = hdfs_site, - default_fs = default_fs - ) - - - http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params_linux.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params_linux.py deleted file mode 100755 index 838e987..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params_linux.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. - -""" - -from resource_management import * -from resource_management.libraries.functions import conf_select -from ambari_commons import OSCheck -from ambari_commons.constants import AMBARI_SUDO_BINARY - -config = Script.get_config() - -ams_collector_conf_dir = "/etc/ambari-metrics-collector/conf" -ams_monitor_conf_dir = "/etc/ambari-metrics-monitor/conf/" -ams_user = config['configurations']['ams-env']['ambari_metrics_user'] -#RPM versioning support -rpm_version = default("/configurations/hadoop-env/rpm_version", None) - -#hadoop params -if rpm_version is not None: - #RPM versioning support - rpm_version = default("/configurations/hadoop-env/rpm_version", None) - -hadoop_native_lib = format("/usr/lib/ams-hbase/lib/hadoop-native") -hadoop_bin_dir = "/usr/bin" -daemon_script = "/usr/lib/ams-hbase/bin/hbase-daemon.sh" -region_mover = "/usr/lib/ams-hbase/bin/region_mover.rb" -region_drainer = "/usr/lib/ams-hbase/bin/draining_servers.rb" -hbase_cmd = "/usr/lib/ams-hbase/bin/hbase" - -hadoop_conf_dir = conf_select.get_hadoop_conf_dir() -hbase_conf_dir = "/etc/ams-hbase/conf" - -limits_conf_dir = "/etc/security/limits.d" -sudo = AMBARI_SUDO_BINARY http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params_windows.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params_windows.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params_windows.py deleted file mode 100755 index acb5bba..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/params_windows.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache 
Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -""" - -import os - -from resource_management.libraries.script.script import Script - - -config = Script.get_config() - -hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"] -ams_user = hadoop_user - -try: - ams_collector_conf_dir = os.environ["COLLECTOR_CONF_DIR"] - ams_collector_home_dir = os.environ["COLLECTOR_HOME"] - hbase_cmd = os.path.join(os.environ["COLLECTOR_HOME"], "hbase", "bin", "hbase.cmd") - hbase_conf_dir = os.path.join(os.environ["COLLECTOR_HOME"], "hbase", "conf") -except: - ams_collector_conf_dir = None - ams_collector_home_dir = None - hbase_cmd = None - hbase_conf_dir = None - -try: - ams_monitor_conf_dir = os.environ["MONITOR_CONF_DIR"] - ams_monitor_home_dir = os.environ["MONITOR_HOME"] -except: - ams_monitor_conf_dir = None - ams_monitor_home_dir = None - -hadoop_native_lib = os.path.join(os.environ["HADOOP_HOME"], "bin") -hadoop_bin_dir = os.path.join(os.environ["HADOOP_HOME"], "bin") -hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf") - -from service_mapping import * http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/service_check.py 
---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/service_check.py deleted file mode 100755 index f19c823..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/service_check.py +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -""" - -from resource_management.core.logger import Logger -from resource_management.core.base import Fail -from resource_management import Script -from resource_management import Template - -from ambari_commons import OSConst -from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl - -import httplib -import urllib -import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set. 
import os
import random
import time
import socket


class AMSServiceCheck(Script):
  """Smoke test for Ambari Metrics: verify the collector accepts and serves metrics."""

  # REST endpoints of the Metrics Collector timeline service.
  AMS_METRICS_POST_URL = "/ws/v1/timeline/metrics/"
  AMS_METRICS_GET_URL = "/ws/v1/timeline/metrics?%s"
  # Retry policy when contacting the collector: up to 30 attempts, 15s apart.
  AMS_CONNECT_TRIES = 30
  AMS_CONNECT_TIMEOUT = 15

  @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
  def service_check(self, env):
    """Windows variant: only verify the services were registered at install time."""
    from resource_management.libraries.functions.windows_service_utils import check_windows_service_exists
    import params

    env.set_params(params)

    #Just check that the services were correctly installed
    #Check the monitor on all hosts
    Logger.info("Metrics Monitor service check was started.")
    if not check_windows_service_exists(params.ams_monitor_win_service_name):
      raise Fail("Metrics Monitor service was not properly installed. Check the logs and retry the installation.")
    #Check the collector only where installed
    if params.ams_collector_home_dir and os.path.isdir(params.ams_collector_home_dir):
      Logger.info("Metrics Collector service check was started.")
      if not check_windows_service_exists(params.ams_collector_win_service_name):
        raise Fail("Metrics Collector service was not properly installed. Check the logs and retry the installation.")

  @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
  def service_check(self, env):
    """Default variant: POST a fake metric to the collector, then GET it back
    and check the posted values round-tripped."""
    import params

    Logger.info("Ambari Metrics service check was started.")
    env.set_params(params)

    # Random value lets us recognize *this* run's metric in the GET response.
    random_value1 = random.random()
    headers = {"Content-type": "application/json"}

    for i in xrange(0, self.AMS_CONNECT_TRIES):
      try:
        # Collector stores timestamps in milliseconds.
        current_time = int(time.time()) * 1000
        metric_json = Template('smoketest_metrics.json.j2', hostname=params.hostname, random1=random_value1,
                               current_time=current_time).get_content()
        Logger.info("Generated metrics:\n%s" % metric_json)

        Logger.info("Connecting (POST) to %s:%s%s" % (params.metric_collector_host,
                                                      params.metric_collector_port,
                                                      self.AMS_METRICS_POST_URL))
        conn = httplib.HTTPConnection(params.metric_collector_host,
                                      int(params.metric_collector_port))
        conn.request("POST", self.AMS_METRICS_POST_URL, metric_json, headers)

        response = conn.getresponse()
        Logger.info("Http response: %s %s" % (response.status, response.reason))
      except (httplib.HTTPException, socket.error) as ex:
        # Connection-level failure: sleep and retry unless attempts exhausted.
        if i < self.AMS_CONNECT_TRIES - 1:  #range/xrange returns items from start to end-1
          time.sleep(self.AMS_CONNECT_TIMEOUT)
          Logger.info("Connection failed. Next retry in %s seconds."
                      % (self.AMS_CONNECT_TIMEOUT))
          continue
        else:
          raise Fail("Metrics were not saved. Service check has failed. "
                     "\nConnection failed.")

      data = response.read()
      Logger.info("Http data: %s" % data)
      conn.close()

      if response.status == 200:
        Logger.info("Metrics were saved.")
        break
      else:
        # HTTP-level failure (non-200): also retried with the same backoff.
        Logger.info("Metrics were not saved. Service check has failed.")
        if i < self.AMS_CONNECT_TRIES - 1:  #range/xrange returns items from start to end-1
          time.sleep(self.AMS_CONNECT_TIMEOUT)
          Logger.info("Next retry in %s seconds."
                      % (self.AMS_CONNECT_TIMEOUT))
        else:
          raise Fail("Metrics were not saved. Service check has failed. POST request status: %s %s \n%s" %
                     (response.status, response.reason, data))

    # Read the metric back over a window bracketing the posted timestamps.
    get_metrics_parameters = {
      "metricNames": "AMBARI_METRICS.SmokeTest.FakeMetric",
      "appId": "amssmoketestfake",
      "hostname": params.hostname,
      "startTime": current_time - 60000,
      "endTime": current_time + 61000,
      "precision": "seconds",
      "grouped": "false",
    }
    encoded_get_metrics_parameters = urllib.urlencode(get_metrics_parameters)

    Logger.info("Connecting (GET) to %s:%s%s" % (params.metric_collector_host,
                                                 params.metric_collector_port,
                                                 self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters))

    conn = httplib.HTTPConnection(params.metric_collector_host,
                                  int(params.metric_collector_port))
    conn.request("GET", self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters)
    response = conn.getresponse()
    Logger.info("Http response: %s %s" % (response.status, response.reason))

    data = response.read()
    Logger.info("Http data: %s" % data)
    conn.close()

    if response.status == 200:
      Logger.info("Metrics were retrieved.")
    else:
      Logger.info("Metrics were not retrieved. Service check has failed.")
      raise Fail("Metrics were not retrieved. Service check has failed. GET request status: %s %s \n%s" %
                 (response.status, response.reason, data))
    data_json = json.loads(data)

    def floats_eq(f1, f2, delta):
      # Tolerance-based float comparison for round-tripped metric values.
      return abs(f1-f2) < delta

    # The smoketest template posts random_value1 at current_time and
    # current_time at current_time+1000; verify both came back.
    for metrics_data in data_json["metrics"]:
      if (str(current_time) in metrics_data["metrics"] and str(current_time + 1000) in metrics_data["metrics"]
          and floats_eq(metrics_data["metrics"][str(current_time)], random_value1, 0.0000001)
          and floats_eq(metrics_data["metrics"][str(current_time + 1000)], current_time, 1)):
        Logger.info("Values %s and %s were found in the response." % (random_value1, current_time))
        break
      pass
    else:
      # for/else: reached only when no metric entry matched (no break).
      Logger.info("Values %s and %s were not found in the response." % (random_value1, current_time))
      raise Fail("Values %s and %s were not found in the response." % (random_value1, current_time))

    Logger.info("Ambari Metrics service check is finished.")

if __name__ == "__main__":
  AMSServiceCheck().execute()
- -""" -ams_collector_win_service_name = "AmbariMetricsCollector" -ams_monitor_win_service_name = "AmbariMetricsHostMonitoring" -ams_embedded_hbase_win_service_name = "ams_hbase_master" \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/split_points.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/split_points.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/split_points.py deleted file mode 100755 index fa4deaf..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/split_points.py +++ /dev/null @@ -1,236 +0,0 @@ -# !/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -import os -import sys -import re -import math -import collections -import ast - -metric_filename_ext = '.txt' -# 5 regions for higher order aggregate tables -other_region_static_count = 6 -# Max equidistant points to return per service -max_equidistant_points = 50 - -b_bytes = 1 -k_bytes = 1 << 10 # 1024 -m_bytes = 1 << 20 # 1024^2 -g_bytes = 1 << 30 # 1024^3 -t_bytes = 1 << 40 # 1024^4 -p_bytes = 1 << 50 # 1024^5 - -def to_number(s): - try: - return int(re.sub("\D", "", s)) - except ValueError: - return None - -def format_Xmx_size_to_bytes(value, default='b'): - strvalue = str(value).lower() - if len(strvalue) == 0: - return 0 - modifier = strvalue[-1] - - if modifier == ' ' or modifier in "0123456789": - modifier = default - - m = { - modifier == 'b': b_bytes, - modifier == 'k': k_bytes, - modifier == 'm': m_bytes, - modifier == 'g': g_bytes, - modifier == 't': t_bytes, - modifier == 'p': p_bytes - } [1] - return to_number(strvalue) * m - -# Class that takes AMS HBase configs as input and determines the Region -# pre-splits based on selected services also passed as a parameter to the class. 
-class FindSplitPointsForAMSRegions(): - - def __init__(self, ams_hbase_site, ams_hbase_env, serviceMetricsDir, - operation_mode = 'embedded', services = None): - self.ams_hbase_site = ams_hbase_site - self.ams_hbase_env = ams_hbase_env - self.serviceMetricsDir = serviceMetricsDir - self.services = services - self.mode = operation_mode - # Add host metrics if not present as input - if self.services and 'HOST' not in self.services: - self.services.append('HOST') - - # Initialize before user - self.initialize() - - def initialize(self): - # calculate regions based on available memory - self.initialize_region_counts() - self.initialize_ordered_set_of_metrics() - - def initialize_region_counts(self): - try: - xmx_master_bytes = format_Xmx_size_to_bytes(self.ams_hbase_env['hbase_master_heapsize'], 'm') - xmx_region_bytes = 0 - if "hbase_regionserver_heapsize" in self.ams_hbase_env: - xmx_region_bytes = format_Xmx_size_to_bytes(self.ams_hbase_env['hbase_regionserver_heapsize'], 'm') - xmx_bytes = xmx_master_bytes + xmx_region_bytes - if self.mode == 'distributed': - xmx_bytes = xmx_region_bytes - - memstore_max_mem = float(self.ams_hbase_site['hbase.regionserver.global.memstore.lowerLimit']) * xmx_bytes - memstore_flush_size = format_Xmx_size_to_bytes(self.ams_hbase_site['hbase.hregion.memstore.flush.size']) - - max_inmemory_regions = (memstore_max_mem / memstore_flush_size) - other_region_static_count - print 'max_inmemory_regions: %s' % max_inmemory_regions - - if max_inmemory_regions > 2: - # Lets say total = 12, so we have 7 regions to allocate between - # METRIC_RECORD and METRIC_AGGREGATE tables, desired = (5, 2) - self.desired_precision_region_count = int(math.floor(0.8 * max_inmemory_regions)) - self.desired_aggregate_region_count = int(max_inmemory_regions - self.desired_precision_region_count) - else: - self.desired_precision_region_count = 1 - self.desired_aggregate_region_count = 1 - - except: - print('Bad config settings, could not calculate max regions 
available.') - pass - - def initialize_ordered_set_of_metrics(self): - onlyServicefiles = [ f for f in os.listdir(self.serviceMetricsDir) if - os.path.isfile(os.path.join(self.serviceMetricsDir, f)) ] - - metrics = set() - - for file in onlyServicefiles: - # Process for services selected at deploy time or all services if - # services arg is not passed - if self.services is None or file.rstrip(metric_filename_ext) in self.services: - print 'Processing file: %s' % os.path.join(self.serviceMetricsDir, file) - service_metrics = set() - with open(os.path.join(self.serviceMetricsDir, file), 'r') as f: - for metric in f: - service_metrics.add(metric.strip()) - pass - pass - metrics.update(self.find_equidistant_metrics(list(sorted(service_metrics)))) - pass - pass - - self.metrics = sorted(metrics) - print 'metrics length: %s' % len(self.metrics) - - # Pick 50 metric points for each service that are equidistant from - # each other for a service - def find_equidistant_metrics(self, service_metrics): - equi_metrics = [] - idx = len(service_metrics) / max_equidistant_points - if idx == 0: - return service_metrics - pass - - index = idx - for i in range(0, max_equidistant_points - 1): - equi_metrics.append(service_metrics[index - 1]) - index += idx - pass - - return equi_metrics - - def get_split_points(self): - split_points = collections.namedtuple('SplitPoints', [ 'precision', 'aggregate' ]) - split_points.precision = [] - split_points.aggregate = [] - - metric_list = list(self.metrics) - metrics_total = len(metric_list) - - if self.desired_precision_region_count > 1: - idx = int(math.ceil(metrics_total / self.desired_precision_region_count)) - index = idx - for i in range(0, self.desired_precision_region_count - 1): - if index < metrics_total - 1: - split_points.precision.append(metric_list[index]) - index += idx - - if self.desired_aggregate_region_count > 1: - idx = int(math.ceil(metrics_total / self.desired_aggregate_region_count)) - index = idx - for i in range(0, 
self.desired_aggregate_region_count - 1): - if index < metrics_total - 1: - split_points.aggregate.append(metric_list[index]) - index += idx - - return split_points - pass - -def main(argv = None): - scriptDir = os.path.realpath(os.path.dirname(argv[0])) - serviceMetricsDir = os.path.join(scriptDir, os.pardir, 'files', 'service-metrics') - - if os.path.exists(serviceMetricsDir): - onlyargs = argv[1:] - if len(onlyargs) < 3: - sys.stderr.write("Usage: dict(ams-hbase-site) dict(ams-hbase-env) list(services)\n") - sys.exit(2) - pass - - ams_hbase_site = None - ams_hbase_env = None - services = None - try: - ams_hbase_site = ast.literal_eval(str(onlyargs[0])) - ams_hbase_env = ast.literal_eval(str(onlyargs[1])) - services = onlyargs[2] - if services: - services = str(services).split(',') - pass - except Exception, ex: - sys.stderr.write(str(ex)) - sys.stderr.write("\nUsage: Expected items not found in input. Found " - " ams-hbase-site => {0}, ams-hbase-env => {1}," - " services => {2}".format(ams_hbase_site, ams_hbase_env, services)) - sys.exit(2) - - print '--------- AMS Regions Split point finder ---------' - print 'Services: %s' % services - - mode = 'distributed' if 'hbase.rootdir' in ams_hbase_site and \ - 'hdfs' in ams_hbase_site['hbase.rootdir'] else \ - 'embedded' - - split_point_finder = FindSplitPointsForAMSRegions( - ams_hbase_site, ams_hbase_env, serviceMetricsDir, mode, services) - - result = split_point_finder.get_split_points() - print 'Split points for precision table : %s' % len(result.precision) - print 'precision: %s' % str(result.precision) - print 'Split points for aggregate table : %s' % len(result.aggregate) - print 'aggregate: %s' % str(result.aggregate) - - return 0 - - else: - print 'Cannot find service metrics dir in %s' % scriptDir - -if __name__ == '__main__': - main(sys.argv) http://git-wip-us.apache.org/repos/asf/ambari/blob/0aab3803/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/status.py 
---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/status.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/status.py deleted file mode 100755 index 59466ad..0000000 --- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/AMBARI_METRICS/package/scripts/status.py +++ /dev/null @@ -1,46 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -from resource_management import * -from ambari_commons import OSConst -from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl -import os - -@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT) -def check_service_status(name): - if name=='collector': - pid_file = format("{ams_collector_pid_dir}/ambari-metrics-collector.pid") - check_process_status(pid_file) - pid_file = format("{hbase_pid_dir}/hbase-{hbase_user}-master.pid") - check_process_status(pid_file) - if os.path.exists(format("{hbase_pid_dir}/distributed_mode")): - pid_file = format("{hbase_pid_dir}/hbase-{hbase_user}-regionserver.pid") - check_process_status(pid_file) - - elif name == 'monitor': - pid_file = format("{ams_monitor_pid_dir}/ambari-metrics-monitor.pid") - check_process_status(pid_file) - -@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY) -def check_service_status(name): - import service_mapping - if name=='collector': - check_windows_service_status(service_mapping.ams_collector_win_service_name) - elif name == 'monitor': - check_windows_service_status(service_mapping.ams_monitor_win_service_name)