http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/hive_service.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/hive_service.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/hive_service.py new file mode 100644 index 0000000..8507816 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/hive_service.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" + +from resource_management import * +import socket +import sys +import time +from resource_management.core.shell import call + +def hive_service( + name, + action='start'): + + import params + + if name == 'metastore': + pid_file = format("{hive_pid_dir}/{hive_metastore_pid}") + cmd = format( + "env HADOOP_HOME={hadoop_home} JAVA_HOME={java64_home} {start_metastore_path} {hive_log_dir}/hive.out {hive_log_dir}/hive.log {pid_file} {hive_server_conf_dir} {hive_log_dir}") + elif name == 'hiveserver2': + pid_file = format("{hive_pid_dir}/{hive_pid}") + cmd = format( + "env JAVA_HOME={java64_home} {start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.log {pid_file} {hive_server_conf_dir} {hive_log_dir}") + + process_id_exists = format("ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1") + + if action == 'start': + if name == 'hiveserver2': + check_fs_root() + + demon_cmd = format("{cmd}") + + Execute(demon_cmd, + user=params.hive_user, + not_if=process_id_exists + ) + + if params.hive_jdbc_driver == "com.mysql.jdbc.Driver" or \ + params.hive_jdbc_driver == "org.postgresql.Driver" or \ + params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver": + + db_connection_check_command = format( + "{java64_home}/bin/java -cp {check_db_connection_jar}:/usr/share/java/{jdbc_jar_name} org.apache.ambari.server.DBConnectionVerification '{hive_jdbc_connection_url}' {hive_metastore_user_name} {hive_metastore_user_passwd!p} {hive_jdbc_driver}") + + Execute(db_connection_check_command, + path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin', tries=5, try_sleep=10) + + # AMBARI-5800 - wait for the server to come up instead of just the PID existance + if name == 'hiveserver2': + SOCKET_WAIT_SECONDS = 120 + address=params.hive_server_host + port=int(params.hive_server_port) + + start_time = time.time() + end_time = start_time + SOCKET_WAIT_SECONDS + + s = socket.socket() + s.settimeout(5) + + is_service_socket_valid = False + print 
"Waiting for the Hive server to start..." + try: + while time.time() < end_time: + try: + s.connect((address, port)) + is_service_socket_valid = True + break + except socket.error, e: + time.sleep(5) + finally: + s.close() + + elapsed_time = time.time() - start_time + + if is_service_socket_valid == False: + raise Fail("Connection to Hive server %s on port %s failed after %d seconds" % (address, port, elapsed_time)) + + print "Successfully connected to Hive at %s on port %s after %d seconds" % (address, port, elapsed_time) + + elif action == 'stop': + demon_cmd = format("kill `cat {pid_file}` >/dev/null 2>&1 && rm -f {pid_file}") + Execute(demon_cmd, + not_if = format("! ({process_id_exists})") + ) + +def check_fs_root(): + import params + fs_root_url = format("{fs_root}{hive_apps_whs_dir}") + cmd = "/usr/lib/hive/bin/metatool -listFSRoot 2>/dev/null | grep hdfs://" + code, out = call(cmd, user=params.hive_user) + if code == 0 and fs_root_url.strip() != out.strip(): + cmd = format("/usr/lib/hive/bin/metatool -updateLocation {fs_root}{hive_apps_whs_dir} {out}") + Execute(cmd, user=params.hive_user) \ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/install_jars.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/install_jars.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/install_jars.py new file mode 100644 index 0000000..b6d542d --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/install_jars.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" + +from resource_management import * +import os +import fnmatch + +def install_tez_jars(): + import params + + destination_hdfs_dirs = get_tez_hdfs_dir_paths(params.tez_lib_uris) + + # If tez libraries are to be stored in hdfs + if destination_hdfs_dirs: + for hdfs_dir in destination_hdfs_dirs: + params.HdfsDirectory(hdfs_dir, + action="create_delayed", + owner=params.tez_user, + mode=0755 + ) + pass + params.HdfsDirectory(None, action="create") + + if params.security_enabled: + kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};") + else: + kinit_if_needed = "" + + if kinit_if_needed: + Execute(kinit_if_needed, + user=params.tez_user, + path='/bin' + ) + pass + + app_dir_path = None + lib_dir_path = None + + if len(destination_hdfs_dirs) > 1: + for path in destination_hdfs_dirs: + if 'lib' in path: + lib_dir_path = path + else: + app_dir_path = path + pass + pass + pass + + if app_dir_path: + CopyFromLocal(params.tez_local_api_jars, + mode=0755, + owner=params.tez_user, + dest_dir=app_dir_path, + kinnit_if_needed=kinit_if_needed, + hdfs_user=params.hdfs_user + ) + pass + + if lib_dir_path: + CopyFromLocal(params.tez_local_lib_jars, + mode=0755, + owner=params.tez_user, + dest_dir=lib_dir_path, + kinnit_if_needed=kinit_if_needed, + hdfs_user=params.hdfs_user + ) + pass + + +def get_tez_hdfs_dir_paths(tez_lib_uris = None): + hdfs_path_prefix = 'hdfs://' + lib_dir_paths = [] + if tez_lib_uris and tez_lib_uris.strip().find(hdfs_path_prefix, 0) != -1: + dir_paths = tez_lib_uris.split(',') + for path in dir_paths: + lib_dir_path = path.replace(hdfs_path_prefix, '') + lib_dir_path = lib_dir_path if lib_dir_path.endswith(os.sep) else lib_dir_path + os.sep + lib_dir_paths.append(lib_dir_path) + pass + pass + + return lib_dir_paths \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/mysql_server.py 
---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/mysql_server.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/mysql_server.py new file mode 100644 index 0000000..6df6059 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/mysql_server.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" + +import sys +from resource_management import * + +from mysql_service import mysql_service + +class MysqlServer(Script): + + def install(self, env): + import params + + self.install_packages(env, exclude_packages=params.hive_exclude_packages) + self.configure(env) + + def configure(self, env): + import params + env.set_params(params) + + mysql_service(daemon_name=params.daemon_name, action='start') + + File(params.mysql_adduser_path, + mode=0755, + content=StaticFile('addMysqlUser.sh') + ) + + cmd = format("bash -x {mysql_adduser_path} {daemon_name} {hive_metastore_user_name} {hive_metastore_user_passwd!p} {mysql_host[0]}") + + Execute(cmd, + tries=3, + try_sleep=5, + path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin' + ) + + def start(self, env): + import params + env.set_params(params) + + mysql_service(daemon_name=params.daemon_name, action = 'start') + + def stop(self, env): + import params + env.set_params(params) + + mysql_service(daemon_name=params.daemon_name, action = 'stop') + + def status(self, env): + import status_params + mysql_service(daemon_name=status_params.daemon_name, action = 'status') + +if __name__ == "__main__": + MysqlServer().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/mysql_service.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/mysql_service.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/mysql_service.py new file mode 100644 index 0000000..11bbdd8 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/mysql_service.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. 
See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +from resource_management import * + + +def mysql_service(daemon_name=None, action='start'): + status_cmd = format('service {daemon_name} status | grep running') + cmd = format('service {daemon_name} {action}') + + if action == 'status': + Execute(status_cmd) + elif action == 'stop': + Execute(cmd, + logoutput = True, + only_if = status_cmd + ) + elif action == 'start': + # required for running hive + replace_bind_address = format("sed -i 's|^bind-address[ \t]*=.*|bind-address = 0.0.0.0|' {mysql_configname}") + Execute(replace_bind_address) + + Execute(cmd, + logoutput = True, + not_if = status_cmd + ) + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/params.py new file mode 100644 index 0000000..9269666 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/params.py @@ -0,0 +1,215 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license 
agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +from resource_management import * +import status_params +import os + +# server configurations +config = Script.get_config() +tmp_dir = Script.get_tmp_dir() + +hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName'] +hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL'] + +hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword'] +hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type'] + +#users +hive_user = config['configurations']['hive-env']['hive_user'] +hive_lib = '/usr/lib/hive/lib/' +#JDBC driver jar name +hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName'] +if hive_jdbc_driver == "com.mysql.jdbc.Driver": + jdbc_jar_name = "mysql-connector-java.jar" + jdbc_symlink_name = "mysql-jdbc-driver.jar" +elif hive_jdbc_driver == "org.postgresql.Driver": + jdbc_jar_name = "postgresql-jdbc.jar" + jdbc_symlink_name = "postgres-jdbc-driver.jar" +elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver": + jdbc_jar_name = "ojdbc6.jar" + jdbc_symlink_name = "oracle-jdbc-driver.jar" + +check_db_connection_jar_name = "DBConnectionVerification.jar" +check_db_connection_jar = 
format("/usr/lib/ambari-agent/{check_db_connection_jar_name}") + +#common +hdp_stack_version = config['hostLevelParams']['stack_version'] +hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083" +hive_var_lib = '/var/lib/hive' +ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0] +hive_bin = '/usr/lib/hive/bin' +hive_server_host = config['clusterHostInfo']['hive_server_host'][0] +hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000") +hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}") + +smokeuser = config['configurations']['cluster-env']['smokeuser'] +smoke_test_sql = format("{tmp_dir}/hiveserver2.sql") +smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh") +smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab'] + +fs_root = config['configurations']['core-site']['fs.defaultFS'] +security_enabled = config['configurations']['cluster-env']['security_enabled'] + +kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"]) +hive_metastore_keytab_path = config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file'] + +#hive_env +hive_dbroot = config['configurations']['hive-env']['hive_dbroot'] +hive_log_dir = config['configurations']['hive-env']['hive_log_dir'] +hive_pid_dir = status_params.hive_pid_dir +hive_pid = status_params.hive_pid +#Default conf dir for client +hive_client_conf_dir = "/etc/hive/conf" +hive_server_conf_dir = "/etc/hive/conf" +hive_conf_dirs_list = [hive_server_conf_dir, hive_client_conf_dir] + +if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]: + hive_config_dir = hive_server_conf_dir +else: + hive_config_dir = hive_client_conf_dir + +#hive-site +hive_database_name = config['configurations']['hive-env']['hive_database_name'] + +#Starting hiveserver2 +start_hiveserver2_script = 'startHiveserver2.sh.j2' + +hadoop_home = 
'/usr/lib/hadoop' + +##Starting metastore +start_metastore_script = 'startMetastore.sh' +hive_metastore_pid = status_params.hive_metastore_pid +java_share_dir = '/usr/share/java' +driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}") + +hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] +user_group = config['configurations']['cluster-env']['user_group'] +artifact_dir = format("{tmp_dir}/AMBARI-artifacts/") + +target = format("{hive_lib}/{jdbc_jar_name}") + +jdk_location = config['hostLevelParams']['jdk_location'] +driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}") + +start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script") +start_metastore_path = format("{tmp_dir}/start_metastore_script") + +hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize'] +hive_heapsize = config['configurations']['hive-site']['hive.heapsize'] +java64_home = config['hostLevelParams']['java_home'] + +##### MYSQL + +db_name = config['configurations']['hive-env']['hive_database_name'] +mysql_user = "mysql" +mysql_group = 'mysql' +mysql_host = config['clusterHostInfo']['hive_mysql_host'] + +mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh") + +##### POSTGRES +postgresql_adduser_file = "addPostgreSQLUser.sh" +postgresql_adduser_path = format("{tmp_dir}/{postgresql_adduser_file}") +postgresql_host = config['clusterHostInfo']['hive_postgresql_host'] +postgresql_pghba_conf_path = "/var/lib/pgsql/data/pg_hba.conf" +postgresql_conf_path = "/var/lib/pgsql/data/postgresql.conf" +postgresql_daemon_name = status_params.postgresql_daemon_name + +######## Metastore Schema +init_metastore_schema = True + +########## HCAT +hcat_conf_dir = '/etc/hive-hcatalog/conf' +hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog' + +hcat_dbroot = hcat_lib + +hcat_user = config['configurations']['hive-env']['hcat_user'] +webhcat_user = config['configurations']['hive-env']['webhcat_user'] + +hcat_pid_dir = status_params.hcat_pid_dir +hcat_log_dir = 
config['configurations']['hive-env']['hcat_log_dir'] + +hadoop_conf_dir = '/etc/hadoop/conf' + +#hive-log4j.properties.template +if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])): + log4j_props = config['configurations']['hive-log4j']['content'] +else: + log4j_props = None + +#hive-exec-log4j.properties.template +if (('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j'])): + log4j_exec_props = config['configurations']['hive-exec-log4j']['content'] +else: + log4j_exec_props = None + +daemon_name = status_params.daemon_name +hive_env_sh_template = config['configurations']['hive-env']['content'] + +hive_hdfs_user_dir = format("/user/{hive_user}") +hive_hdfs_user_mode = 0700 +hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"] +#for create_hdfs_directory +hostname = config["hostname"] +hadoop_conf_dir = "/etc/hadoop/conf" +hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] +hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] +hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] +kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"]) + +# Tez libraries +tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None) +tez_local_api_jars = '/usr/lib/tez/tez*.jar' +tez_local_lib_jars = '/usr/lib/tez/lib/*.jar' +tez_user = config['configurations']['tez-env']['tez_user'] + +if System.get_instance().os_family == "ubuntu": + mysql_configname = '/etc/mysql/my.cnf' +else: + mysql_configname = '/etc/my.cnf' + + +# Hive security +hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled'] + +mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar" + +# There are other packages that contain /usr/share/java/mysql-connector-java.jar (like libmysql-java), +# trying to 
install mysql-connector-java upon them can cause packages to conflict. +if os.path.exists(mysql_jdbc_driver_jar): + hive_exclude_packages = ['mysql-connector-java'] +else: + hive_exclude_packages = [] + +import functools +#create partial functions with common arguments for every HdfsDirectory call +#to create hdfs directory we need to call params.HdfsDirectory in code +#create partial functions with common arguments for every HdfsDirectory call +#to create hdfs directory we need to call params.HdfsDirectory in code +HdfsDirectory = functools.partial( + HdfsDirectory, + conf_dir=hadoop_conf_dir, + hdfs_user=hdfs_user, + security_enabled = security_enabled, + keytab = hdfs_user_keytab, + kinit_path_local = kinit_path_local +) http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/postgresql_server.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/postgresql_server.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/postgresql_server.py new file mode 100644 index 0000000..a1cd13f --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/postgresql_server.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +import sys +from resource_management import * + +from postgresql_service import postgresql_service + +class PostgreSQLServer(Script): + + def install(self, env): + self.install_packages(env) + self.configure(env) + + def configure(self, env): + import params + env.set_params(params) + + # init the database, the ':' makes the command always return 0 in case the database has + # already been initialized when the postgresql server colocates with ambari server + Execute(format("service {postgresql_daemon_name} initdb || :")) + + # update the configuration files + self.update_pghda_conf(env) + self.update_postgresql_conf(env) + + # restart the postgresql server for the changes to take effect + self.stop(env) + self.start(env) + + # create the database and hive_metastore_user + File(params.postgresql_adduser_path, + mode=0755, + content=StaticFile(format("{postgresql_adduser_file}")) + ) + + cmd = format("bash -x {postgresql_adduser_path} {postgresql_daemon_name} {hive_metastore_user_name} {hive_metastore_user_passwd!p} {db_name}") + + Execute(cmd, + tries=3, + try_sleep=5, + path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin' + ) + + def start(self, env): + import params + env.set_params(params) + + postgresql_service(postgresql_daemon_name=params.postgresql_daemon_name, action = 'start') + + def stop(self, env): + import params + env.set_params(params) + + postgresql_service(postgresql_daemon_name=params.postgresql_daemon_name, action = 'stop') + + def status(self, env): + import status_params + 
postgresql_service(postgresql_daemon_name=status_params.postgresql_daemon_name, action = 'status') + + def update_postgresql_conf(self, env): + import params + env.set_params(params) + + # change the listen_address to * + Execute(format("sed -i '/^[[:space:]]*listen_addresses[[:space:]]*=.*/d' {postgresql_conf_path}")) + Execute(format("echo \"listen_addresses = '*'\" | tee -a {postgresql_conf_path}")) + + # change the standard_conforming_string to off + Execute(format("sed -i '/^[[:space:]]*standard_conforming_strings[[:space:]]*=.*/d' {postgresql_conf_path}")) + Execute(format("echo \"standard_conforming_strings = off\" | tee -a {postgresql_conf_path}")) + + def update_pghda_conf(self, env): + import params + env.set_params(params) + + # trust hive_metastore_user and postgres locally + Execute(format("sed -i '/^[[:space:]]*local[[:space:]]*all[[:space:]]*all.*$/s/^/#/' {postgresql_pghba_conf_path}")) + Execute(format("sed -i '/^[[:space:]]*local[[:space:]]*all[[:space:]]*postgres.*$/d' {postgresql_pghba_conf_path}")) + Execute(format("sed -i '/^[[:space:]]*local[[:space:]]*all[[:space:]]*\"{hive_metastore_user_name}\".*$/d' {postgresql_pghba_conf_path}")) + Execute(format("echo \"local all postgres trust\" | tee -a {postgresql_pghba_conf_path}")) + Execute(format("echo \"local all \\\"{hive_metastore_user_name}\\\" trust\" | tee -a {postgresql_pghba_conf_path}")) + + # trust hive_metastore_user and postgres via local interface + Execute(format("sed -i '/^[[:space:]]*host[[:space:]]*all[[:space:]]*all.*$/s/^/#/' {postgresql_pghba_conf_path}")) + Execute(format("sed -i '/^[[:space:]]*host[[:space:]]*all[[:space:]]*postgres.*$/d' {postgresql_pghba_conf_path}")) + Execute(format("sed -i '/^[[:space:]]*host[[:space:]]*all[[:space:]]*\"{hive_metastore_user_name}\".*$/d' {postgresql_pghba_conf_path}")) + Execute(format("echo \"host all postgres 0.0.0.0/0 trust\" | tee -a {postgresql_pghba_conf_path}")) + Execute(format("echo \"host all \\\"{hive_metastore_user_name}\\\" 
0.0.0.0/0 trust\" | tee -a {postgresql_pghba_conf_path}")) + +if __name__ == "__main__": + PostgreSQLServer().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/postgresql_service.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/postgresql_service.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/postgresql_service.py new file mode 100644 index 0000000..6443e05 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/postgresql_service.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" + +from resource_management import * + + +def postgresql_service(postgresql_daemon_name=None, action='start'): + status_cmd = format('service {postgresql_daemon_name} status | grep running') + cmd = format('service {postgresql_daemon_name} {action}') + + if action == 'status': + Execute(status_cmd) + elif action == 'stop': + Execute(cmd, + logoutput = True, + only_if = status_cmd + ) + elif action == 'start': + Execute(cmd, + logoutput = True, + not_if = status_cmd + ) http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/service_check.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/service_check.py new file mode 100644 index 0000000..09ba1bf --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/service_check.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" + +from resource_management import * +import socket +import sys + +from hcat_service_check import hcat_service_check + +class HiveServiceCheck(Script): + def service_check(self, env): + import params + env.set_params(params) + + address=format("{hive_server_host}") + port=int(format("{hive_server_port}")) + s = socket.socket() + print "Test connectivity to hive server" + try: + s.connect((address, port)) + print "Successfully connected to %s on port %s" % (address, port) + s.close() + except socket.error, e: + print "Connection to %s on port %s failed: %s" % (address, port, e) + sys.exit(1) + + hcat_service_check() + +if __name__ == "__main__": + HiveServiceCheck().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/status_params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/status_params.py new file mode 100644 index 0000000..a90fd15 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/status_params.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +from resource_management import * + +config = Script.get_config() + +hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir'] +hive_pid = 'hive-server.pid' + +hive_metastore_pid = 'hive.pid' + +hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir + +if System.get_instance().os_family == "suse" or System.get_instance().os_family == "ubuntu": + daemon_name = 'mysql' +else: + daemon_name = 'mysqld' + +postgresql_daemon_name = "postgresql" http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/hcat-env.sh.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/hcat-env.sh.j2 b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/hcat-env.sh.j2 new file mode 100644 index 0000000..0b9dcc3 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/hcat-env.sh.j2 @@ -0,0 +1,43 @@ +{# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#} + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +JAVA_HOME={{java64_home}} +HCAT_PID_DIR={{hcat_pid_dir}}/ +HCAT_LOG_DIR={{hcat_log_dir}}/ +HCAT_CONF_DIR={{hcat_conf_dir}} +HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}} +#DBROOT is the path where the connector jars are downloaded +DBROOT={{hcat_dbroot}} +USER={{hcat_user}} +METASTORE_PORT={{hive_metastore_port}} http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/startHiveserver2.sh.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/startHiveserver2.sh.j2 b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/startHiveserver2.sh.j2 new file mode 100644 index 0000000..a8fe21c --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/startHiveserver2.sh.j2 @@ -0,0 +1,29 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# + +HIVE_SERVER2_OPTS=" -hiveconf hive.log.file=hiveserver2.log -hiveconf hive.log.dir=$5" +{% if hive_authorization_enabled == True and str(hdp_stack_version).startswith('2.1') %} +# HiveServer 2 -hiveconf options +HIVE_SERVER2_OPTS="${HIVE_SERVER2_OPTS} -hiveconf hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator -hiveconf hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory " +{% endif %} + +HIVE_CONF_DIR=$4 /usr/lib/hive/bin/hiveserver2 -hiveconf hive.metastore.uris=" " ${HIVE_SERVER2_OPTS} > $1 2> $2 & +echo $!|cat>$3 http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/configuration/nagios-env.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/configuration/nagios-env.xml b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/configuration/nagios-env.xml new file mode 100644 index 0000000..fad8374 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/configuration/nagios-env.xml @@ -0,0 +1,53 @@ +<?xml version="1.0"?> +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> +<!-- +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +--> + +<configuration> + <property> + <name>nagios_user</name> + <value>nagios</value> + <property-type>USER</property-type> + <description>Nagios Username.</description> + </property> + <property> + <name>nagios_group</name> + <value>nagios</value> + <property-type>GROUP</property-type> + <description>Nagios Group.</description> + </property> + <property> + <name>nagios_web_login</name> + <value>nagiosadmin</value> + <description>Nagios web user.</description> + </property> + <property require-input = "true"> + <name>nagios_web_password</name> + <value></value> + <property-type>PASSWORD</property-type> + <description>Nagios Admin Password.</description> + </property> + <property require-input = "true"> + <name>nagios_contact</name> + <value></value> + <description>Hadoop Admin Email.</description> + </property> + +</configuration> http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/metainfo.xml b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/metainfo.xml new file mode 100644 index 0000000..bebc7d6 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/metainfo.xml @@ -0,0 +1,160 @@ +<?xml version="1.0"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. 
See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> +<metainfo> + <schemaVersion>2.0</schemaVersion> + <services> + <service> + <name>NAGIOS</name> + <displayName>Nagios</displayName> + <comment>Nagios Monitoring and Alerting system</comment> + <version>3.5.0</version> + <components> + <component> + <name>NAGIOS_SERVER</name> + <displayName>Nagios Server</displayName> + <category>MASTER</category> + <cardinality>1</cardinality> + <dependencies> + <dependency> + <name>HDFS/HDFS_CLIENT</name> + <scope>host</scope> + <auto-deploy> + <enabled>true</enabled> + </auto-deploy> + </dependency> + <dependency> + <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name> + <scope>host</scope> + <auto-deploy> + <enabled>true</enabled> + </auto-deploy> + </dependency> + <dependency> + <name>OOZIE/OOZIE_CLIENT</name> + <scope>host</scope> + <auto-deploy> + <enabled>true</enabled> + </auto-deploy> + </dependency> + <dependency> + <name>YARN/YARN_CLIENT</name> + <scope>host</scope> + <auto-deploy> + <enabled>true</enabled> + </auto-deploy> + </dependency> + <dependency> + <name>HCATALOG/HCAT</name> + <scope>host</scope> + <auto-deploy> + <enabled>true</enabled> + </auto-deploy> + </dependency> + </dependencies> + <commandScript> + <script>scripts/nagios_server.py</script> + <scriptType>PYTHON</scriptType> + <timeout>600</timeout> + </commandScript> + </component> + </components> + 
<osSpecifics> + <osSpecific> + <osFamily>any</osFamily> + <packages> + <package> + <name>perl</name> + </package> + <package> + <name>fping</name> + </package> + </packages> + </osSpecific> + <osSpecific> + <osFamily>ubuntu12</osFamily> + <packages> + <package> + <name>nagios3</name> + </package> + <package> + <name>nagios3-common</name> + </package> + <package> + <name>nagios3-dbg</name> + </package> + <package> + <name>nagios3-doc</name> + </package> + <package> + <name>nagios-plugins-extra</name> + </package> + <package> + <name>php5-curl</name> + </package> + <package> + <name>libapache2-mod-php5</name> + </package> + </packages> + </osSpecific> + <osSpecific> + <osFamily>redhat5,redhat6,suse11</osFamily> + <packages> + <package> + <name>nagios-plugins-1.4.9</name> + </package> + <package> + <name>nagios-3.5.0-99</name> + </package> + <package> + <name>nagios-www-3.5.0-99</name> + </package> + <package> + <name>nagios-devel-3.5.0-99</name> + </package> + </packages> + </osSpecific> + <osSpecific> + <osFamily>suse11</osFamily> + <packages> + <package> + <name>php5*-json</name> + </package> + <package> + <name>apache2?mod_php*</name> + </package> + <package> + <name>php-curl</name> + </package> + </packages> + </osSpecific> + <osSpecific> + <osFamily>redhat5</osFamily> + <packages> + <package> + <name>php-pecl-json.x86_64</name> + </package> + </packages> + </osSpecific> + </osSpecifics> + <configuration-dependencies> + <config-type>nagios-env</config-type> + </configuration-dependencies> + <monitoringService>true</monitoringService> + </service> + </services> +</metainfo> http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_aggregate.php ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_aggregate.php 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_aggregate.php new file mode 100644 index 0000000..792b25b --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_aggregate.php @@ -0,0 +1,248 @@ +<?php +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +define("PASSIVE_MODE_STR", "AMBARIPASSIVE="); + + $options = getopt ("f:s:n:w:c:t:"); + if (!array_key_exists('t', $options) || !array_key_exists('f', $options) || !array_key_exists('w', $options) + || !array_key_exists('c', $options) || !array_key_exists('s', $options)) { + usage(); + exit(3); + } + $status_file=$options['f']; + $status_code=$options['s']; + $type=$options['t']; + $warn=$options['w']; $warn = preg_replace('/%$/', '', $warn); + $crit=$options['c']; $crit = preg_replace('/%$/', '', $crit); + if ($type == "service" && !array_key_exists('n', $options)) { + echo "Service description not provided -n option\n"; + exit(3); + } + if ($type == "service") { + $service_name=$options['n']; + /* echo "DESC: " . $service_name . 
"\n"; */ + } + + $result = array(); + $status_file_content = file_get_contents($status_file); + + $counts; + if ($type == "service") { + $counts=query_alert_count($status_file_content, $service_name, $status_code); + } else { + $counts=query_host_count($status_file_content, $status_code); + } + + if ($counts['total'] == 0) { + $percent = 0; + } else { + $percent = ($counts['actual']/$counts['total'])*100; + } + if ($percent >= $crit) { + echo "CRITICAL: total:<" . $counts['total'] . ">, affected:<" . $counts['actual'] . ">\n"; + exit (2); + } + if ($percent >= $warn) { + echo "WARNING: total:<" . $counts['total'] . ">, affected:<" . $counts['actual'] . ">\n"; + exit (1); + } + echo "OK: total:<" . $counts['total'] . ">, affected:<" . $counts['actual'] . ">\n"; + exit(0); + + + # Functions + /* print usage */ + function usage () { + echo "Usage: $0 -f <status_file_path> -t type(host/service) -s <status_codes> -n <service description> -w <warn%> -c <crit%>\n"; + } + + /* Query host count */ + function query_host_count ($status_file_content, $status_code) { + $num_matches = preg_match_all("/hoststatus \{([\S\s]*?)\}/", $status_file_content, $matches, PREG_PATTERN_ORDER); + $hostcounts_object = array (); + $total_hosts = 0; + $hosts = 0; + foreach ($matches[0] as $object) { + $total_hosts++; + if (getParameter($object, "current_state") == $status_code) { + $hosts++; + } + } + $hostcounts_object['total'] = $total_hosts; + $hostcounts_object['actual'] = $hosts; + return $hostcounts_object; + } + + /* Query Alert counts */ + function query_alert_count ($status_file_content, $service_name, $status_code) { + $num_matches = preg_match_all("/servicestatus \{([\S\s]*?)\}/", $status_file_content, $matches, PREG_PATTERN_ORDER); + $alertcounts_objects = array (); + $total_alerts=0; + $alerts=0; + foreach ($matches[0] as $object) { + $long_out = getParameter($object, "long_plugin_output"); + $skip_if_match=!strncmp($long_out, PASSIVE_MODE_STR, strlen(PASSIVE_MODE_STR)); + + if 
(getParameter($object, "service_description") == $service_name && !$skip_if_match) { + $total_alerts++; + if (getParameter($object, "current_state") >= $status_code) { + $alerts++; + } + } + } + $alertcounts_objects['total'] = $total_alerts; + $alertcounts_objects['actual'] = $alerts; + return $alertcounts_objects; + } + + function get_service_type($service_description) + { + $pieces = explode("::", $service_description); + switch ($pieces[0]) { + case "NAMENODE": + $pieces[0] = "HDFS"; + break; + case "JOBTRACKER": + $pieces[0] = "MAPREDUCE"; + break; + case "HBASEMASTER": + $pieces[0] = "HBASE"; + break; + case "SYSTEM": + case "HDFS": + case "MAPREDUCE": + case "HBASE": + case "STORM": + break; + default: + $pieces[0] = "UNKNOWN"; + } + return $pieces[0]; + } + + function getParameter($object, $key) + { + $pattern="/\s" . $key . "[\s= ]*([\S, ]*)\n/"; + $num_mat = preg_match($pattern, $object, $matches); + $value = ""; + if ($num_mat) { + $value = $matches[1]; + } + return $value; + } + +function indent($json) { + + $result = ''; + $pos = 0; + $strLen = strlen($json); + $indentStr = ' '; + $newLine = "\n"; + $prevChar = ''; + $outOfQuotes = true; + + for ($i=0; $i<=$strLen; $i++) { + + // Grab the next character in the string. + $char = substr($json, $i, 1); + + // Are we inside a quoted string? + if ($char == '"' && $prevChar != '\\') { + $outOfQuotes = !$outOfQuotes; + + // If this character is the end of an element, + // output a new line and indent the next line. + } else if(($char == '}' || $char == ']') && $outOfQuotes) { + $result .= $newLine; + $pos --; + for ($j=0; $j<$pos; $j++) { + $result .= $indentStr; + } + } + + // Add the character to the result string. + $result .= $char; + + // If the last character was the beginning of an element, + // output a new line and indent the next line. 
+ if (($char == ',' || $char == '{' || $char == '[') && $outOfQuotes) { + $result .= $newLine; + if ($char == '{' || $char == '[') { + $pos ++; + } + + for ($j = 0; $j < $pos; $j++) { + $result .= $indentStr; + } + } + + $prevChar = $char; + } + + return $result; +} + +/* JSON documment format */ +/* +{ + "programstatus":{ + "last_command_check":"1327385743" + }, + "hostcounts":{ + "up_nodes":"", + "down_nodes":"" + }, + "hoststatus":[ + { + "host_name"="ip-10-242-191-48.ec2.internal", + "current_state":"0", + "last_hard_state":"0", + "plugin_output":"PING OK - Packet loss = 0%, RTA = 0.04 ms", + "last_check":"1327385564", + "current_attempt":"1", + "last_hard_state_change":"1327362079", + "last_time_up":"1327385574", + "last_time_down":"0", + "last_time_unreachable":"0", + "is_flapping":"0", + "last_check":"1327385574", + "servicestatus":[ + ] + } + ], + "servicestatus":[ + { + "service_type":"HDFS", {HBASE, MAPREDUCE, HIVE, ZOOKEEPER} + "service_description":"HDFS Current Load", + "host_name"="ip-10-242-191-48.ec2.internal", + "current_attempt":"1", + "current_state":"0", + "plugin_output":"PING OK - Packet loss = 0%, RTA = 0.04 ms", + "last_hard_state_change":"1327362079", + "last_time_ok":"1327385479", + "last_time_warning":"0", + "last_time_unknown":"0", + "last_time_critical":"0", + "last_check":"1327385574", + "is_flapping":"0" + } + ] +} +*/ + +?> http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_ambari_alerts.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_ambari_alerts.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_ambari_alerts.py new file mode 100644 index 0000000..833a798 --- /dev/null +++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_ambari_alerts.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +import os +import optparse +import json +import traceback + +def main(): + + parser = optparse.OptionParser() + + parser.add_option("-H", "--host", dest="host", default="localhost", help="NameNode host") + parser.add_option("-n", "--name", dest="alert_name", help="Alert name to check") + parser.add_option("-f", "--file", dest="alert_file", help="File containing the alert structure") + + (options, args) = parser.parse_args() + + if options.alert_name is None: + print "Alert name is required (--name or -n)" + exit(-1) + + if options.alert_file is None: + print "Alert file is required (--file or -f)" + exit(-1) + + if not os.path.exists(options.alert_file): + print "Status is unreported" + exit(3) + + try: + with open(options.alert_file, 'r') as f: + data = json.load(f) + + buf_list = [] + exit_code = 0 + + for_hosts = data[options.alert_name] + if for_hosts.has_key(options.host): + for host_entry in for_hosts[options.host]: + buf_list.append(host_entry['text']) + alert_state = host_entry['state'] + if alert_state == 'CRITICAL' 
and exit_code < 2: + exit_code = 2 + elif alert_state == 'WARNING' and exit_code < 1: + exit_code = 1 + + if 0 == len(buf_list): + print "Status is not reported" + exit(3) + else: + print ", ".join(buf_list) + exit(exit_code) + + except Exception: + traceback.print_exc() + exit(3) + +if __name__ == "__main__": + main() + http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_checkpoint_time.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_checkpoint_time.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_checkpoint_time.py new file mode 100644 index 0000000..ab889d1 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_checkpoint_time.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +import os +import optparse +import time +import urllib2 +import json + +CRIT_MESSAGE = "CRITICAL: Last checkpoint time is below acceptable. Checkpoint was done {h}h. {m}m. 
ago" +WARNING_MESSAGE = "WARNING: Last checkpoint time is below acceptable. Checkpoint was done {h}h. {m}m. ago" +OK_MESSAGE = "OK: Last checkpoint time" +WARNING_JMX_MESSAGE = "WARNING: NameNode JMX not accessible" + +def main(): + + current_time = int(round(time.time() * 1000)) + + parser = optparse.OptionParser() + + parser.add_option("-H", "--host", dest="host", + default="localhost", help="NameNode host") + parser.add_option("-p", "--port", dest="port", + default="50070", help="NameNode jmx port") + parser.add_option("-w", "--warning", dest="warning", + default="200", help="Percent for warning alert") + parser.add_option("-c", "--critical", dest="crit", + default="200", help="Percent for critical alert") + parser.add_option("-t", "--period", dest="period", + default="21600", help="Period time") + parser.add_option("-x", "--txns", dest="txns", + default="1000000", + help="CheckpointNode will create a checkpoint of the namespace every 'dfs.namenode.checkpoint.txns'") + (options, args) = parser.parse_args() + + host = get_available_nn_host(options) + + last_checkpoint_time_qry = "http://{host}:{port}/jmx?qry=Hadoop:service=NameNode,name=FSNamesystem".\ + format(host=host, port=options.port) + last_checkpoint_time = int(get_value_from_jmx(last_checkpoint_time_qry,"LastCheckpointTime")) + + journal_transaction_info_qry = "http://{host}:{port}/jmx?qry=Hadoop:service=NameNode,name=NameNodeInfo".\ + format(host=host, port=options.port) + journal_transaction_info = get_value_from_jmx(journal_transaction_info_qry,"JournalTransactionInfo") + journal_transaction_info_dict = json.loads(journal_transaction_info) + + last_txid = int(journal_transaction_info_dict['LastAppliedOrWrittenTxId']) + most_txid = int(journal_transaction_info_dict['MostRecentCheckpointTxId']) + + delta = (current_time - last_checkpoint_time)/1000 + + if ((last_txid - most_txid) > int(options.txns)) and (float(delta) / int(options.period)*100 >= int(options.crit)): + print 
CRIT_MESSAGE.format(h=get_time(delta)['h'], m=get_time(delta)['m']) + exit(2) + elif ((last_txid - most_txid) > int(options.txns)) and (float(delta) / int(options.period)*100 >= int(options.warning)): + print WARNING_MESSAGE.format(h=get_time(delta)['h'], m=get_time(delta)['m']) + exit(1) + else: + print OK_MESSAGE + exit(0) + +def get_time(delta): + h = int(delta/3600) + m = int((delta % 3600)/60) + return {'h':h, 'm':m} + +def get_value_from_jmx(qry, property): + try: + response = urllib2.urlopen(qry) + data=response.read() + except Exception: + print WARNING_JMX_MESSAGE + exit(1) + + data_dict = json.loads(data) + return (data_dict["beans"][0][property]) + +def get_available_nn_host(options): + nn_hosts = options.host.split(" ") + for nn_host in nn_hosts: + try: + urllib2.urlopen("http://{host}:{port}/jmx".format(host=nn_host, port=options.port)) + return nn_host + except Exception: + pass + print WARNING_JMX_MESSAGE + exit(1) + +if __name__ == "__main__": + main() + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu.php ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu.php b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu.php new file mode 100644 index 0000000..0744e38 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu.php @@ -0,0 +1,109 @@ +<?php +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + include "hdp_nagios_init.php"; + + $options = getopt ("h:p:w:c:k:r:t:u:e"); + if (!array_key_exists('h', $options) || !array_key_exists('p', $options) || !array_key_exists('w', $options) + || !array_key_exists('c', $options)) { + usage(); + exit(3); + } + + $hosts=$options['h']; + $port=$options['p']; + $warn=$options['w']; $warn = preg_replace('/%$/', '', $warn); + $crit=$options['c']; $crit = preg_replace('/%$/', '', $crit); + $keytab_path=$options['k']; + $principal_name=$options['r']; + $kinit_path_local=$options['t']; + $security_enabled=$options['u']; + $ssl_enabled=$options['e']; + + /* Kinit if security enabled */ + $status = kinit_if_needed($security_enabled, $kinit_path_local, $keytab_path, $principal_name); + $retcode = $status[0]; + $output = $status[1]; + + if ($output != 0) { + echo "CRITICAL: Error doing kinit for nagios. $output"; + exit (2); + } + + $protocol = ($ssl_enabled == "true" ? 
"https" : "http"); + + + foreach (preg_split('/,/', $hosts) as $host) { + /* Get the json document */ + + $ch = curl_init(); + $username = rtrim(`id -un`, "\n"); + curl_setopt_array($ch, array( CURLOPT_URL => $protocol."://".$host.":".$port."/jmx?qry=java.lang:type=OperatingSystem", + CURLOPT_RETURNTRANSFER => true, + CURLOPT_HTTPAUTH => CURLAUTH_ANY, + CURLOPT_USERPWD => "$username:", + CURLOPT_SSL_VERIFYPEER => FALSE )); + $json_string = curl_exec($ch); + $info = curl_getinfo($ch); + if (intval($info['http_code']) == 401){ + logout(); + $json_string = curl_exec($ch); + } + $info = curl_getinfo($ch); + curl_close($ch); + $json_array = json_decode($json_string, true); + + $object = $json_array['beans'][0]; + + if (count($object) == 0) { + echo "CRITICAL: Data inaccessible, Status code = ". $info['http_code'] ."\n"; + exit(2); + } + + $cpu_load = $object['SystemCpuLoad']; + + if (!isset($object['SystemCpuLoad']) || $cpu_load < 0.0) { + echo "WARNING: Data unavailable, SystemCpuLoad is not set\n"; + exit(1); + } + + $cpu_count = $object['AvailableProcessors']; + + $cpu_percent = $cpu_load*100; + } + + $out_msg = $cpu_count . " CPU, load " . number_format($cpu_percent, 1, '.', '') . '%'; + + if ($cpu_percent > $crit) { + echo $out_msg . ' > ' . $crit . "% : CRITICAL\n"; + exit(2); + } + if ($cpu_percent > $warn) { + echo $out_msg . ' > ' . $warn . "% : WARNING\n"; + exit(1); + } + + echo $out_msg . ' < ' . $warn . 
"% : OK\n"; + exit(0); + + /* print usage */ + function usage () { + echo "Usage: $0 -h <host> -p port -w <warn%> -c <crit%> -k keytab_path -r principal_name -t kinit_path -u security_enabled -e ssl_enabled\n"; + } +?> http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu.pl ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu.pl b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu.pl new file mode 100644 index 0000000..a5680f7 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu.pl @@ -0,0 +1,114 @@ +#!/usr/bin/perl -w +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# +use strict; +use Net::SNMP; +use Getopt::Long; + +# Variable +my $base_proc = "1.3.6.1.2.1.25.3.3.1"; +my $proc_load = "1.3.6.1.2.1.25.3.3.1.2"; +my $o_host = undef; +my $o_community = undef; +my $o_warn= undef; +my $o_crit= undef; +my $o_timeout = 15; +my $o_port = 161; + +sub Usage { + print "Usage: $0 -H <host> -C <snmp_community> -w <warn level> -c <crit level>\n"; +} + +Getopt::Long::Configure ("bundling"); +GetOptions( + 'H:s' => \$o_host, + 'C:s' => \$o_community, + 'c:s' => \$o_crit, + 'w:s' => \$o_warn + ); +if (!defined $o_host || !defined $o_community || !defined $o_crit || !defined $o_warn) { + Usage(); + exit 3; +} +$o_warn =~ s/\%//g; +$o_crit =~ s/\%//g; +alarm ($o_timeout); +$SIG{'ALRM'} = sub { + print "Unable to contact host: $o_host\n"; + exit 3; +}; + +# Connect to host +my ($session,$error); +($session, $error) = Net::SNMP->session( + -hostname => $o_host, + -community => $o_community, + -port => $o_port, + -timeout => $o_timeout + ); +if (!defined($session)) { + printf("Error opening session: %s.\n", $error); + exit 3; +} + +my $exit_val=undef; +my $resultat = (Net::SNMP->VERSION < 4) ? + $session->get_table($base_proc) + : $session->get_table(Baseoid => $base_proc); + +if (!defined($resultat)) { + printf("ERROR: Description table : %s.\n", $session->error); + $session->close; + exit 3; +} + +$session->close; + +my ($cpu_used,$ncpu)=(0,0); +foreach my $key ( keys %$resultat) { + if ($key =~ /$proc_load/) { + $cpu_used += $$resultat{$key}; + $ncpu++; + } +} + +if ($ncpu==0) { + print "Can't find CPU usage information : UNKNOWN\n"; + exit 3; +} + +$cpu_used /= $ncpu; + +print "$ncpu CPU, ", $ncpu==1 ? 
"load" : "average load"; +printf(" %.1f%%",$cpu_used); +$exit_val=0; + +if ($cpu_used > $o_crit) { + print " > $o_crit% : CRITICAL\n"; + $exit_val=2; +} else { + if ($cpu_used > $o_warn) { + print " > $o_warn% : WARNING\n"; + $exit_val=1; + } +} +print " < $o_warn% : OK\n" if ($exit_val eq 0); +exit $exit_val; http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu_ha.php ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu_ha.php b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu_ha.php new file mode 100644 index 0000000..91a7c64 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_cpu_ha.php @@ -0,0 +1,116 @@ +<?php +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
// Body of check_cpu_ha.php (header/license is on the previous archived line):
// iterates a comma-separated -h host list, fetches java.lang:OperatingSystem
// JMX over http(s) with SPNEGO-capable curl (retries once after a 401 via
// logout()), and keeps the last host's bean that decoded non-empty.
// NOTE(review): kinit failure is detected with `if ($output != 0)` — i.e. any
// non-empty kinit output — while the numeric `$retcode` is computed but never
// checked; presumably $retcode was the intended test. Verify against
// hdp_nagios_init.php's kinit_if_needed contract.
+ */ + + include "hdp_nagios_init.php"; + + $options = getopt ("h:p:w:c:k:r:t:u:e"); + if (!array_key_exists('h', $options) || !array_key_exists('p', $options) || !array_key_exists('w', $options) + || !array_key_exists('c', $options)) { + usage(); + exit(3); + } + + $hosts=$options['h']; + $port=$options['p']; + $warn=$options['w']; $warn = preg_replace('/%$/', '', $warn); + $crit=$options['c']; $crit = preg_replace('/%$/', '', $crit); + $keytab_path=$options['k']; + $principal_name=$options['r']; + $kinit_path_local=$options['t']; + $security_enabled=$options['u']; + $ssl_enabled=$options['e']; + + /* Kinit if security enabled */ + $status = kinit_if_needed($security_enabled, $kinit_path_local, $keytab_path, $principal_name); + $retcode = $status[0]; + $output = $status[1]; + + if ($output != 0) { + echo "CRITICAL: Error doing kinit for nagios. $output"; + exit (2); + } + + $protocol = ($ssl_enabled == "true" ? "https" : "http"); + + $jmx_response_available = false; + $jmx_response; + + foreach (preg_split('/,/', $hosts) as $host) { + /* Get the json document */ + + $ch = curl_init(); + $username = rtrim(`id -un`, "\n"); + curl_setopt_array($ch, array( CURLOPT_URL => $protocol."://".$host.":".$port."/jmx?qry=java.lang:type=OperatingSystem", + CURLOPT_RETURNTRANSFER => true, + CURLOPT_HTTPAUTH => CURLAUTH_ANY, + CURLOPT_USERPWD => "$username:", + CURLOPT_SSL_VERIFYPEER => FALSE )); + $json_string = curl_exec($ch); + $info = curl_getinfo($ch); + if (intval($info['http_code']) == 401){ + logout(); + $json_string = curl_exec($ch); + } + $info = curl_getinfo($ch); + curl_close($ch); + $json_array = json_decode($json_string, true); + + $object = $json_array['beans'][0]; + + if (count($object) > 0) { + $jmx_response_available = true; + $jmx_response = $object; + } + } + + if ($jmx_response_available === false) { + echo "CRITICAL: Data inaccessible, Status code = ". 
// Tail of check_cpu_ha.php: converts SystemCpuLoad (fraction) to a percent,
// WARNs (exit 1) when the metric is absent or negative, then applies
// warn/crit thresholds (exit 1/2, else 0). Afterwards the diff header +
// license banner for check_datanode_storage.php begin.
$info['http_code'] ."\n"; + exit(2); + } + + $cpu_load = $jmx_response['SystemCpuLoad']; + + if (!isset($jmx_response['SystemCpuLoad']) || $cpu_load < 0.0) { + echo "WARNING: Data unavailable, SystemCpuLoad is not set\n"; + exit(1); + } + + $cpu_count = $jmx_response['AvailableProcessors']; + + $cpu_percent = $cpu_load*100; + + $out_msg = $cpu_count . " CPU, load " . number_format($cpu_percent, 1, '.', '') . '%'; + + if ($cpu_percent > $crit) { + echo $out_msg . ' > ' . $crit . "% : CRITICAL\n"; + exit(2); + } + if ($cpu_percent > $warn) { + echo $out_msg . ' > ' . $warn . "% : WARNING\n"; + exit(1); + } + + echo $out_msg . ' < ' . $warn . "% : OK\n"; + exit(0); + + /* print usage */ + function usage () { + echo "Usage: $0 -h <host> -p port -w <warn%> -c <crit%> -k keytab_path -r principal_name -t kinit_path -u security_enabled -e ssl_enabled\n"; + } +?> http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_datanode_storage.php ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_datanode_storage.php b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_datanode_storage.php new file mode 100644 index 0000000..dee22b4 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_datanode_storage.php @@ -0,0 +1,100 @@ +<?php +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
// Remainder of check_datanode_storage.php's license, then its body: fetches
// the DataNode FSDatasetState JMX bean from a single -h host and derives
// percent-full from Capacity/Remaining.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* This plugin makes call to master node, get the jmx-json document + * check the storage capacity remaining on local datanode storage + */ + + include "hdp_nagios_init.php"; + + $options = getopt ("h:p:w:c:e:k:r:t:s:"); + if (!array_key_exists('h', $options) || !array_key_exists('p', $options) || !array_key_exists('w', $options) + || !array_key_exists('c', $options)) { + usage(); + exit(3); + } + + $host=$options['h']; + $port=$options['p']; + $warn=$options['w']; $warn = preg_replace('/%$/', '', $warn); + $crit=$options['c']; $crit = preg_replace('/%$/', '', $crit); + $keytab_path=$options['k']; + $principal_name=$options['r']; + $kinit_path_local=$options['t']; + $security_enabled=$options['s']; + $ssl_enabled=$options['e']; + + /* Kinit if security enabled */ + $status = kinit_if_needed($security_enabled, $kinit_path_local, $keytab_path, $principal_name); + $retcode = $status[0]; + $output = $status[1]; + + if ($output != 0) { + echo "CRITICAL: Error doing kinit for nagios. $output"; + exit (2); + } + + $protocol = ($ssl_enabled == "true" ? 
// NOTE(review): $object['Remaining'] and $object['Capacity'] are read BEFORE
// the `count($object) == 0` guard, so a failed fetch raises PHP notices before
// the CRITICAL exit; and if Capacity is 0 the percent_full division is a
// divide-by-zero (the sibling check_hdfs_capacity.php guards total == 0).
// Also: kinit result checked via $output rather than $retcode, as elsewhere.
"https" : "http"); + + /* Get the json document */ + $ch = curl_init(); + $username = rtrim(`id -un`, "\n"); + curl_setopt_array($ch, array( CURLOPT_URL => $protocol."://".$host.":".$port."/jmx?qry=Hadoop:service=DataNode,name=FSDatasetState-*", + CURLOPT_RETURNTRANSFER => true, + CURLOPT_HTTPAUTH => CURLAUTH_ANY, + CURLOPT_USERPWD => "$username:", + CURLOPT_SSL_VERIFYPEER => FALSE )); + $json_string = curl_exec($ch); + $info = curl_getinfo($ch); + if (intval($info['http_code']) == 401){ + logout(); + $json_string = curl_exec($ch); + } + $info = curl_getinfo($ch); + curl_close($ch); + $json_array = json_decode($json_string, true); + $object = $json_array['beans'][0]; + $cap_remain = $object['Remaining']; /* Total capacity - any extenal files created in data directories by non-hadoop app */ + $cap_total = $object['Capacity']; /* Capacity used by all data partitions minus space reserved for M/R */ + if (count($object) == 0) { + echo "CRITICAL: Data inaccessible, Status code = ". $info['http_code'] ."\n"; + exit(2); + } + $percent_full = ($cap_total - $cap_remain)/$cap_total * 100; + + $out_msg = "Capacity:[" . $cap_total . + "], Remaining Capacity:[" . $cap_remain . + "], percent_full:[" . $percent_full . "]"; + + if ($percent_full > $crit) { + echo "CRITICAL: " . $out_msg . "\n"; + exit (2); + } + if ($percent_full > $warn) { + echo "WARNING: " . $out_msg . "\n"; + exit (1); + } + echo "OK: " . $out_msg . 
// Tail of check_datanode_storage.php (OK path + usage), then the diff header
// and license banner for check_hdfs_blocks.php begin.
"\n"; + exit(0); + + /* print usage */ + function usage () { + echo "Usage: $0 -h <host> -p port -w <warn%> -c <crit%> -k keytab path -r principal name -t kinit path -s security enabled -e ssl enabled\n"; + } +?> http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_hdfs_blocks.php ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_hdfs_blocks.php b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_hdfs_blocks.php new file mode 100644 index 0000000..3693aa0 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_hdfs_blocks.php @@ -0,0 +1,102 @@ +<?php +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
// Body of check_hdfs_blocks.php: queries the NameNode bean named by -s on each
// -h host for MissingBlocks/BlocksTotal; `break`s out of the host loop on the
// first host whose BlocksTotal is non-zero.
// NOTE(review): same guard-after-dereference pattern as the sibling plugins —
// MissingBlocks/BlocksTotal are read before the `count($object) == 0` check.
// Despite what the getopt string suggests, there are no -w/-c thresholds:
// any missing block (m_percent > 0) is reported CRITICAL.
+ */ + +/* This plugin makes call to master node, get the jmx-json document + * check the corrupt or missing blocks % is > threshod + * check_jmx -H hostaddress -p port -w 1% -c 1% + */ + + include "hdp_nagios_init.php"; + + $options = getopt ("h:p:s:e:k:r:t:u:"); + if (!array_key_exists('h', $options) || !array_key_exists('p', $options) || !array_key_exists('s', $options)) { + usage(); + exit(3); + } + $hosts=$options['h']; + $port=$options['p']; + $nn_jmx_property=$options['s']; + $keytab_path=$options['k']; + $principal_name=$options['r']; + $kinit_path_local=$options['t']; + $security_enabled=$options['u']; + $ssl_enabled=$options['e']; + + /* Kinit if security enabled */ + $status = kinit_if_needed($security_enabled, $kinit_path_local, $keytab_path, $principal_name); + $retcode = $status[0]; + $output = $status[1]; + + if ($output != 0) { + echo "CRITICAL: Error doing kinit for nagios. $output"; + exit (2); + } + + $protocol = ($ssl_enabled == "true" ? "https" : "http"); + + + foreach (preg_split('/,/', $hosts) as $host) { + /* Get the json document */ + + $ch = curl_init(); + $username = rtrim(`id -un`, "\n"); + curl_setopt_array($ch, array( CURLOPT_URL => $protocol."://".$host.":".$port."/jmx?qry=Hadoop:service=NameNode,name=".$nn_jmx_property, + CURLOPT_RETURNTRANSFER => true, + CURLOPT_HTTPAUTH => CURLAUTH_ANY, + CURLOPT_USERPWD => "$username:", + CURLOPT_SSL_VERIFYPEER => FALSE )); + $json_string = curl_exec($ch); + $info = curl_getinfo($ch); + if (intval($info['http_code']) == 401){ + logout(); + $json_string = curl_exec($ch); + } + $info = curl_getinfo($ch); + curl_close($ch); + $json_array = json_decode($json_string, true); + $m_percent = 0; + $object = $json_array['beans'][0]; + $missing_blocks = $object['MissingBlocks']; + $total_blocks = $object['BlocksTotal']; + if (count($object) == 0) { + echo "CRITICAL: Data inaccessible, Status code = ". 
// Tail of check_hdfs_blocks.php (CRITICAL/OK output + usage), then the diff
// header and the start of check_hdfs_capacity.php's license banner.
$info['http_code'] ."\n"; + exit(2); + } + if($total_blocks == 0) { + $m_percent = 0; + } else { + $m_percent = ($missing_blocks/$total_blocks)*100; + break; + } + } + $out_msg = "missing_blocks:<" . $missing_blocks . + ">, total_blocks:<" . $total_blocks . ">"; + + if ($m_percent > 0) { + echo "CRITICAL: " . $out_msg . "\n"; + exit (2); + } + echo "OK: " . $out_msg . "\n"; + exit(0); + + /* print usage */ + function usage () { + echo "Usage: $0 -h <host> -p port -s <namenode bean name> -k keytab path -r principal name -t kinit path -u security enabled -e ssl enabled\n"; + } +?> http://git-wip-us.apache.org/repos/asf/ambari/blob/83efcfea/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_hdfs_capacity.php ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_hdfs_capacity.php b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_hdfs_capacity.php new file mode 100644 index 0000000..af72723 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/NAGIOS/package/files/check_hdfs_capacity.php @@ -0,0 +1,109 @@ +<?php +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// Remainder of check_hdfs_capacity.php's license, then its body: fetches the
// NameNode FSNamesystemState bean from each -h host, derives DFS usage percent
// from CapacityUsed/(CapacityUsed+CapacityRemaining), and `break`s on the
// first host with a non-zero total.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* This plugin makes call to master node, get the jmx-json document + * check the % HDFS capacity used >= warn and critical limits. + * check_jmx -H hostaddress -p port -w 1 -c 1 + */ + + include "hdp_nagios_init.php"; + + $options = getopt ("h:p:w:c:e:k:r:t:s:"); + if (!array_key_exists('h', $options) || !array_key_exists('p', $options) || !array_key_exists('w', $options) + || !array_key_exists('c', $options)) { + usage(); + exit(3); + } + + $hosts=$options['h']; + $port=$options['p']; + $warn=$options['w']; $warn = preg_replace('/%$/', '', $warn); + $crit=$options['c']; $crit = preg_replace('/%$/', '', $crit); + $keytab_path=$options['k']; + $principal_name=$options['r']; + $kinit_path_local=$options['t']; + $security_enabled=$options['s']; + $ssl_enabled=$options['e']; + + /* Kinit if security enabled */ + $status = kinit_if_needed($security_enabled, $kinit_path_local, $keytab_path, $principal_name); + $retcode = $status[0]; + $output = $status[1]; + + if ($output != 0) { + echo "CRITICAL: Error doing kinit for nagios. $output"; + exit (2); + } + + $protocol = ($ssl_enabled == "true" ? 
// NOTE(review): CapacityUsed/CapacityRemaining are dereferenced before the
// `count($object) == 0` guard (same pattern as the sibling plugins), though
// unlike check_datanode_storage.php the CapacityTotal == 0 case IS guarded.
// Thresholds here use >= (the other plugins use >).
"https" : "http"); + + + foreach (preg_split('/,/', $hosts) as $host) { + /* Get the json document */ + $ch = curl_init(); + $username = rtrim(`id -un`, "\n"); + curl_setopt_array($ch, array( CURLOPT_URL => $protocol."://".$host.":".$port."/jmx?qry=Hadoop:service=NameNode,name=FSNamesystemState", + CURLOPT_RETURNTRANSFER => true, + CURLOPT_HTTPAUTH => CURLAUTH_ANY, + CURLOPT_USERPWD => "$username:", + CURLOPT_SSL_VERIFYPEER => FALSE )); + $json_string = curl_exec($ch); + $info = curl_getinfo($ch); + if (intval($info['http_code']) == 401){ + logout(); + $json_string = curl_exec($ch); + } + $info = curl_getinfo($ch); + curl_close($ch); + $json_array = json_decode($json_string, true); + $percent = 0; + $object = $json_array['beans'][0]; + $CapacityUsed = $object['CapacityUsed']; + $CapacityRemaining = $object['CapacityRemaining']; + if (count($object) == 0) { + echo "CRITICAL: Data inaccessible, Status code = ". $info['http_code'] ."\n"; + exit(2); + } + $CapacityTotal = $CapacityUsed + $CapacityRemaining; + if($CapacityTotal == 0) { + $percent = 0; + } else { + $percent = ($CapacityUsed/$CapacityTotal)*100; + break; + } + } + $out_msg = "DFSUsedGB:<" . round ($CapacityUsed/(1024*1024*1024),1) . + ">, DFSTotalGB:<" . round($CapacityTotal/(1024*1024*1024),1) . ">"; + + if ($percent >= $crit) { + echo "CRITICAL: " . $out_msg . "\n"; + exit (2); + } + if ($percent >= $warn) { + echo "WARNING: " . $out_msg . "\n"; + exit (1); + } + echo "OK: " . $out_msg . "\n"; + exit(0); + + /* print usage */ + function usage () { + echo "Usage: $0 -h <host> -p port -w <warn%> -c <crit%> -k keytab path -r principal name -t kinit path -s security enabled -e ssl enabled\n"; + } +?>
 