http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/kerberos.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/kerberos.json b/ambari-server/src/main/resources/common-services/STORM/0.9.1/kerberos.json new file mode 100644 index 0000000..fcfe524 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/kerberos.json @@ -0,0 +1,120 @@ +{ + "services": [ + { + "name": "STORM", + "identities": [ + { + "name": "/spnego" + }, + { + "name": "/smokeuser" + }, + { + "name": "storm_components", + "principal": { + "value": "${storm-env/storm_user}-${cluster_name|toLower()}@${realm}", + "type": "user", + "configuration": "storm-env/storm_principal_name" + }, + "keytab": { + "file": "${keytab_dir}/storm.headless.keytab", + "owner": { + "name": "${storm-env/storm_user}", + "access": "r" + }, + "group": { + "name": "${cluster-env/user_group}", + "access": "" + }, + "configuration": "storm-env/storm_keytab" + } + } + ], + "configurations": [ + { + "storm-site": { + "nimbus.authorizer": "backtype.storm.security.auth.authorizer.SimpleACLAuthorizer", + "drpc.authorizer": "backtype.storm.security.auth.authorizer.DRPCSimpleACLAuthorizer", + "ui.filter": "org.apache.hadoop.security.authentication.server.AuthenticationFilter", + "storm.principal.tolocal": "backtype.storm.security.auth.KerberosPrincipalToLocal", + "supervisor.enable": "true", + "storm.zookeeper.superACL": "sasl:{{storm_bare_jaas_principal}}", + "java.security.auth.login.config": "{{conf_dir}}/storm_jaas.conf", + "nimbus.admins": "['{{storm_bare_jaas_principal}}']", + "nimbus.supervisor.users": "['{{storm_bare_jaas_principal}}']", + "ui.filter.params": "{'type': 'kerberos', 'kerberos.principal': '{{storm_ui_jaas_principal}}', 'kerberos.keytab': '{{storm_ui_keytab_path}}', 'kerberos.name.rules': 'DEFAULT'}" + } + } + ], + 
"components": [ + { + "name": "STORM_UI_SERVER", + "identities": [ + { + "name": "/spnego", + "principal": { + "configuration": "storm-env/storm_ui_principal_name" + }, + "keytab": { + "configuration": "storm-env/storm_ui_keytab" + } + } + ] + }, + { + "name": "NIMBUS", + "identities": [ + { + "name": "nimbus_server", + "principal": { + "value": "nimbus/_HOST@${realm}", + "type": "service", + "configuration": "storm-env/nimbus_principal_name" + }, + "keytab": { + "file": "${keytab_dir}/nimbus.service.keytab", + "owner": { + "name": "${storm-env/storm_user}", + "access": "r" + }, + "group": { + "name": "${cluster-env/user_group}", + "access": "" + }, + "configuration": "storm-env/nimbus_keytab" + } + } + ] + }, + { + "name": "DRPC_SERVER", + "identities": [ + { + "name": "nimbus_server", + "principal": { + "value": "nimbus/_HOST@${realm}", + "type": "service", + "configuration": "storm-env/nimbus_principal_name" + }, + "keytab": { + "file": "${keytab_dir}/nimbus.service.keytab", + "owner": { + "name": "${storm-env/storm_user}", + "access": "r" + }, + "group": { + "name": "${cluster-env/user_group}", + "access": "" + }, + "configuration": "storm-env/nimbus_keytab" + } + } + ] + }, + { + "name" : "SUPERVISOR" + } + ] + } + ] +}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/metainfo.xml b/ambari-server/src/main/resources/common-services/STORM/0.9.1/metainfo.xml new file mode 100644 index 0000000..0f4d520 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/metainfo.xml @@ -0,0 +1,181 @@ +<?xml version="1.0"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> + +<metainfo> + <schemaVersion>2.0</schemaVersion> + <services> + <service> + <name>STORM</name> + <displayName>Storm</displayName> + <comment>Apache Hadoop Stream processing framework</comment> + <version>0.9.1</version> + <components> + + <component> + <name>NIMBUS</name> + <displayName>Nimbus</displayName> + <category>MASTER</category> + <cardinality>1</cardinality> + <versionAdvertised>true</versionAdvertised> + <dependencies> + <dependency> + <name>ZOOKEEPER/ZOOKEEPER_SERVER</name> + <scope>cluster</scope> + <auto-deploy> + <enabled>true</enabled> + </auto-deploy> + </dependency> + </dependencies> + <commandScript> + <script>scripts/nimbus.py</script> + <scriptType>PYTHON</scriptType> + <timeout>1200</timeout> + </commandScript> + <logs> + <log> + <logId>storm_nimbus</logId> + <primary>true</primary> + </log> + </logs> + </component> + + <component> + <name>STORM_REST_API</name> + <displayName>Storm REST API Server</displayName> + <category>MASTER</category> + <cardinality>1</cardinality> + <versionAdvertised>true</versionAdvertised> + <commandScript> + <script>scripts/rest_api.py</script> + <scriptType>PYTHON</scriptType> + <timeout>1200</timeout> + </commandScript> + </component> + + <component> + <name>SUPERVISOR</name> + <displayName>Supervisor</displayName> + <category>SLAVE</category> + <cardinality>1+</cardinality> + <versionAdvertised>true</versionAdvertised> + <commandScript> + <script>scripts/supervisor.py</script> + <scriptType>PYTHON</scriptType> + <timeout>1200</timeout> + </commandScript> + <bulkCommands> + <displayName>Supervisors</displayName> + <masterComponent>SUPERVISOR</masterComponent> + </bulkCommands> + <logs> + <log> + <logId>storm_supervisor</logId> + <primary>true</primary> + </log> + <log> + <logId>storm_worker</logId> + </log> + <log> + <logId>storm_logviewer</logId> + </log> + </logs> + </component> + + <component> + <name>STORM_UI_SERVER</name> + <displayName>Storm UI Server</displayName> + <category>MASTER</category> + 
<cardinality>1</cardinality> + <versionAdvertised>true</versionAdvertised> + <commandScript> + <script>scripts/ui_server.py</script> + <scriptType>PYTHON</scriptType> + <timeout>1200</timeout> + </commandScript> + <logs> + <log> + <logId>storm_ui</logId> + <primary>true</primary> + </log> + </logs> + </component> + + <component> + <name>DRPC_SERVER</name> + <displayName>DRPC Server</displayName> + <category>MASTER</category> + <cardinality>1</cardinality> + <versionAdvertised>true</versionAdvertised> + <commandScript> + <script>scripts/drpc_server.py</script> + <scriptType>PYTHON</scriptType> + <timeout>1200</timeout> + </commandScript> + <logs> + <log> + <logId>storm_drpc</logId> + <primary>true</primary> + </log> + </logs> + </component> + </components> + + <osSpecifics> + <osSpecific> + <osFamily>any</osFamily> + <packages> + <package> + <name>storm</name> + </package> + </packages> + </osSpecific> + </osSpecifics> + + <commandScript> + <script>scripts/service_check.py</script> + <scriptType>PYTHON</scriptType> + <timeout>300</timeout> + </commandScript> + + <requiredServices> + <service>ZOOKEEPER</service> + </requiredServices> + + <configuration-dependencies> + <config-type>storm-site</config-type> + <config-type>storm-env</config-type> + <config-type>ranger-storm-plugin-properties</config-type> + <config-type>ranger-storm-audit</config-type> + <config-type>ranger-storm-policymgr-ssl</config-type> + <config-type>ranger-storm-security</config-type> + <config-type>admin-properties</config-type> + <config-type>ranger-ugsync-site</config-type> + <config-type>ranger-admin-site</config-type> + <config-type>zookeeper-env</config-type> + <config-type>zoo.cfg</config-type> + <config-type>ams-ssl-client</config-type> + </configuration-dependencies> + <quickLinksConfigurations> + <quickLinksConfiguration> + <fileName>quicklinks.json</fileName> + <default>true</default> + </quickLinksConfiguration> + </quickLinksConfigurations> + </service> + </services> +</metainfo> 
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/metrics.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/metrics.json b/ambari-server/src/main/resources/common-services/STORM/0.9.1/metrics.json new file mode 100644 index 0000000..08c2b50 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/metrics.json @@ -0,0 +1,829 @@ +{ + "STORM_REST_API": { + "Component": [ + { + "type": "org.apache.ambari.server.controller.metrics.RestMetricsPropertyProvider", + "properties": { + "default_port": "8745", + "port_config_type": "storm-site", + "port_property_name": "storm.port", + "protocol": "http" + }, + "metrics": { + "default": { + "metrics/api/cluster/summary/tasks.total": { + "metric": "/api/cluster/summary##tasks.total", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/slots.total": { + "metric": "/api/cluster/summary##slots.total", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/slots.free": { + "metric": "/api/cluster/summary##slots.free", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/supervisors": { + "metric": "/api/cluster/summary##supervisors", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/executors.total": { + "metric": "/api/cluster/summary##executors.total", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/slots.used": { + "metric": "/api/cluster/summary##slots.used", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/topologies": { + "metric": "/api/cluster/summary##topologies", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/nimbus.uptime": { + "metric": "/api/cluster/summary##nimbus.uptime", + "pointInTime": true, + "temporal": false + } + } + } + } + 
], + "HostComponent": [ + { + "type": "org.apache.ambari.server.controller.metrics.RestMetricsPropertyProvider", + "properties": { + "default_port": "8745", + "port_config_type": "storm-site", + "port_property_name": "storm.port", + "protocol": "http" + }, + "metrics": { + "default": { + "metrics/api/cluster/summary/tasks.total": { + "metric": "/api/cluster/summary##tasks.total", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/slots.total": { + "metric": "/api/cluster/summary##slots.total", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/slots.free": { + "metric": "/api/cluster/summary##slots.free", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/supervisors": { + "metric": "/api/cluster/summary##supervisors", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/executors.total": { + "metric": "/api/cluster/summary##executors.total", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/slots.used": { + "metric": "/api/cluster/summary##slots.used", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/topologies": { + "metric": "/api/cluster/summary##topologies", + "pointInTime": true, + "temporal": false + }, + "metrics/api/cluster/summary/nimbus.uptime": { + "metric": "/api/cluster/summary##nimbus.uptime", + "pointInTime": true, + "temporal": false + } + } + } + } + ] + }, + "NIMBUS": { + "Component": [ + { + "type": "ganglia", + "metrics": { + "default": { + "metrics/boottime": { + "metric": "boottime", + "pointInTime": true, + "temporal": true + }, + "metrics/network/bytes_in": { + "metric": "bytes_in", + "pointInTime": true, + "temporal": true + }, + "metrics/network/bytes_out": { + "metric": "bytes_out", + "pointInTime": true, + "temporal": true + }, + "metrics/cpu/cpu_aidle": { + "metric": "cpu_aidle", + "pointInTime": true, + "temporal": true + }, + "metrics/load_fifteen": { + "metric": 
"load_fifteen", + "pointInTime": true, + "temporal": true + }, + "metrics/load_five": { + "metric": "load_five", + "pointInTime": true, + "temporal": true + }, + "metrics/load_one": { + "metric": "load_one", + "pointInTime": true, + "temporal": true + }, + "metrics/memory/mem_buffers": { + "metric": "mem_buffers", + "pointInTime": true, + "temporal": true + }, + "metrics/disk/part_max_used": { + "metric": "part_max_used", + "pointInTime": true, + "temporal": true + }, + "metrics/network/pkts_in": { + "metric": "pkts_in", + "pointInTime": true, + "temporal": true + }, + "metrics/network/pkts_out": { + "metric": "pkts_out", + "pointInTime": true, + "temporal": true + }, + "metrics/process/proc_run": { + "metric": "proc_run", + "pointInTime": true, + "temporal": true + }, + "metrics/process/proc_total": { + "metric": "proc_total", + "pointInTime": true, + "temporal": true + }, + "metrics/memory/swap_total": { + "metric": "swap_total", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/heap/committed": { + "metric": "Nimbus.JVM.Memory.Heap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/heap/init": { + "metric": "Nimbus.JVM.Memory.Heap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/heap/max": { + "metric": "Nimbus.JVM.Memory.Heap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/heap/used": { + "metric": "Nimbus.JVM.Memory.Heap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/nonheap/committed": { + "metric": "Nimbus.JVM.Memory.NonHeap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/nonheap/init": { + "metric": "Nimbus.JVM.Memory.NonHeap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/nonheap/max": { + "metric": "Nimbus.JVM.Memory.NonHeap.max", + "pointInTime": true, + "temporal": true + }, 
+ "metrics/storm/nimbus/jvm/memory/nonheap/used": { + "metric": "Nimbus.JVM.Memory.NonHeap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/os/processcputime": { + "metric": "Nimbus.JVM.OS.ProcessCpuTime", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/threading/daemonthreadcount": { + "metric": "Nimbus.JVM.Threading.DaemonThreadCount", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/threading/threadcount": { + "metric": "Nimbus.JVM.Threading.ThreadCount", + "pointInTime": true, + "temporal": true + }, + + "metrics/storm/nimbus/freeslots": { + "metric": "Free Slots", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/supervisors": { + "metric": "Supervisors", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/topologies": { + "metric": "Topologies", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/totalexecutors": { + "metric": "Total Executors", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/totalslots": { + "metric": "Total Slots", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/totaltasks": { + "metric": "Total Tasks", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/usedslots": { + "metric": "Used Slots", + "pointInTime": true, + "temporal": true + } + } + } + } + ], + "HostComponent": [ + { + "type": "ganglia", + "metrics": { + "default": { + "metrics/boottime": { + "metric": "boottime", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/network/bytes_in": { + "metric": "bytes_in", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/network/bytes_out": { + "metric": "bytes_out", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/cpu/cpu_aidle": { + "metric": "cpu_aidle", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + 
"metrics/load_fifteen": { + "metric": "load_fifteen", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/load_five": { + "metric": "load_five", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/load_one": { + "metric": "load_one", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/memory/mem_buffers": { + "metric": "mem_buffers", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/disk/part_max_used": { + "metric": "part_max_used", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/network/pkts_in": { + "metric": "pkts_in", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/network/pkts_out": { + "metric": "pkts_out", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/process/proc_run": { + "metric": "proc_run", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/process/proc_total": { + "metric": "proc_total", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/memory/swap_total": { + "metric": "swap_total", + "pointInTime": true, + "temporal": true, + "amsHostMetric": true + }, + "metrics/storm/nimbus/jvm/memory/heap/committed": { + "metric": "Nimbus.JVM.Memory.Heap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/heap/init": { + "metric": "Nimbus.JVM.Memory.Heap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/heap/max": { + "metric": "Nimbus.JVM.Memory.Heap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/heap/used": { + "metric": "Nimbus.JVM.Memory.Heap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/nonheap/committed": { + "metric": "Nimbus.JVM.Memory.NonHeap.committed", + "pointInTime": true, + "temporal": true + }, 
+ "metrics/storm/nimbus/jvm/memory/nonheap/init": { + "metric": "Nimbus.JVM.Memory.NonHeap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/nonheap/max": { + "metric": "Nimbus.JVM.Memory.NonHeap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/memory/nonheap/used": { + "metric": "Nimbus.JVM.Memory.NonHeap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/os/processcputime": { + "metric": "Nimbus.JVM.OS.ProcessCpuTime", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/threading/daemonthreadcount": { + "metric": "Nimbus.JVM.Threading.DaemonThreadCount", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/nimbus/jvm/threading/threadcount": { + "metric": "Nimbus.JVM.Threading.ThreadCount", + "pointInTime": true, + "temporal": true + } + + } + } + } + ] + }, + "SUPERVISOR": { + "Component": [ + { + "type": "ganglia", + "metrics": { + "default": { + "metrics/boottime": { + "metric": "boottime", + "pointInTime": true, + "temporal": true + }, + "metrics/network/bytes_in": { + "metric": "bytes_in", + "pointInTime": true, + "temporal": true + }, + "metrics/network/bytes_out": { + "metric": "bytes_out", + "pointInTime": true, + "temporal": true + }, + "metrics/cpu/cpu_aidle": { + "metric": "cpu_aidle", + "pointInTime": true, + "temporal": true + }, + "metrics/load_fifteen": { + "metric": "load_fifteen", + "pointInTime": true, + "temporal": true + }, + "metrics/load_five": { + "metric": "load_five", + "pointInTime": true, + "temporal": true + }, + "metrics/load_one": { + "metric": "load_one", + "pointInTime": true, + "temporal": true + }, + "metrics/memory/mem_buffers": { + "metric": "mem_buffers", + "pointInTime": true, + "temporal": true + }, + "metrics/disk/part_max_used": { + "metric": "part_max_used", + "pointInTime": true, + "temporal": true + }, + "metrics/network/pkts_in": { + "metric": "pkts_in", + "pointInTime": true, + 
"temporal": true + }, + "metrics/network/pkts_out": { + "metric": "pkts_out", + "pointInTime": true, + "temporal": true + }, + "metrics/process/proc_run": { + "metric": "proc_run", + "pointInTime": true, + "temporal": true + }, + "metrics/process/proc_total": { + "metric": "proc_total", + "pointInTime": true, + "temporal": true + }, + "metrics/memory/swap_total": { + "metric": "swap_total", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/heap/committed": { + "metric": "Supervisor.JVM.Memory.Heap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/heap/init": { + "metric": "Supervisor.JVM.Memory.Heap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/heap/max": { + "metric": "Supervisor.JVM.Memory.Heap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/heap/used": { + "metric": "Supervisor.JVM.Memory.Heap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/nonheap/committed": { + "metric": "Supervisor.JVM.Memory.NonHeap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/nonheap/init": { + "metric": "Supervisor.JVM.Memory.NonHeap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/nonheap/max": { + "metric": "Supervisor.JVM.Memory.NonHeap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/nonheap/used": { + "metric": "Supervisor.JVM.Memory.NonHeap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/os/processcputime": { + "metric": "Supervisor.JVM.OS.ProcessCpuTime", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/threading/daemonthreadcount": { + "metric": "Supervisor.JVM.Threading.DaemonThreadCount", + "pointInTime": true, + "temporal": true + }, + 
"metrics/storm/supervisor/jvm/threading/threadcount": { + "metric": "Supervisor.JVM.Threading.ThreadCount", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/heap/committed": { + "metric": "Worker.(.+).JVM.Memory.Heap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/heap/init": { + "metric": "Worker.(.+).JVM.Memory.Heap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/heap/max": { + "metric": "Worker.(.+).JVM.Memory.Heap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/heap/used": { + "metric": "Worker.(.+).JVM.Memory.Heap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/nonheap/committed": { + "metric": "Worker.(.+).JVM.Memory.NonHeap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/nonheap/init": { + "metric": "Worker.(.+).JVM.Memory.NonHeap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/nonheap/max": { + "metric": "Worker.(.+).JVM.Memory.NonHeap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/nonheap/used": { + "metric": "Worker.(.+).JVM.Memory.NonHeap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/os/processcputime": { + "metric": "Worker.(.+).JVM.OS.ProcessCpuTime", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/threading/daemonthreadcount": { + "metric": "Worker.(.+).JVM.Threading.DaemonThreadCount", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/threading/threadcount": { + "metric": "Worker.(.+).JVM.Threading.ThreadCount", + "pointInTime": true, + "temporal": true + } + } + } + } + ], + "HostComponent": [ + { + "type": "ganglia", + "metrics": { + "default": { + "metrics/boottime": { + "metric": "boottime", + 
"pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/network/bytes_in": { + "metric": "bytes_in", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/network/bytes_out": { + "metric": "bytes_out", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/cpu/cpu_aidle": { + "metric": "cpu_aidle", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/load_fifteen": { + "metric": "load_fifteen", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/load_five": { + "metric": "load_five", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/load_one": { + "metric": "load_one", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/memory/mem_buffers": { + "metric": "mem_buffers", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/disk/part_max_used": { + "metric": "part_max_used", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/network/pkts_in": { + "metric": "pkts_in", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/network/pkts_out": { + "metric": "pkts_out", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/process/proc_run": { + "metric": "proc_run", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/process/proc_total": { + "metric": "proc_total", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/memory/swap_total": { + "metric": "swap_total", + "pointInTime": true, + "temporal": true, + "amsHostMetric":true + }, + "metrics/storm/supervisor/jvm/memory/heap/committed": { + "metric": "Supervisor.JVM.Memory.Heap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/heap/init": { + "metric": "Supervisor.JVM.Memory.Heap.init", + "pointInTime": 
true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/heap/max": { + "metric": "Supervisor.JVM.Memory.Heap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/heap/used": { + "metric": "Supervisor.JVM.Memory.Heap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/nonheap/committed": { + "metric": "Supervisor.JVM.Memory.NonHeap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/nonheap/init": { + "metric": "Supervisor.JVM.Memory.NonHeap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/nonheap/max": { + "metric": "Supervisor.JVM.Memory.NonHeap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/memory/nonheap/used": { + "metric": "Supervisor.JVM.Memory.NonHeap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/os/processcputime": { + "metric": "Supervisor.JVM.OS.ProcessCpuTime", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/threading/daemonthreadcount": { + "metric": "Supervisor.JVM.Threading.DaemonThreadCount", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/supervisor/jvm/threading/threadcount": { + "metric": "Supervisor.JVM.Threading.ThreadCount", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/heap/committed": { + "metric": "Worker.(.+).JVM.Memory.Heap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/heap/init": { + "metric": "Worker.(.+).JVM.Memory.Heap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/heap/max": { + "metric": "Worker.(.+).JVM.Memory.Heap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/heap/used": { + "metric": "Worker.(.+).JVM.Memory.Heap.used", + "pointInTime": true, + 
"temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/nonheap/committed": { + "metric": "Worker.(.+).JVM.Memory.NonHeap.committed", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/nonheap/init": { + "metric": "Worker.(.+).JVM.Memory.NonHeap.init", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/nonheap/max": { + "metric": "Worker.(.+).JVM.Memory.NonHeap.max", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/memory/nonheap/used": { + "metric": "Worker.(.+).JVM.Memory.NonHeap.used", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/os/processcputime": { + "metric": "Worker.(.+).JVM.OS.ProcessCpuTime", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/threading/daemonthreadcount": { + "metric": "Worker.(.+).JVM.Threading.DaemonThreadCount", + "pointInTime": true, + "temporal": true + }, + "metrics/storm/worker/$1/jvm/threading/threadcount": { + "metric": "Worker.(.+).JVM.Threading.ThreadCount", + "pointInTime": true, + "temporal": true + } + } + } + } + ] + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/alerts/check_supervisor_process_win.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/alerts/check_supervisor_process_win.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/alerts/check_supervisor_process_win.py new file mode 100644 index 0000000..a698415 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/alerts/check_supervisor_process_win.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. 
See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +from resource_management.libraries.functions import check_windows_service_status + + +RESULT_CODE_OK = 'OK' +RESULT_CODE_CRITICAL = 'CRITICAL' +RESULT_CODE_UNKNOWN = 'UNKNOWN' + + +def get_tokens(): + """ + Returns a tuple of tokens in the format {{site/property}} that will be used + to build the dictionary passed into execute + """ + return () + +def execute(configurations={}, parameters={}, host_name=None): + """ + Returns a tuple containing the result code and a pre-formatted result label + + Keyword arguments: + configurations (dictionary): a mapping of configuration key to value + parameters (dictionary): a mapping of script parameter key to value + host_name (string): the name of this host where the alert is running + """ + + try: + check_windows_service_status("supervisor") + return (RESULT_CODE_OK, ["Supervisor is running"]) + except: + return (RESULT_CODE_CRITICAL, ["Supervisor is stopped"]) http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/files/wordCount.jar ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/files/wordCount.jar b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/files/wordCount.jar 
new file mode 100644 index 0000000..aed64be Binary files /dev/null and b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/files/wordCount.jar differ http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py new file mode 100644 index 0000000..b156578 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" + +import sys +from resource_management.libraries.functions import check_process_status +from resource_management.libraries.script import Script +from resource_management.libraries.functions import conf_select +from resource_management.libraries.functions import stack_select +from resource_management.libraries.functions import format +from resource_management.core.resources.system import Execute +from resource_management.libraries.functions.stack_features import check_stack_feature +from resource_management.libraries.functions import StackFeature +from storm import storm +from service import service +from service_check import ServiceCheck +from resource_management.libraries.functions.security_commons import build_expectations, \ + cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \ + FILE_TYPE_JAAS_CONF + +class DrpcServer(Script): + + def get_component_name(self): + return "storm-client" + + def install(self, env): + self.install_packages(env) + self.configure(env) + + def configure(self, env): + import params + env.set_params(params) + + storm() + + def pre_upgrade_restart(self, env, upgrade_type=None): + import params + env.set_params(params) + + if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): + conf_select.select(params.stack_name, "storm", params.version) + stack_select.select("storm-client", params.version) + + def start(self, env, upgrade_type=None): + import params + env.set_params(params) + self.configure(env) + + service("drpc", action="start") + + def stop(self, env, upgrade_type=None): + import params + env.set_params(params) + + service("drpc", action="stop") + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.pid_drpc) + + def security_status(self, env): + import status_params + + env.set_params(status_params) + + if status_params.security_enabled: + # Expect the following files to be available in 
status_params.config_dir: + # storm_jaas.conf + + try: + props_value_check = None + props_empty_check = ['StormServer/keyTab', 'StormServer/principal'] + props_read_check = ['StormServer/keyTab'] + storm_env_expectations = build_expectations('storm_jaas', props_value_check, props_empty_check, + props_read_check) + + storm_expectations = {} + storm_expectations.update(storm_env_expectations) + + security_params = get_params_from_filesystem(status_params.conf_dir, + {'storm_jaas.conf': FILE_TYPE_JAAS_CONF}) + + result_issues = validate_security_config_properties(security_params, storm_expectations) + if not result_issues: # If all validations passed successfully + # Double check the dict before calling execute + if ( 'storm_jaas' not in security_params + or 'StormServer' not in security_params['storm_jaas'] + or 'keyTab' not in security_params['storm_jaas']['StormServer'] + or 'principal' not in security_params['storm_jaas']['StormServer']): + self.put_structured_out({"securityState": "ERROR"}) + self.put_structured_out({"securityIssuesFound": "Keytab file or principal are not set property."}) + return + + cached_kinit_executor(status_params.kinit_path_local, + status_params.storm_user, + security_params['storm_jaas']['StormServer']['keyTab'], + security_params['storm_jaas']['StormServer']['principal'], + status_params.hostname, + status_params.tmp_dir) + self.put_structured_out({"securityState": "SECURED_KERBEROS"}) + else: + issues = [] + for cf in result_issues: + issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf])) + self.put_structured_out({"securityIssuesFound": ". 
".join(issues)}) + self.put_structured_out({"securityState": "UNSECURED"}) + except Exception as e: + self.put_structured_out({"securityState": "ERROR"}) + self.put_structured_out({"securityStateErrorInfo": str(e)}) + else: + self.put_structured_out({"securityState": "UNSECURED"}) + + def get_log_folder(self): + import params + return params.log_dir + + def get_user(self): + import params + return params.storm_user + +if __name__ == "__main__": + DrpcServer().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py new file mode 100644 index 0000000..b2148a5 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" + +import sys +from resource_management.libraries.functions import check_process_status +from resource_management.libraries.script import Script +from resource_management.libraries.functions import format +from resource_management.libraries.functions import conf_select +from resource_management.libraries.functions import stack_select +from resource_management.core.resources.system import Execute +from resource_management.libraries.functions.stack_features import check_stack_feature +from resource_management.libraries.functions import StackFeature +from storm import storm +from service import service +from resource_management.libraries.functions.security_commons import build_expectations, \ + cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \ + FILE_TYPE_JAAS_CONF +from setup_ranger_storm import setup_ranger_storm +from ambari_commons import OSConst +from ambari_commons.os_family_impl import OsFamilyImpl +from resource_management.core.resources.service import Service + +class Nimbus(Script): + def get_component_name(self): + return "storm-nimbus" + + def install(self, env): + self.install_packages(env) + self.configure(env) + + def configure(self, env): + import params + env.set_params(params) + storm("nimbus") + + +@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT) +class NimbusDefault(Nimbus): + + def pre_upgrade_restart(self, env, upgrade_type=None): + import params + env.set_params(params) + if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): + conf_select.select(params.stack_name, "storm", params.version) + stack_select.select("storm-client", params.version) + stack_select.select("storm-nimbus", params.version) + + + def start(self, env, upgrade_type=None): + import params + env.set_params(params) + self.configure(env) + setup_ranger_storm(upgrade_type=upgrade_type) + service("nimbus", action="start") + + + def stop(self, env, upgrade_type=None): + import params + 
env.set_params(params) + service("nimbus", action="stop") + + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.pid_nimbus) + + + def security_status(self, env): + import status_params + env.set_params(status_params) + if status_params.security_enabled: + # Expect the following files to be available in status_params.config_dir: + # storm_jaas.conf + try: + props_value_check = None + props_empty_check = ['StormServer/keyTab', 'StormServer/principal'] + props_read_check = ['StormServer/keyTab'] + storm_env_expectations = build_expectations('storm_jaas', props_value_check, props_empty_check, props_read_check) + storm_expectations = {} + storm_expectations.update(storm_env_expectations) + security_params = get_params_from_filesystem(status_params.conf_dir, {'storm_jaas.conf': FILE_TYPE_JAAS_CONF}) + result_issues = validate_security_config_properties(security_params, storm_expectations) + if not result_issues: # If all validations passed successfully + # Double check the dict before calling execute + if ( 'storm_jaas' not in security_params + or 'StormServer' not in security_params['storm_jaas'] + or 'keyTab' not in security_params['storm_jaas']['StormServer'] + or 'principal' not in security_params['storm_jaas']['StormServer']): + self.put_structured_out({"securityState": "ERROR"}) + self.put_structured_out({"securityIssuesFound": "Keytab file or principal are not set property."}) + return + + cached_kinit_executor(status_params.kinit_path_local, + status_params.storm_user, + security_params['storm_jaas']['StormServer']['keyTab'], + security_params['storm_jaas']['StormServer']['principal'], + status_params.hostname, + status_params.tmp_dir) + self.put_structured_out({"securityState": "SECURED_KERBEROS"}) + else: + issues = [] + for cf in result_issues: + issues.append("Configuration file %s did not pass the validation. 
Reason: %s" % (cf, result_issues[cf])) + self.put_structured_out({"securityIssuesFound": ". ".join(issues)}) + self.put_structured_out({"securityState": "UNSECURED"}) + except Exception as e: + self.put_structured_out({"securityState": "ERROR"}) + self.put_structured_out({"securityStateErrorInfo": str(e)}) + else: + self.put_structured_out({"securityState": "UNSECURED"}) + + def get_log_folder(self): + import params + return params.log_dir + + def get_user(self): + import params + return params.storm_user + +@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY) +class NimbusWindows(Nimbus): + def start(self, env): + import status_params + env.set_params(status_params) + Service(status_params.nimbus_win_service_name, action="start") + + def stop(self, env): + import status_params + env.set_params(status_params) + Service(status_params.nimbus_win_service_name, action="stop") + + def status(self, env): + import status_params + from resource_management.libraries.functions.windows_service_utils import check_windows_service_status + env.set_params(status_params) + check_windows_service_status(status_params.nimbus_win_service_name) + +if __name__ == "__main__": + Nimbus().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus_prod.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus_prod.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus_prod.py new file mode 100644 index 0000000..39bda4d --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus_prod.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. 
See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +import sys +from resource_management.libraries.script import Script +from storm import storm +from supervisord_service import supervisord_service, supervisord_check_status +from resource_management.libraries.functions import conf_select +from resource_management.libraries.functions import stack_select +from resource_management.libraries.functions import format +from resource_management.core.resources.system import Execute +from resource_management.libraries.functions.stack_features import check_stack_feature +from resource_management.libraries.functions import StackFeature + +class Nimbus(Script): + + def get_component_name(self): + return "storm-nimbus" + + def install(self, env): + self.install_packages(env) + self.configure(env) + + def configure(self, env): + import params + env.set_params(params) + + storm() + + def pre_upgrade_restart(self, env, upgrade_type=None): + import params + env.set_params(params) + + if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): + conf_select.select(params.stack_name, "storm", params.version) + stack_select.select("storm-client", params.version) + stack_select.select("storm-nimbus", params.version) + + def start(self, env, upgrade_type=None): + import params + env.set_params(params) + self.configure(env) + + 
supervisord_service("nimbus", action="start") + + def stop(self, env, upgrade_type=None): + import params + env.set_params(params) + + supervisord_service("nimbus", action="stop") + + def status(self, env): + supervisord_check_status("nimbus") + + def get_log_folder(self): + import params + return params.log_dir + + def get_user(self): + import params + return params.storm_user + +if __name__ == "__main__": + Nimbus().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py new file mode 100644 index 0000000..2ea02e8 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" + +import sys +from resource_management.libraries.functions import check_process_status +from resource_management.libraries.script import Script +from resource_management.libraries.functions import conf_select +from resource_management.libraries.functions import stack_select +from resource_management.libraries.functions import format +from resource_management.core.resources.system import Execute +from resource_management.libraries.functions.stack_features import check_stack_feature +from resource_management.libraries.functions import StackFeature +from storm import storm +from service import service +from service_check import ServiceCheck +from resource_management.libraries.functions.security_commons import build_expectations, \ + cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \ + FILE_TYPE_JAAS_CONF + +class PaceMaker(Script): + + def get_component_name(self): + return "storm-client" + + def install(self, env): + self.install_packages(env) + self.configure(env) + + def configure(self, env): + import params + env.set_params(params) + storm() + + def pre_upgrade_restart(self, env, upgrade_type=None): + import params + env.set_params(params) + + if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): + conf_select.select(params.stack_name, "storm", params.version) + stack_select.select("storm-client", params.version) + + def start(self, env, upgrade_type=None): + import params + env.set_params(params) + self.configure(env) + + service("pacemaker", action="start") + + def stop(self, env, upgrade_type=None): + import params + env.set_params(params) + + service("pacemaker", action="stop") + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.pid_pacemaker) + + def security_status(self, env): + import status_params + + env.set_params(status_params) + + if status_params.security_enabled: + # Expect the following files to be 
available in status_params.config_dir: + # storm_jaas.conf + + try: + props_value_check = None + props_empty_check = ['StormServer/keyTab', 'StormServer/principal'] + props_read_check = ['StormServer/keyTab'] + storm_env_expectations = build_expectations('storm_jaas', props_value_check, props_empty_check, + props_read_check) + + storm_expectations = {} + storm_expectations.update(storm_env_expectations) + + security_params = get_params_from_filesystem(status_params.conf_dir, + {'storm_jaas.conf': FILE_TYPE_JAAS_CONF}) + + result_issues = validate_security_config_properties(security_params, storm_expectations) + if not result_issues: # If all validations passed successfully + # Double check the dict before calling execute + if ( 'storm_jaas' not in security_params + or 'StormServer' not in security_params['storm_jaas'] + or 'keyTab' not in security_params['storm_jaas']['StormServer'] + or 'principal' not in security_params['storm_jaas']['StormServer']): + self.put_structured_out({"securityState": "ERROR"}) + self.put_structured_out({"securityIssuesFound": "Keytab file or principal are not set property."}) + return + + cached_kinit_executor(status_params.kinit_path_local, + status_params.storm_user, + security_params['storm_jaas']['StormServer']['keyTab'], + security_params['storm_jaas']['StormServer']['principal'], + status_params.hostname, + status_params.tmp_dir) + self.put_structured_out({"securityState": "SECURED_KERBEROS"}) + else: + issues = [] + for cf in result_issues: + issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf])) + self.put_structured_out({"securityIssuesFound": ". 
".join(issues)}) + self.put_structured_out({"securityState": "UNSECURED"}) + except Exception as e: + self.put_structured_out({"securityState": "ERROR"}) + self.put_structured_out({"securityStateErrorInfo": str(e)}) + else: + self.put_structured_out({"securityState": "UNSECURED"}) + + def get_log_folder(self): + import params + return params.log_dir + + def get_user(self): + import params + return params.storm_user + +if __name__ == "__main__": + PaceMaker().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py new file mode 100644 index 0000000..f10a3f3 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" +from ambari_commons import OSCheck +from resource_management.libraries.functions.default import default + +if OSCheck.is_windows_family(): + from params_windows import * +else: + from params_linux import * + +host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False) +retryAble = default("/commandParams/command_retry_enabled", False) http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5eeb4d/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py new file mode 100644 index 0000000..d715a25 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py @@ -0,0 +1,351 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" +import os +import re +import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set. 
+ +import status_params + +from ambari_commons.constants import AMBARI_SUDO_BINARY +from resource_management.libraries.functions.constants import Direction +from resource_management.libraries.functions import format +from resource_management.libraries.functions.version import format_stack_version +from resource_management.libraries.functions.default import default +from resource_management.libraries.functions.get_bare_principal import get_bare_principal +from resource_management.libraries.script import Script +from resource_management.libraries.resources.hdfs_resource import HdfsResource +from resource_management.libraries.functions import stack_select +from resource_management.libraries.functions import conf_select +from resource_management.libraries.functions import get_kinit_path +from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources +from resource_management.libraries.functions.stack_features import check_stack_feature +from resource_management.libraries.functions import StackFeature +from resource_management.libraries.functions.expect import expect + +# server configurations +config = Script.get_config() +tmp_dir = Script.get_tmp_dir() +stack_root = status_params.stack_root +sudo = AMBARI_SUDO_BINARY + +cluster_name = config['clusterName'] + +stack_name = status_params.stack_name +upgrade_direction = default("/commandParams/upgrade_direction", Direction.UPGRADE) +version = default("/commandParams/version", None) + +agent_stack_retry_on_unavailability = config['hostLevelParams']['agent_stack_retry_on_unavailability'] +agent_stack_retry_count = expect("/hostLevelParams/agent_stack_retry_count", int) + +storm_component_home_dir = status_params.storm_component_home_dir +conf_dir = status_params.conf_dir + +stack_version_unformatted = status_params.stack_version_unformatted +stack_version_formatted = status_params.stack_version_formatted +stack_supports_ru = stack_version_formatted and 
check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted) +stack_supports_storm_kerberos = stack_version_formatted and check_stack_feature(StackFeature.STORM_KERBEROS, stack_version_formatted) +stack_supports_storm_ams = stack_version_formatted and check_stack_feature(StackFeature.STORM_AMS, stack_version_formatted) +stack_supports_ranger_kerberos = stack_version_formatted and check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, stack_version_formatted) + +# default hadoop params +rest_lib_dir = "/usr/lib/storm/contrib/storm-rest" +storm_bin_dir = "/usr/bin" +storm_lib_dir = "/usr/lib/storm/lib/" + +# hadoop parameters for 2.2+ +if stack_supports_ru: + rest_lib_dir = format("{storm_component_home_dir}/contrib/storm-rest") + storm_bin_dir = format("{storm_component_home_dir}/bin") + storm_lib_dir = format("{storm_component_home_dir}/lib") + log4j_dir = format("{storm_component_home_dir}/log4j2") + +storm_user = config['configurations']['storm-env']['storm_user'] +log_dir = config['configurations']['storm-env']['storm_log_dir'] +pid_dir = status_params.pid_dir +local_dir = config['configurations']['storm-site']['storm.local.dir'] +user_group = config['configurations']['cluster-env']['user_group'] +java64_home = config['hostLevelParams']['java_home'] +jps_binary = format("{java64_home}/bin/jps") +nimbus_port = config['configurations']['storm-site']['nimbus.thrift.port'] +storm_zookeeper_root_dir = default('/configurations/storm-site/storm.zookeeper.root', None) +storm_zookeeper_servers = config['configurations']['storm-site']['storm.zookeeper.servers'] +storm_zookeeper_port = config['configurations']['storm-site']['storm.zookeeper.port'] +storm_logs_supported = config['configurations']['storm-env']['storm_logs_supported'] + +# nimbus.seeds is supported in HDP 2.3.0.0 and higher +nimbus_seeds_supported = default('/configurations/storm-env/nimbus_seeds_supported', False) +nimbus_host = default('/configurations/storm-site/nimbus.host', None) 
+nimbus_seeds = default('/configurations/storm-site/nimbus.seeds', None) +default_topology_max_replication_wait_time_sec = default('/configurations/storm-site/topology.max.replication.wait.time.sec.default', -1) +nimbus_hosts = default("/clusterHostInfo/nimbus_hosts", []) +default_topology_min_replication_count = default('/configurations/storm-site/topology.min.replication.count.default', 1) + +#Calculate topology.max.replication.wait.time.sec and topology.min.replication.count +if len(nimbus_hosts) > 1: + # for HA Nimbus + actual_topology_max_replication_wait_time_sec = -1 + actual_topology_min_replication_count = len(nimbus_hosts) / 2 + 1 +else: + # for non-HA Nimbus + actual_topology_max_replication_wait_time_sec = default_topology_max_replication_wait_time_sec + actual_topology_min_replication_count = default_topology_min_replication_count + +if 'topology.max.replication.wait.time.sec.default' in config['configurations']['storm-site']: + del config['configurations']['storm-site']['topology.max.replication.wait.time.sec.default'] +if 'topology.min.replication.count.default' in config['configurations']['storm-site']: + del config['configurations']['storm-site']['topology.min.replication.count.default'] + +rest_api_port = "8745" +rest_api_admin_port = "8746" +rest_api_conf_file = format("{conf_dir}/config.yaml") +storm_env_sh_template = config['configurations']['storm-env']['content'] +jmxremote_port = config['configurations']['storm-env']['jmxremote_port'] + +if 'ganglia_server_host' in config['clusterHostInfo'] and len(config['clusterHostInfo']['ganglia_server_host'])>0: + ganglia_installed = True + ganglia_server = config['clusterHostInfo']['ganglia_server_host'][0] + ganglia_report_interval = 60 +else: + ganglia_installed = False + +security_enabled = config['configurations']['cluster-env']['security_enabled'] + +storm_ui_host = default("/clusterHostInfo/storm_ui_server_hosts", []) + +storm_user_nofile_limit = 
# Reconstructed from a whitespace-mangled diff excerpt of Ambari's Storm
# params script: module-level parameter derivation read by the Storm service
# scripts (`params.<name>`). Relies on names defined earlier in the file:
# config, default(), format(), get_bare_principal(), get_kinit_path(),
# security_enabled, stack_supports_storm_kerberos, stack_version_formatted,
# check_stack_feature/StackFeature, stack_root, conf_dir, storm_user,
# storm_ui_host, status_params, stack_select, conf_select, HdfsResource,
# get_not_managed_resources, json, os.

storm_user_nofile_limit = default('/configurations/storm-env/storm_user_nofile_limit', 128000)
storm_user_nproc_limit = default('/configurations/storm-env/storm_user_noproc_limit', 65536)

if security_enabled:
  # Principals use _HOST placeholders; substitute the lowercased local hostname.
  _hostname_lowercase = config['hostname'].lower()
  _storm_principal_name = config['configurations']['storm-env']['storm_principal_name']
  storm_jaas_principal = _storm_principal_name.replace('_HOST', _hostname_lowercase)
  storm_keytab_path = config['configurations']['storm-env']['storm_keytab']

  if stack_supports_storm_kerberos:
    # Storm UI and Nimbus get their own service principals on kerberos-capable stacks.
    storm_ui_keytab_path = config['configurations']['storm-env']['storm_ui_keytab']
    _storm_ui_jaas_principal_name = config['configurations']['storm-env']['storm_ui_principal_name']
    storm_ui_jaas_principal = _storm_ui_jaas_principal_name.replace('_HOST', _hostname_lowercase)
    storm_bare_jaas_principal = get_bare_principal(_storm_principal_name)
    _nimbus_principal_name = config['configurations']['storm-env']['nimbus_principal_name']
    nimbus_jaas_principal = _nimbus_principal_name.replace('_HOST', _hostname_lowercase)
    nimbus_bare_jaas_principal = get_bare_principal(_nimbus_principal_name)
    nimbus_keytab_path = config['configurations']['storm-env']['nimbus_keytab']

kafka_bare_jaas_principal = None
if stack_supports_storm_kerberos:
  if security_enabled:
    storm_thrift_transport = config['configurations']['storm-site']['_storm.thrift.secure.transport']
    # generate KafkaClient jaas config if kafka is kerberoized
    _kafka_principal_name = default("/configurations/kafka-env/kafka_principal_name", None)
    kafka_bare_jaas_principal = get_bare_principal(_kafka_principal_name)
  else:
    storm_thrift_transport = config['configurations']['storm-site']['_storm.thrift.nonsecure.transport']

# --- Ambari Metrics (AMS) collector wiring for the Storm metrics sink ---
ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
has_metric_collector = not len(ams_collector_hosts) == 0
if has_metric_collector:
  # A VIP host/port in cluster-env (load-balanced collector) overrides the
  # first discovered collector host and the webapp address from ams-site.
  if 'cluster-env' in config['configurations'] and \
      'metrics_collector_vip_host' in config['configurations']['cluster-env']:
    metric_collector_host = config['configurations']['cluster-env']['metrics_collector_vip_host']
  else:
    metric_collector_host = ams_collector_hosts[0]
  if 'cluster-env' in config['configurations'] and \
      'metrics_collector_vip_port' in config['configurations']['cluster-env']:
    metric_collector_port = config['configurations']['cluster-env']['metrics_collector_vip_port']
  else:
    metric_collector_web_address = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "localhost:6188")
    if metric_collector_web_address.find(':') != -1:
      metric_collector_port = metric_collector_web_address.split(':')[1]
    else:
      metric_collector_port = '6188'

  metric_collector_report_interval = 60
  metric_collector_app_id = "nimbus"
  if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
    metric_collector_protocol = 'https'
  else:
    metric_collector_protocol = 'http'
  metric_truststore_path = default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
  metric_truststore_type = default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
  metric_truststore_password = default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
  pass
metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)
metric_collector_sink_jar = "/usr/lib/storm/lib/ambari-metrics-storm-sink*.jar"

jar_jvm_opts = ''

# Atlas related params
atlas_hosts = default('/clusterHostInfo/atlas_server_hosts', [])
has_atlas = len(atlas_hosts) > 0
atlas_plugin_package = "atlas-metadata*-hive-plugin"
atlas_ubuntu_plugin_package = "atlas-metadata.*-hive-plugin"

if has_atlas:
  # Environment variables win over the stack-default install locations.
  atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else stack_root + '/current/atlas-server'
  atlas_conf_dir = os.environ['METADATA_CONF'] if 'METADATA_CONF' in os.environ else '/etc/atlas/conf'
  jar_jvm_opts = '-Datlas.conf=' + atlas_conf_dir

# ranger host
stack_supports_ranger_audit_db = stack_version_formatted and check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, stack_version_formatted)
ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
has_ranger_admin = not len(ranger_admin_hosts) == 0
xml_configurations_supported = config['configurations']['ranger-env']['xml_configurations_supported']
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]

#ranger storm properties
policymgr_mgr_url = config['configurations']['admin-properties']['policymgr_external_url']
xa_audit_db_name = config['configurations']['admin-properties']['audit_db_name']
xa_audit_db_user = config['configurations']['admin-properties']['audit_db_user']
xa_db_host = config['configurations']['admin-properties']['db_host']
repo_name = str(config['clusterName']) + '_storm'

common_name_for_certificate = config['configurations']['ranger-storm-plugin-properties']['common.name.for.certificate']

storm_ui_port = config['configurations']['storm-site']['ui.port']

repo_config_username = config['configurations']['ranger-storm-plugin-properties']['REPOSITORY_CONFIG_USERNAME']
ranger_env = config['configurations']['ranger-env']
ranger_plugin_properties = config['configurations']['ranger-storm-plugin-properties']
policy_user = config['configurations']['ranger-storm-plugin-properties']['policy_user']
storm_cluster_log4j_content = config['configurations']['storm-cluster-log4j']['content']
storm_worker_log4j_content = config['configurations']['storm-worker-log4j']['content']

# some commands may need to supply the JAAS location when running as storm
storm_jaas_file = format("{conf_dir}/storm_jaas.conf")

# For curl command in ranger plugin to get db connector
jdk_location = config['hostLevelParams']['jdk_location']
java_share_dir = '/usr/share/java'

if has_ranger_admin:
  enable_ranger_storm = (config['configurations']['ranger-storm-plugin-properties']['ranger-storm-plugin-enabled'].lower() == 'yes')
  xa_audit_db_password = unicode(config['configurations']['admin-properties']['audit_db_password']) if stack_supports_ranger_audit_db else None
  repo_config_password = unicode(config['configurations']['ranger-storm-plugin-properties']['REPOSITORY_CONFIG_PASSWORD'])
  xa_audit_db_flavor = (config['configurations']['admin-properties']['DB_FLAVOR']).lower()

  # Per-flavor JDBC jar name, audit JDBC URL and driver class for Ranger DB audit.
  if stack_supports_ranger_audit_db:
    if xa_audit_db_flavor == 'mysql':
      jdbc_jar_name = default("/hostLevelParams/custom_mysql_jdbc_name", None)
      audit_jdbc_url = format('jdbc:mysql://{xa_db_host}/{xa_audit_db_name}')
      jdbc_driver = "com.mysql.jdbc.Driver"
    elif xa_audit_db_flavor == 'oracle':
      jdbc_jar_name = default("/hostLevelParams/custom_oracle_jdbc_name", None)
      # 2 colons => host:port:SID (thin @host form); 1 colon => host:port needs //host form.
      colon_count = xa_db_host.count(':')
      if colon_count == 2 or colon_count == 0:
        audit_jdbc_url = format('jdbc:oracle:thin:@{xa_db_host}')
      else:
        audit_jdbc_url = format('jdbc:oracle:thin:@//{xa_db_host}')
      jdbc_driver = "oracle.jdbc.OracleDriver"
    elif xa_audit_db_flavor == 'postgres':
      jdbc_jar_name = default("/hostLevelParams/custom_postgres_jdbc_name", None)
      audit_jdbc_url = format('jdbc:postgresql://{xa_db_host}/{xa_audit_db_name}')
      jdbc_driver = "org.postgresql.Driver"
    elif xa_audit_db_flavor == 'mssql':
      jdbc_jar_name = default("/hostLevelParams/custom_mssql_jdbc_name", None)
      audit_jdbc_url = format('jdbc:sqlserver://{xa_db_host};databaseName={xa_audit_db_name}')
      jdbc_driver = "com.microsoft.sqlserver.jdbc.SQLServerDriver"
    elif xa_audit_db_flavor == 'sqla':
      jdbc_jar_name = default("/hostLevelParams/custom_sqlanywhere_jdbc_name", None)
      audit_jdbc_url = format('jdbc:sqlanywhere:database={xa_audit_db_name};host={xa_db_host}')
      jdbc_driver = "sap.jdbc4.sqlanywhere.IDriver"

  downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}") if stack_supports_ranger_audit_db else None
  driver_curl_source = format("{jdk_location}/{jdbc_jar_name}") if stack_supports_ranger_audit_db else None
  driver_curl_target = format("{storm_component_home_dir}/lib/{jdbc_jar_name}") if stack_supports_ranger_audit_db else None
  sql_connector_jar = ''

  storm_ranger_plugin_config = {
    'username': repo_config_username,
    'password': repo_config_password,
    'nimbus.url': 'http://' + storm_ui_host[0].lower() + ':' + str(storm_ui_port),
    'commonNameForCertificate': common_name_for_certificate
  }

  storm_ranger_plugin_repo = {
    'isActive': 'true',
    'config': json.dumps(storm_ranger_plugin_config),
    'description': 'storm repo',
    'name': repo_name,
    'repositoryType': 'storm',
    'assetType': '6'
  }

  # NOTE(review): nesting below reconstructed from a whitespace-mangled diff;
  # the kerberized repo definition appears to override the one above when the
  # stack supports Ranger kerberos — confirm against the upstream file.
  if stack_supports_ranger_kerberos and security_enabled:
    storm_ranger_plugin_config['policy.download.auth.users'] = storm_user
    storm_ranger_plugin_config['tag.download.auth.users'] = storm_user

    storm_ranger_plugin_repo = {
      'isEnabled': 'true',
      'configs': storm_ranger_plugin_config,
      'description': 'storm repo',
      'name': repo_name,
      'type': 'storm'
    }

  # Pick the principal/keytab Ranger should use based on which Storm component
  # this host runs (component_directory comes from status_params).
  if stack_supports_ranger_kerberos and security_enabled and 'storm-nimbus' in status_params.component_directory.lower():
    ranger_storm_principal = nimbus_jaas_principal
    ranger_storm_keytab = nimbus_keytab_path
  elif stack_supports_ranger_kerberos and security_enabled and 'storm-client' in status_params.component_directory.lower():
    ranger_storm_principal = storm_ui_jaas_principal
    ranger_storm_keytab = storm_ui_keytab_path

  xa_audit_db_is_enabled = False
  ranger_audit_solr_urls = config['configurations']['ranger-admin-site']['ranger.audit.solr.urls']
  if xml_configurations_supported and stack_supports_ranger_audit_db:
    xa_audit_db_is_enabled = config['configurations']['ranger-storm-audit']['xasecure.audit.destination.db']
  xa_audit_hdfs_is_enabled = config['configurations']['ranger-storm-audit']['xasecure.audit.destination.hdfs'] if xml_configurations_supported else None
  ssl_keystore_password = unicode(config['configurations']['ranger-storm-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password']) if xml_configurations_supported else None
  ssl_truststore_password = unicode(config['configurations']['ranger-storm-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
  credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None

  #For SQLA explicitly disable audit to DB for Ranger
  if xa_audit_db_flavor == 'sqla':
    xa_audit_db_is_enabled = False

# --- HDFS access parameters (only meaningful when a NameNode is present) ---
namenode_hosts = default("/clusterHostInfo/namenode_host", [])
has_namenode = not len(namenode_hosts) == 0

hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] if has_namenode else None
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if has_namenode else None
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))

import functools
#create partial functions with common arguments for every HdfsResource call
#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
HdfsResource = functools.partial(
  HdfsResource,
  user=hdfs_user,
  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
  security_enabled = security_enabled,
  keytab = hdfs_user_keytab,
  kinit_path_local = kinit_path_local,
  hadoop_bin_dir = hadoop_bin_dir,
  hadoop_conf_dir = hadoop_conf_dir,
  principal_name = hdfs_principal_name,
  hdfs_site = hdfs_site,
  default_fs = default_fs,
  immutable_paths = get_not_managed_resources()
)