Filippo Giunchedi has uploaded a new change for review. ( 
https://gerrit.wikimedia.org/r/398505 )

Change subject: First version
......................................................................

First version

Still missing backend servers aggregated metrics.

Bug: T181995
Change-Id: Ib8455100f0d2c435384fc9d88bc5c9ba2158e3f7
---
A prometheus-nutcracker-exporter
1 file changed, 138 insertions(+), 0 deletions(-)


  git pull 
ssh://gerrit.wikimedia.org:29418/operations/debs/prometheus-nutcracker-exporter 
refs/changes/05/398505/1

diff --git a/prometheus-nutcracker-exporter b/prometheus-nutcracker-exporter
new file mode 100755
index 0000000..90ccb28
--- /dev/null
+++ b/prometheus-nutcracker-exporter
@@ -0,0 +1,138 @@
+#!/usr/bin/python
+# Copyright 2017 Moritz Muehlenhoff
+#                Filippo Giunchedi
+#                Wikimedia Foundation
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import json
+import logging
+import re
+import sys
+import time
+import urllib2
+
+from prometheus_client import start_http_server, Summary
+from prometheus_client.core import (CounterMetricFamily, GaugeMetricFamily,
+                                    REGISTRY)
+
+log = logging.getLogger(__name__)
+
+
class PrometheusNutcrackerCollector(object):
    """Prometheus collector for nutcracker (twemproxy) statistics.

    Scrapes the JSON stats endpoint nutcracker serves on its stats port
    (22222 by default) and translates global and per-pool counters into
    Prometheus metric families.  Per-backend-server stats are not yet
    aggregated (see commit message).
    """

    # Time spent in collect(), exported as a scrape-duration Summary.
    scrape_duration = Summary(
        'nutcracker_scrape_duration_seconds',
        'Nutcracker exporter scrape duration')

    # Default nutcracker stats endpoint.
    stats_url = 'http://localhost:22222'

    def _load_stats(self, url):
        """Read the open stats response and return the parsed stats dict.

        Some nutcracker versions emit invalid JSON with doubly-quoted
        keys (e.g. ""pool"":) — the regex strips the outer pair of
        quotes from any ""..."" immediately followed by a colon before
        handing the text to json.loads.
        """
        fixed_json = re.sub(r'"("[^"]+")"(?=:)', r'\1', url.read())
        return json.loads(fixed_json)

    def _pool_metrics(self, stats):
        """Yield per-pool counter families from the parsed stats dict.

        Top-level dict values in ``stats`` are pools; their scalar
        entries are mapped onto the counter families below.  Nested
        dicts (per-server stats) are skipped for now.
        """
        metrics = {
            'client_eof': CounterMetricFamily(
                'nutcracker_pool_client_eof', '', labels=['pool']),
            'client_err': CounterMetricFamily(
                'nutcracker_pool_client_err', '', labels=['pool']),
            'client_connections': CounterMetricFamily(
                'nutcracker_pool_client_connections', '', labels=['pool']),
            'server_ejects': CounterMetricFamily(
                'nutcracker_pool_server_ejects', '', labels=['pool']),
            'forward_error': CounterMetricFamily(
                'nutcracker_pool_forward_error', '', labels=['pool']),
            'fragments': CounterMetricFamily(
                'nutcracker_pool_fragments', '', labels=['pool']),
        }

        for pool_name, pool_stats in stats.iteritems():
            # Scalar top-level entries (version, uptime, ...) are not pools.
            if not isinstance(pool_stats, dict):
                continue
            for stat, value in pool_stats.iteritems():
                if isinstance(value, dict):
                    # XXX aggregate per-server stats
                    continue
                if stat not in metrics:
                    continue
                metrics[stat].add_metric([pool_name], value)

        for metric in metrics.values():
            yield metric

    @scrape_duration.time()
    def collect(self):
        """Scrape the stats endpoint and yield all metric families.

        Always yields ``nutcracker_up`` first (1 if the endpoint was
        reachable and returned HTTP 200, else 0); on failure no further
        metrics are emitted.
        """
        up = GaugeMetricFamily('nutcracker_up', 'Nutcracker is running')

        try:
            url = urllib2.urlopen(self.stats_url)
        except urllib2.URLError:
            log.error('Could not connect to Nutcracker stats URL')
            up.add_metric([], 0)
            yield up
            return

        # BUG FIX: the original yielded 'up' without any sample on a
        # non-200 response and then crashed calling _pool_metrics('') —
        # treat any non-200 status as "down" and stop the scrape.
        if url.code != 200:
            log.error('Unexpected status %s from Nutcracker stats URL',
                      url.code)
            up.add_metric([], 0)
            yield up
            return

        stats = self._load_stats(url)
        up.add_metric([], 1)
        yield up

        metrics = {
            'version': CounterMetricFamily(
                'nutcracker_info', '', labels=['version']),
            'timestamp': CounterMetricFamily(
                'nutcracker_start_time_seconds', ''),
            'total_connections': CounterMetricFamily(
                'nutcracker_total_connections', ''),
            'curr_connections': CounterMetricFamily(
                'nutcracker_curr_connections', ''),
        }

        for name, family in metrics.iteritems():
            if name not in stats:
                continue

            if name == 'version':
                # Info-style metric: constant value 1, version as a label.
                family.add_metric([stats.get(name)], 1)
            else:
                try:
                    family.add_metric([], float(stats.get(name)))
                except ValueError:
                    # Non-numeric value: export NaN rather than aborting.
                    family.add_metric([], float('nan'))

        for metric in metrics.values():
            yield metric

        for metric in self._pool_metrics(stats):
            yield metric
+
+
def main():
    """Parse flags, start the exporter HTTP server and block until killed.

    Returns 0 on a clean shutdown (KeyboardInterrupt); the HTTP server
    runs in a daemon thread, so the main thread sleeps in a loop to keep
    the process alive.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-l', '--listen', metavar='ADDRESS',
                        help='Listen on this address', default=':9198')
    parser.add_argument('-d', '--debug', action='store_true',
                        help='Enable debug logging')
    args = parser.parse_args()

    logging.basicConfig(
        level=logging.DEBUG if args.debug else logging.WARNING)

    # ADDRESS is 'host:port'; an empty host (default ':9198') makes
    # start_http_server bind all interfaces.
    address, port = args.listen.split(':', 1)

    log.info('Starting nutcracker_exporter on %s:%s', address, port)

    REGISTRY.register(PrometheusNutcrackerCollector())
    start_http_server(int(port), addr=address)

    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        # FIX: Ctrl-C is a requested, clean shutdown — exit 0, not 1
        # as the original did.
        return 0
+
+
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == '__main__':
    sys.exit(main())

-- 
To view, visit https://gerrit.wikimedia.org/r/398505
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Ib8455100f0d2c435384fc9d88bc5c9ba2158e3f7
Gerrit-PatchSet: 1
Gerrit-Project: operations/debs/prometheus-nutcracker-exporter
Gerrit-Branch: master
Gerrit-Owner: Filippo Giunchedi <[email protected]>

_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits

Reply via email to