From 84fe43f5dc89c9795ba3d2fecfd9ad881df8924a Mon Sep 17 00:00:00 2001
From: Michael Rüttgers
Date: Tue, 1 Sep 2015 11:47:00 +0200
Subject: [PATCH] Added Graphite support for storing statistical data

---
 README.md       | 40 ++++++++++++++++++++++++++++++++++++++++
 backend.py      | 19 +++++++++++++++++++
 lib/graphite.py | 47 +++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 106 insertions(+)
 create mode 100644 lib/graphite.py

diff --git a/README.md b/README.md
index 39e5675..ecba268 100644
--- a/README.md
+++ b/README.md
@@ -122,3 +122,43 @@ to your webserver.
     jq '.nodes = (.nodes | to_entries | map(.value)) | .version = 2' \
       < nodes.json > nodes.json.new
     mv nodes.json.new nodes.json
+
+
+# Graphite support
+
+## Command line arguments
+Running `backend.py` with `--with-graphite` enables Graphite support for storing statistical data.
+
+    graphite integration:
+      --with-graphite       Send statistical data to graphite backend
+      --graphite-host GRAPHITE_HOST
+                            Hostname of the machine running graphite
+      --graphite-port GRAPHITE_PORT
+                            Port of the carbon daemon
+      --graphite-prefix GRAPHITE_PREFIX
+                            Storage prefix (default value: 'freifunk.nodes.')
+      --graphite-metrics GRAPHITE_METRICS
+                            Comma separated list of metrics to store (default
+                            value: 'clients,loadavg,uptime')
+
+## Graphite configuration
+
+### storage-schemas.conf
+
+    [freifunk_node_stats]
+    pattern = ^freifunk\.nodes\.
+    retentions = 60s:1d,5min:7d,1h:30d,1d:4y
+
+### storage-aggregation.conf
+
+    [freifunk_node_stats_loadavg]
+    pattern = ^freifunk\.nodes\..*\.loadavg$
+    aggregationMethod = average
+
+    [freifunk_node_stats_clients]
+    pattern = ^freifunk\.nodes\..*\.clients$
+    aggregationMethod = max
+
+    [freifunk_node_stats_uptime]
+    pattern = ^freifunk\.nodes\..*\.uptime$
+    aggregationMethod = last
diff --git a/backend.py b/backend.py
index 11e2deb..74c4cf4 100755
--- a/backend.py
+++ b/backend.py
@@ -18,6 +18,7 @@ from lib.batman import Batman
 from lib.rrddb import RRD
 from lib.nodelist import export_nodelist
 from lib.validate import validate_nodeinfos
+from lib.graphite import Graphite
 
 NODES_VERSION = 2
 GRAPH_VERSION = 1
@@ -150,6 +151,11 @@ def main(params):
     with open(nodelist_fn, 'w') as f:
         json.dump(export_nodelist(now, nodedb), f)
 
+    # optional Graphite integration
+    if params['graphite']:
+        graphite = Graphite(params['graphite_host'], params['graphite_port'])
+        graphite.update(params['graphite_prefix'], params['graphite_metrics'], nodedb['nodes'])
+
     # optional rrd graphs (trigger with --rrd)
     if params['rrd']:
         rrd = RRD(params['rrd_path'], os.path.join(params['dest_dir'], 'nodes'))
@@ -182,5 +188,18 @@ if __name__ == '__main__':
                      help='enable the rendering of RRD graphs (cpu '
                           'intensive)')
 
+    # Graphite integration
+    graphite = parser.add_argument_group('graphite integration')
+    graphite.add_argument('--with-graphite', dest='graphite', action='store_true', default=False,
+                          help='Send statistical data to graphite backend')
+    graphite.add_argument('--graphite-host', dest='graphite_host', default="localhost",
+                          help='Hostname of the machine running graphite')
+    graphite.add_argument('--graphite-port', dest='graphite_port', default=2003, type=int,
+                          help='Port of the carbon daemon')
+    graphite.add_argument('--graphite-prefix', dest='graphite_prefix', default="freifunk.nodes.",
+                          help='Storage prefix (default value: \'freifunk.nodes.\')')
+    graphite.add_argument('--graphite-metrics', dest='graphite_metrics', default="clients,loadavg,uptime",
+                          help='Comma separated list of metrics to store (default value: \'clients,loadavg,uptime\')')
+
     options = vars(parser.parse_args())
     main(options)
diff --git a/lib/graphite.py b/lib/graphite.py
new file mode 100644
index 0000000..571e98a
--- /dev/null
+++ b/lib/graphite.py
@@ -0,0 +1,47 @@
+import socket
+import time
+
+
+class Graphite(object):
+
+    def __init__(self, hostname, port):
+        self.hostname = hostname
+        self.port = int(port)
+
+    def flatten_dict(self, d):
+        def expand(key, value):
+            if isinstance(value, dict):
+                return [('{}.{}'.format(key, k), v) for k, v in self.flatten_dict(value).items()]
+            else:
+                return [(key, value)]
+        items = [item for k, v in d.items() for item in expand(k, v)]
+        return dict(items)
+
+    def update(self, prefix, metrics, nodes):
+        timestamp = int(time.time())
+
+        sock = socket.socket()
+        sock.connect((self.hostname, self.port))
+
+        for node in nodes:
+            try:
+                if node['flags']['online']:
+                    stats = self.flatten_dict(node['statistics'])
+                    for metric in metrics.split(','):
+                        try:
+                            msg = '{}{}.{} {} {}\n'.format(
+                                prefix,
+                                node['nodeinfo']['node_id'].replace(' ', '_'),
+                                metric.replace(' ', '_'),
+                                stats[metric],
+                                timestamp
+                            )
+                            sock.sendall(msg.encode('utf-8'))
+
+                        except KeyError:
+                            pass
+
+            except KeyError:
+                pass
+
+        sock.close()
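
A quick way to exercise the new Graphite class on its own is sketched below. The node dictionary, the metric values, and the Carbon daemon assumed to listen on localhost:2003 are illustrative assumptions rather than part of the patch; the dictionary merely mimics the shape of the nodedb['nodes'] entries that backend.py hands to Graphite.update().

    from lib.graphite import Graphite

    # A single made-up node, shaped like the entries backend.py passes in:
    # an online flag, a node_id, and a statistics dict holding the metrics.
    nodes = [{
        'flags': {'online': True},
        'nodeinfo': {'node_id': 'deadbeef0000'},
        'statistics': {'clients': 23, 'loadavg': 0.42, 'uptime': 86400},
    }]

    # Assumes a Carbon daemon listening on localhost:2003;
    # connect() raises ConnectionRefusedError otherwise.
    graphite = Graphite('localhost', 2003)
    graphite.update('freifunk.nodes.', 'clients,loadavg,uptime', nodes)

Each online node then yields one line per metric in Carbon's plaintext protocol, e.g.

    freifunk.nodes.deadbeef0000.clients 23 1441101220

where the value and timestamp shown are placeholders.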