Compare commits


1 commit

Author: Jan-Philipp Litza
SHA1: 88e046a48c
Message: NodeRRD: add many more DS and reduce retention duration
Date: 2015-06-19 15:42:58 +02:00

11 changed files with 86 additions and 641 deletions

.gitignore (vendored, 3 deletions)

@@ -1,6 +1,3 @@
-# backups
-*~
-
 # script-generated
 aliases*.json
 nodedb/

backend.py

@@ -2,20 +2,11 @@
 """
 backend.py - ffmap-backend runner
 https://github.com/ffnord/ffmap-backend
-Extended version from Freifunk Pinneberg
-- generate graphs from RRD data only on request
-- directory for the RRD nodedb as a command line parameter
-- fixed statistics generation: initialisation and population
-  at the appropriate points in time
 """
 import argparse
-import configparser
 import json
 import os
 import sys
-import logging, logging.handlers
 from datetime import datetime

 import networkx as nx
@@ -31,18 +22,6 @@ from lib.validate import validate_nodeinfos
 NODES_VERSION = 1
 GRAPH_VERSION = 1

-cfg = {
-    'cfgfile': '/etc/ffmap/ffmap-test.cfg',
-    'logfile': '/var/log/ffmap.log',
-    'loglevel': 5,
-    'dest_dir': '/var/lib/ffmap/mapdata',
-    'aliases': [],
-    'prune': 0,
-    'nodedb': '/var/lib/ffmap/nodedb',
-    'rrd_data': False,
-    'rrd_graphs': False,
-    'redis': False
-}

 def main(params):
     os.makedirs(params['dest_dir'], exist_ok=True)
@@ -77,11 +56,11 @@ def main(params):
                 'Unparseable value "{0}" in --mesh parameter.'.
                 format(value))

-    # read nodedb state from nodes.json
+    # read nodedb state from node.json
     try:
         with open(nodes_fn, 'r') as nodedb_handle:
             nodedb = json.load(nodedb_handle)
-    except (IOError, ValueError):
+    except IOError:
         nodedb = {'nodes': dict()}

     # flush nodedb if it uses the old format
@@ -109,8 +88,9 @@ def main(params):
         nodes.import_nodeinfo(nodedb['nodes'], nodeinfo,
                               now, assume_online=False)

-    # prepare statistics collection
     nodes.reset_statistics(nodedb['nodes'])
+    for alfred in alfred_instances:
+        nodes.import_statistics(nodedb['nodes'], alfred.statistics())

     # acquire gwl and visdata for each batman instance
     mesh_info = []
@@ -127,10 +107,6 @@ def main(params):
         nodes.mark_vis_data_online(nodedb['nodes'], vd, now)
         nodes.mark_gateways(nodedb['nodes'], gwl)

-    # get alfred statistics
-    for alfred in alfred_instances:
-        nodes.import_statistics(nodedb['nodes'], alfred.statistics())

     # clear the nodedb from nodes that have not been online in $prune days
     if params['prune']:
         nodes.prune_nodes(nodedb['nodes'], now, params['prune'])
@@ -175,35 +151,15 @@ def main(params):
     # optional rrd graphs (trigger with --rrd)
     if params['rrd']:
-        if params['nodedb']:
-            rrd = RRD(params['nodedb'], os.path.join(params['dest_dir'], 'nodes'))
-        else:
         script_directory = os.path.dirname(os.path.realpath(__file__))
         rrd = RRD(os.path.join(script_directory, 'nodedb'),
                   os.path.join(params['dest_dir'], 'nodes'))
-        rrd.update_database(nodedb['nodes'])
-        if params['img']:
+        rrd.update_database(nodedb['nodes'], batadv_graph)
         rrd.update_images()

-def set_loglevel(nr):
-    """
-    Map the number to a value suitable for the "logging" module.
-    The number can range from 0 (no logging) to 5 (debug).
-    """
-    level = (None, logging.CRITICAL, logging.ERROR, logging.WARNING,
-             logging.INFO, logging.DEBUG)
-    if nr > 5:
-        nr = 5
-    elif nr < 0:
-        nr = 0
-    return level[nr]

 if __name__ == '__main__':
-    # get options from command line
-    parser = argparse.ArgumentParser(
-        description = "Collect data for ffmap: creates json files and "
-        "optional rrd data and graphs")
+    parser = argparse.ArgumentParser()

     parser.add_argument('-a', '--aliases',
                         help='Read aliases from FILE',
@@ -215,55 +171,15 @@ if __name__ == '__main__':
                              'bat0:/run/alfred0.sock.')
     parser.add_argument('-d', '--dest-dir', action='store',
                         help='Write output to destination directory',
-                        required=False)
-    parser.add_argument('-c', '--config', action='store', metavar='FILE',
-                        help='read configuration from FILE')
+                        required=True)
     parser.add_argument('-V', '--vpn', nargs='+', metavar='MAC',
                         help='Assume MAC addresses are part of vpn')
     parser.add_argument('-p', '--prune', metavar='DAYS', type=int,
-                        help='Forget nodes offline for at least DAYS')
-    parser.add_argument('-r', '--with-rrd', dest='rrd', action='store_true',
+                        help='forget nodes offline for at least DAYS')
+    parser.add_argument('--with-rrd', dest='rrd', action='store_true',
                         default=False,
-                        help='Enable the collection of RRD data')
-    parser.add_argument('-n', '--nodedb', metavar='RRD_DIR', action='store',
-                        help='Directory for node RRD data files')
-    parser.add_argument('-i', '--with-img', dest='img', action='store_true',
-                        default=False,
-                        help='Enable the rendering of RRD graphs (cpu '
+                        help='enable the rendering of RRD graphs (cpu '
                         'intensive)')

     options = vars(parser.parse_args())

-    if options['config']:
-        cfg['cfgfile'] = options['config']
-    config = configparser.ConfigParser(cfg)
-    if config.read(cfg['cfgfile']):
-        if not options['nodedb']:
-            options['nodedb'] = config.get('rrd', 'nodedb')
-        if not options['dest_dir']:
-            options['dest_dir'] = config.get('global', 'dest_dir')
-        if not options['rrd']:
-            options['rrd'] = config.getboolean('rrd', 'enabled')
-        if not options['img']:
-            options['img'] = config.getboolean('rrd', 'graphs')
-        cfg['logfile'] = config.get('global', 'logfile')
-        cfg['loglevel'] = config.getint('global', 'loglevel')

-    # At this point global configuration is available. Time to enable logging
-    # Logging is handled by the operating system, so use WatchedFileHandler
-    handler = logging.handlers.WatchedFileHandler(cfg['logfile'])
-    handler.setFormatter(logging.Formatter(fmt='%(asctime)s %(levelname)s %(message)s',
-                                           datefmt='%Y-%m-%d %H:%M:%S'))
-    log = logging.getLogger()
-    log.addHandler(handler)
-    loglevel = set_loglevel(cfg['loglevel'])
-    if loglevel:
-        log.setLevel(loglevel)
-    else:
-        log.disabled = True
-    log.info("%s started" % sys.argv[0])
-    if os.path.isfile(cfg['cfgfile']):
-        log.info("using configuration from '%s'" % cfg['cfgfile'])

     main(options)
-    log.info("%s finished" % sys.argv[0])
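The hunk above also narrows the exception handler around json.load from (IOError, ValueError) back to IOError only. Catching ValueError as well additionally covers a corrupt or truncated nodes.json. A minimal sketch of the more defensive variant; the helper name load_nodedb is hypothetical, the fallback shape is taken from the diff:

import json

def load_nodedb(nodes_fn):
    # Fall back to an empty node dict if the state file is missing (IOError)
    # or cannot be parsed (ValueError, which json.JSONDecodeError derives from).
    try:
        with open(nodes_fn, 'r') as nodedb_handle:
            return json.load(nodedb_handle)
    except (IOError, ValueError):
        return {'nodes': dict()}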


@@ -1,185 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Modify node data manually

This is useful e.g. for switched-off nodes that are only temporarily
unavailable. Such nodes can be hidden from the map.
Hiding is better than deleting, because the statistics are not lost.

Change log
==========
Version  Date         Change(s)                                              By
-------- -----------  ----------------------------------------------------- ----
1.0      2017-08-03   Integrated the program into the ffmap-backend project  tho
"""
import argparse
import configparser
import json
import os
import sys
import glob

# Settings are processed in the following order;
# values set later override earlier ones:
# 1. hard-coded in the program
# 2. read from the central configuration file
# 3. given as command line options
cfg = {
    'cfgfile': '/etc/ffmap/ffmap.cfg',
    'logfile': '/var/log/ffmap.log',
    'loglevel': 2,
    'dest_dir': '/var/lib/ffmap/mapdata',
    'nodedb': '/var/lib/ffmap/nodedb',
    'imgpath': '/var/www/meshviewer/stats/img'
}

roles_defined = ('node', 'temp', 'mobile', 'offloader', 'service', 'test', 'gate', 'plan', 'hidden')
def main(cfg):
    # Paths of the files involved
    nodes_fn = os.path.join(cfg['dest_dir'], 'nodes.json')
    nodelist_fn = os.path.join(cfg['dest_dir'], 'nodelist.json')

    # 1. Node data (NodeDB)
    # 1.1 Load data
    try:
        with open(nodes_fn, 'r') as nodedb_handle:
            nodedb = json.load(nodedb_handle)
    except IOError:
        print("Error reading nodedb file %s" % nodes_fn)
        nodedb = {'nodes': dict()}

    # 1.2 Edit nodes
    changed = False
    for n in cfg['nodeid']:
        if n in nodedb['nodes']:
            print("Modify %s in nodedb" % n)
            if 'role' in cfg and cfg['role'] in roles_defined:
                try:
                    oldrole = nodedb['nodes'][n]['nodeinfo']['system']['role']
                except KeyError:
                    oldrole = '<unset>'
                print(" - change role from '%s' to '%s'" % (oldrole, cfg['role']))
                nodedb['nodes'][n]['nodeinfo']['system']['role'] = cfg['role']
                changed = True
            if 'location' in cfg:
                print(" - remove location")
                # del nodedb['nodes'][n]['nodeinfo']['location']
                changed = True
        else:
            print("Node %s not found in nodedb" % n)

    # 1.3 Write the changed data back
    if changed:
        try:
            with open(nodes_fn, 'w') as nodedb_handle:
                json.dump(nodedb, nodedb_handle)
        except IOError:
            print("Error writing nodedb file %s" % nodes_fn)
    # 2. Node list (NodeList)
    try:
        with open(nodelist_fn, 'r') as nodelist_handle:
            nodelist = json.load(nodelist_handle)
    except IOError:
        print("Error reading nodelist file %s" % nodelist_fn)
        nodelist = {'nodelist': dict()}

    # 2.1 Edit nodes
    changed = False
    ixlist = []
    for nodeid in cfg['nodeid']:
        found = False
        for ix, node in enumerate(nodelist['nodes']):
            if node['id'] == nodeid:
                found = True
                break
        if found:
            print("Modify %s in nodelist" % nodeid)
            if 'role' in cfg and cfg['role'] in roles_defined:
                try:
                    oldrole = nodelist['nodes'][ix]['role']
                except KeyError:
                    oldrole = '<unset>'
                print(" - change role from '%s' to '%s'" % (oldrole, cfg['role']))
                nodelist['nodes'][ix]['role'] = cfg['role']
            if 'location' in cfg:
                print(" - remove location")
                try:
                    # del nodelist['nodes'][ix]['position']
                    pass
                except KeyError:
                    pass
            changed = True
        else:
            print("Node %s not found in nodelist" % nodeid)

    # 2.3 Write the changed data back
    if changed:
        try:
            with open(nodelist_fn, 'w') as nodelist_handle:
                json.dump(nodelist, nodelist_handle)
        except IOError:
            print("Error writing nodelist file %s" % nodelist_fn)
if __name__ == "__main__":
    # Read options from the command line
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', action='store',
                        help='Configuration file')
    parser.add_argument('-d', '--dest-dir', action='store',
                        help='Directory with JSON data files',
                        required=False)
    parser.add_argument('-i', '--nodeid', metavar='ID', action='store',
                        nargs='+', required=True,
                        help='Node id to modify')
    parser.add_argument('-l', '--location', action='store_true',
                        help='Clear location information (hides node)',
                        required=False)
    parser.add_argument('-r', '--role', action='store',
                        help='Set new role',
                        required=False)
    # TODO
    # Options for what exactly should be done
    # -p         remove position, node is no longer displayed
    # -r <role>  set role
    options = vars(parser.parse_args())

    # Read the configuration file
    if options['config']:
        cfg['cfgfile'] = options['config']
    config = configparser.ConfigParser(cfg)
    # config.read returns a list of the files it parsed.
    # If the list is empty, the file was e.g. not present.
    if config.read(cfg['cfgfile']):
        if 'global' in config:
            cfg['logfile'] = config['global']['logfile']
            cfg['loglevel'] = config['global']['loglevel']
            cfg['dest_dir'] = config['global']['dest_dir']
    else:
        print('Config file %s not parsed' % cfg['cfgfile'])

    # Options from the command line take the highest priority
    cfg['nodeid'] = options['nodeid']
    if options['dest_dir']:
        cfg['dest_dir'] = options['dest_dir']
    if options['location']:
        cfg['location'] = True
    if options['role']:
        cfg['role'] = options['role']

    # Everything is initialised, off we go
    main(cfg)
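The comment block at the top of this deleted script describes a three-stage precedence: hard-coded defaults, then the central configuration file, then command-line options, with later stages overriding earlier ones. A minimal sketch of that pattern with configparser and argparse; option names and the defaults dict are illustrative, not taken from the script:

import argparse
import configparser

defaults = {'dest_dir': '/var/lib/ffmap/mapdata'}        # 1. hard-coded defaults

parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config')
parser.add_argument('-d', '--dest-dir')
options = vars(parser.parse_args())

cfg = dict(defaults)
config = configparser.ConfigParser()
if options['config'] and config.read(options['config']):
    if 'global' in config:                                # 2. config file overrides defaults
        cfg['dest_dir'] = config['global'].get('dest_dir', cfg['dest_dir'])
if options['dest_dir']:                                   # 3. command line wins
    cfg['dest_dir'] = options['dest_dir']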


@@ -1,225 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Delete a node manually from the backend:
- JSON
- NodeDB
- NodeList
- Graph
- RRD files
- images on the web server

Change log
==========
Version  Date         Change(s)                                              By
-------- -----------  ----------------------------------------------------- ----
1.0      2017-01-06   Integrated the program into the ffmap-backend project  tho
"""
import argparse
import configparser
import json
import os
import sys
import glob

# Settings are processed in the following order;
# values set later override earlier ones:
# 1. hard-coded in the program
# 2. read from the central configuration file
# 3. given as command line options
cfg = {
    'cfgfile': '/etc/ffmap/ffmap.cfg',
    'logfile': '/var/log/ffmap.log',
    'loglevel': 2,
    'dest_dir': '/var/lib/ffmap/mapdata',
    'nodedb': '/var/lib/ffmap/nodedb',
    'imgpath': '/var/www/meshviewer/stats/img'
}
def main(cfg):
    # Paths of the files involved
    nodes_fn = os.path.join(cfg['dest_dir'], 'nodes.json')
    graph_fn = os.path.join(cfg['dest_dir'], 'graph.json')
    nodelist_fn = os.path.join(cfg['dest_dir'], 'nodelist.json')

    # 1. Clean up the node data (NodeDB)
    # 1.1 Load data
    try:
        with open(nodes_fn, 'r') as nodedb_handle:
            nodedb = json.load(nodedb_handle)
    except IOError:
        print("Error reading nodedb file %s" % nodes_fn)
        nodedb = {'nodes': dict()}

    # 1.2 Remove nodes
    changed = False
    for n in cfg['nodeid']:
        if n in nodedb['nodes']:
            print("Remove %s from nodedb" % n)
            del nodedb['nodes'][n]
            changed = True
        else:
            print("Node %s not found in nodedb" % n)

    # 1.3 Write the changed data back
    if changed:
        try:
            with open(nodes_fn, 'w') as nodedb_handle:
                json.dump(nodedb, nodedb_handle)
        except IOError:
            print("Error writing nodedb file %s" % nodes_fn)
    # 2. Clean up the node list (NodeList)
    try:
        with open(nodelist_fn, 'r') as nodelist_handle:
            nodelist = json.load(nodelist_handle)
    except IOError:
        print("Error reading nodelist file %s" % nodelist_fn)
        nodelist = {'nodelist': dict()}

    # 2.1 Remove nodes
    changed = False
    ixlist = []
    for nodeid in cfg['nodeid']:
        found = False
        for ix, node in enumerate(nodelist['nodes']):
            if node['id'] == nodeid:
                found = True
                break
        if found:
            print("Remove %s from nodelist" % nodeid)
            del nodelist['nodes'][ix]
            changed = True
        else:
            print("Node %s not found in nodelist" % nodeid)

    # 2.3 Write the changed data back
    if changed:
        try:
            with open(nodelist_fn, 'w') as nodelist_handle:
                json.dump(nodelist, nodelist_handle)
        except IOError:
            print("Error writing nodelist file %s" % nodelist_fn)
    # 3. Clean up the graph (NodeGraph)
    # 3.1 Load the graph
    try:
        with open(graph_fn, 'r') as graph_handle:
            graph = json.load(graph_handle)
    except IOError:
        print("Error reading graph file %s" % graph_fn)
        graph = {'graph': dict()}

    # 3.2 Find nodes and links
    # Nodes and links belong together
    changed = False
    for nodeid in cfg['nodeid']:
        found = False
        for ixn, node in enumerate(graph["batadv"]["nodes"]):
            # There can be nodes without a "node_id"
            try:
                if node["node_id"] == nodeid:
                    found = True
                    break
            except KeyError:
                pass
        if found:
            print("Found %s in graph nodes at index %d" % (nodeid, ixn))
            del graph["batadv"]["nodes"][ixn]
            # Find links whose source or target matches the found index
            ixlist = []
            for ixg, link in enumerate(graph["batadv"]["links"]):
                if link["source"] == ixn:
                    print("Found source link at index %d" % ixg)
                    print(" -> %s" % graph["batadv"]["nodes"][link["target"]])
                    ixlist.append(ixg)
                if link["target"] == ixn:
                    print("Found target link at index %d" % ixg)
                    print(" -> %s" % graph["batadv"]["nodes"][link["source"]])
                    ixlist.append(ixg)
            for ix in ixlist:
                del graph["batadv"]["nodes"][ix]
            changed = True
        else:
            print("Node %s not found in graph nodes" % nodeid)

    # 3.3 Write the changed data back
    if changed:
        try:
            with open(graph_fn, 'w') as graph_handle:
                json.dump(graph, graph_handle)
        except IOError:
            print("Error writing graph file %s" % graph_fn)
    # 4. Remove RRD files
    for nodeid in cfg['nodeid']:
        rrdfile = os.path.join(cfg['nodedb'], nodeid + '.rrd')
        if os.path.isfile(rrdfile):
            print("Removing RRD database file %s" % os.path.basename(rrdfile))
        else:
            print("RRD database file %s not found" % os.path.basename(rrdfile))
        try:
            os.remove(rrdfile)
        except OSError:
            pass

    # 5. Remove images from the web server
    count_deleted = 0
    for nodeid in cfg['nodeid']:
        for imagefile in glob.glob(os.path.join(cfg['imgpath'], nodeid + '_*.png')):
            print("Removing stats image %s" % os.path.basename(imagefile))
            try:
                os.remove(imagefile)
                count_deleted += 1
            except OSError:
                pass
    if count_deleted == 0:
        print("No stats images found in %s" % cfg['imgpath'])
if __name__ == "__main__":
    # Read options from the command line
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', action='store',
                        help='Configuration file')
    parser.add_argument('-d', '--dest-dir', action='store',
                        help='Directory with JSON data files',
                        required=False)
    parser.add_argument('-i', '--nodeid', metavar='ID', action='store',
                        nargs='+', required=True,
                        help='Node id to remove')
    parser.add_argument('-n', '--nodedb', metavar='RRD_DIR', action='store',
                        help='Directory for node RRD data files')
    options = vars(parser.parse_args())

    # Read the configuration file
    if options['config']:
        cfg['cfgfile'] = options['config']
    config = configparser.ConfigParser(cfg)
    # config.read returns a list of the files it parsed.
    # If the list is empty, the file was e.g. not present.
    if config.read(cfg['cfgfile']):
        if 'global' in config:
            cfg['logfile'] = config['global']['logfile']
            cfg['loglevel'] = config['global']['loglevel']
            cfg['dest_dir'] = config['global']['dest_dir']
        if 'rrd' in config:
            cfg['nodedb'] = config['rrd']['nodedb']
    else:
        print('Config file %s not parsed' % cfg['cfgfile'])

    # Options from the command line take the highest priority
    cfg['nodeid'] = options['nodeid']
    if options['dest_dir']:
        cfg['dest_dir'] = options['dest_dir']
    if options['nodedb']:
        cfg['nodedb'] = options['nodedb']

    # Everything is initialised, off we go
    main(cfg)
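Section 3.2 above removes a node from graph["batadv"]["nodes"] and then deletes further entries using link indices; since links in this format reference nodes by list position, every deletion shifts the indices that follow. A hedged sketch of one way to drop a node and its incident links from such a node-link dict while keeping the remaining indices consistent; the function name and structure are illustrative, only the "batadv"/"nodes"/"links"/"source"/"target"/"node_id" keys come from the file shown above:

def remove_from_graph(graph, nodeid):
    """Drop one node and its incident links from a node-link dict (sketch)."""
    nodes = graph["batadv"]["nodes"]
    links = graph["batadv"]["links"]
    ixn = next((i for i, n in enumerate(nodes) if n.get("node_id") == nodeid), None)
    if ixn is None:
        return False
    # Keep only links not touching the node, then reindex the survivors,
    # because link endpoints refer to positions in the nodes list.
    kept = [l for l in links if l["source"] != ixn and l["target"] != ixn]
    for l in kept:
        if l["source"] > ixn:
            l["source"] -= 1
        if l["target"] > ixn:
            l["target"] -= 1
    del nodes[ixn]
    graph["batadv"]["links"] = kept
    return True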

lib/GateRRD.py

@@ -1,74 +0,0 @@
"""
RRD for gateways
"""
import os
import subprocess

from lib.RRD import DS, RRA, RRD


class GateRRD(RRD):
    ds_list = [
        DS('upstate', 'GAUGE', 120, 0, 1),
        DS('clients', 'GAUGE', 120, 0, float('NaN')),
        DS('loadavg', 'GAUGE', 120, 0, float('NaN')),
        DS('leases', 'GAUGE', 120, 0, float('NaN')),
    ]
    rra_list = [
        RRA('AVERAGE', 0.5, 1, 120),    # 2 hours of 1 minute samples
        RRA('AVERAGE', 0.5, 5, 1440),   # 5 days of 5 minute samples
        RRA('AVERAGE', 0.5, 15, 672),   # 7 days of 15 minute samples
        RRA('AVERAGE', 0.5, 60, 720),   # 30 days of 1 hour samples
        RRA('AVERAGE', 0.5, 720, 730),  # 1 year of 12 hour samples
    ]

    def __init__(self, filename, node=None):
        """
        Create a new RRD for a given node.
        If the RRD isn't supposed to be updated, the node can be omitted.
        """
        self.node = node
        super().__init__(filename)
        self.ensure_sanity(self.ds_list, self.rra_list, step=60)

    @property
    def imagename(self):
        return "{basename}.png".format(
            basename=os.path.basename(self.filename).rsplit('.', 2)[0])

    # TODO: fix this, python does not support function overloading
    def update(self):
        values = {
            'upstate': int(self.node['flags']['online']),
            'clients': float(self.node['statistics']['clients']),
        }
        if 'loadavg' in self.node['statistics']:
            values['loadavg'] = float(self.node['statistics'].get('loadavg', 0))
        # Gateways can send the peer count. We use the clients field to store data
        if 'peers' in self.node['statistics']:
            values['clients'] = self.node['statistics']['peers']
        if 'leases' in self.node['statistics']:
            values['leases'] = self.node['statistics']['leases']
        super().update(values)

    def graph(self, directory, timeframe):
        """
        Create a graph in the given directory. The file will be named
        basename.png if the RRD file is named basename.rrd
        """
        args = ['rrdtool', 'graph', os.path.join(directory, self.imagename),
                '-s', '-' + timeframe,
                '-w', '800',
                '-h', '400',
                '-l', '0',
                '-y', '1:1',
                'DEF:clients=' + self.filename + ':clients:AVERAGE',
                'VDEF:maxc=clients,MAXIMUM',
                'CDEF:c=0,clients,ADDNAN',
                'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*',
                'AREA:c#0F0:up\\l',
                'AREA:d#F00:down\\l',
                'LINE1:c#00F:clients connected\\l']
        subprocess.check_output(args)
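The DS and RRA tuples above correspond to rrdtool's create syntax (DS:name:type:heartbeat:min:max and RRA:CF:xff:steps:rows), where the covered time span is step (60 s here) times steps times rows. Assuming the lib.RRD wrapper ultimately emits standard rrdtool arguments (its exact API is not shown in this diff), an equivalent create call for this gateway database might look roughly like this; the file name is illustrative:

import subprocess

STEP = 60  # seconds, as passed to ensure_sanity(step=60)

args = ['rrdtool', 'create', 'gateway.rrd', '--step', str(STEP),
        'DS:upstate:GAUGE:120:0:1',
        'DS:clients:GAUGE:120:0:U',   # float('NaN') max maps to "unknown" (U)
        'DS:loadavg:GAUGE:120:0:U',
        'DS:leases:GAUGE:120:0:U',
        'RRA:AVERAGE:0.5:1:120',      # 60 s * 1 * 120   = 2 hours
        'RRA:AVERAGE:0.5:5:1440',     # 60 s * 5 * 1440  = 5 days
        'RRA:AVERAGE:0.5:15:672',     # 60 s * 15 * 672  = 7 days
        'RRA:AVERAGE:0.5:60:720',     # 60 s * 60 * 720  = 30 days
        'RRA:AVERAGE:0.5:720:730']    # 60 s * 720 * 730 = 1 year
# subprocess.check_output(args)      # uncomment to actually create the file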

lib/NodeRRD.py

@@ -1,7 +1,3 @@
-"""
-RRD for nodes
-"""
 import os
 import subprocess
@@ -12,22 +8,35 @@ class NodeRRD(RRD):
     ds_list = [
         DS('upstate', 'GAUGE', 120, 0, 1),
         DS('clients', 'GAUGE', 120, 0, float('NaN')),
+        DS('neighbors', 'GAUGE', 120, 0, float('NaN')),
+        DS('vpn_neighbors', 'GAUGE', 120, 0, float('NaN')),
         DS('loadavg', 'GAUGE', 120, 0, float('NaN')),
+        DS('rx_bytes', 'DERIVE', 120, 0, float('NaN')),
+        DS('rx_packets', 'DERIVE', 120, 0, float('NaN')),
+        DS('tx_bytes', 'DERIVE', 120, 0, float('NaN')),
+        DS('tx_packets', 'DERIVE', 120, 0, float('NaN')),
+        DS('mgmt_rx_bytes', 'DERIVE', 120, 0, float('NaN')),
+        DS('mgmt_rx_packets', 'DERIVE', 120, 0, float('NaN')),
+        DS('mgmt_tx_bytes', 'DERIVE', 120, 0, float('NaN')),
+        DS('mgmt_tx_packets', 'DERIVE', 120, 0, float('NaN')),
+        DS('forward_bytes', 'DERIVE', 120, 0, float('NaN')),
+        DS('forward_packets', 'DERIVE', 120, 0, float('NaN')),
     ]
     rra_list = [
-        RRA('AVERAGE', 0.5, 1, 120),    # 2 hours of 1 minute samples
-        RRA('AVERAGE', 0.5, 5, 1440),   # 5 days of 5 minute samples
-        RRA('AVERAGE', 0.5, 60, 720),   # 30 days of 1 hour samples
-        RRA('AVERAGE', 0.5, 720, 730),  # 1 year of 12 hour samples
+        # 2 hours of 1 minute samples
+        RRA('AVERAGE', 0.5, 1, 120),
+        # 7 days of 15 minute samples
+        RRA('AVERAGE', 0.5, 15, 672),
     ]

-    def __init__(self, filename, node=None):
+    def __init__(self, filename, node=None, graph=None):
         """
         Create a new RRD for a given node.
         If the RRD isn't supposed to be updated, the node can be omitted.
         """
         self.node = node
+        self.node_graph = graph
         super().__init__(filename)
         self.ensure_sanity(self.ds_list, self.rra_list, step=60)
@@ -40,10 +49,25 @@ class NodeRRD(RRD):
     def update(self):
         values = {
             'upstate': int(self.node['flags']['online']),
-            'clients': self.node['statistics']['clients']
+            'clients': float(self.node['statistics']['clients']),
+            'loadavg': float(self.node['statistics'].get('loadavg', 0)),
         }
-        if 'loadavg' in self.node['statistics']:
-            values['loadavg'] = float(self.node['statistics']['loadavg'])
+        for item in ('rx', 'tx', 'mgmt_rx', 'mgmt_tx', 'forward'):
+            try:
+                values.update({
+                    ('%s_bytes' % item): int(self.node['statistics'].get('traffic', {}).get(item, {}).get('bytes', 0)),
+                    ('%s_packets' % item): int(self.node['statistics'].get('traffic', {}).get(item, {}).get('packets', 0)),
+                })
+            except TypeError:
+                pass
+        try:
+            graph_node = next(key for key, node in self.node_graph.nodes(data=True) if node.get('node_id') == self.node['nodeinfo']['node_id'])
+            values.update({
+                'neighbors': float(len(self.node_graph[graph_node])),
+                'vpn_neighbors': float(len(list(filter(lambda edge: edge.get('vpn', False), self.node_graph[graph_node].values())))),
+            })
+        except StopIteration:
+            pass
         super().update(values)

     def graph(self, directory, timeframe):

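The new update() pulls neighbour counts out of the batadv graph that backend.py now hands down via rrd.update_database(nodedb['nodes'], batadv_graph). A small sketch of that lookup with networkx on a hypothetical two-node graph; the attribute names node_id and vpn follow the diff, everything else is invented for illustration:

import networkx as nx

g = nx.Graph()
g.add_node('a', node_id='deadbeef0001')
g.add_node('b', node_id='deadbeef0002')
g.add_edge('a', 'b', vpn=True)   # one VPN link between the two nodes

wanted = 'deadbeef0001'
# find the graph key whose node_id attribute matches the node being updated
graph_node = next(key for key, data in g.nodes(data=True)
                  if data.get('node_id') == wanted)
neighbors = len(g[graph_node])   # adjacency view: number of neighbours
vpn_neighbors = len([e for e in g[graph_node].values() if e.get('vpn', False)])
print(neighbors, vpn_neighbors)  # -> 1 1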
lib/alfred.py

@@ -20,11 +20,7 @@ class Alfred(object):
         if self.unix_sock:
             cmd.extend(['-s', self.unix_sock])

-        # There should not be any warnings which would be sent by cron
-        # every minute. Therefore suppress error output of called program
-        FNULL = open(os.devnull, 'w')
-        output = subprocess.check_output(cmd, stderr=FNULL)
-        FNULL.close()
+        output = subprocess.check_output(cmd)
         return json.loads(output.decode("utf-8")).values()

     def nodeinfo(self):
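The removed lines silenced the called program's stderr by opening os.devnull manually. If that behaviour is still wanted, Python 3.3+ provides subprocess.DEVNULL, which avoids the explicit file handle; a minimal sketch, where the command list is a hypothetical stand-in for the one built above:

import json
import subprocess

cmd = ['alfred-json', '-z', '-f', 'json', '-r', '158']   # illustrative argument list only

# subprocess.DEVNULL replaces the FNULL = open(os.devnull, 'w') dance
output = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
data = json.loads(output.decode('utf-8'))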

lib/batman.py

@@ -3,6 +3,7 @@ import json
 import os
 import re

 class Batman(object):
     """
     Bindings for B.A.T.M.A.N. Advanced
@@ -21,7 +22,7 @@ class Batman(object):
         self.environ = env

         # compile regular expressions only once on startup
-        self.mac_addr_pattern = re.compile(r'(([a-f0-9]{2}:){5}[a-f0-9]{2})')
+        self.mac_addr_pattern = re.compile(r'(([a-z0-9]{2}:){5}[a-z0-9]{2})')

     def vis_data(self):
         return self.vis_data_batadv_vis()
@@ -51,10 +52,10 @@ class Batman(object):
     def gateway_list(self):
         """
-        Parse "batctl meshif <mesh_interface> gwl -n"
+        Parse "batctl -m <mesh_interface> gwl -n"
         into an array of dictionaries.
         """
-        cmd = ['batctl', 'meshif', self.mesh_interface, 'gwl', '-n']
+        cmd = ['batctl', '-m', self.mesh_interface, 'gwl', '-n']
         if os.geteuid() > 0:
             cmd.insert(0, 'sudo')
         output = subprocess.check_output(cmd, env=self.environ)
@@ -80,10 +81,10 @@ class Batman(object):
     def gateway_mode(self):
         """
-        Parse "batctl meshif <mesh_interface> gw"
+        Parse "batctl -m <mesh_interface> gw"
         return: tuple mode, bandwidth, if mode != server then bandwidth is None
         """
-        cmd = ['batctl', 'meshif', self.mesh_interface, 'gw']
+        cmd = ['batctl', '-m', self.mesh_interface, 'gw']
         if os.geteuid() > 0:
             cmd.insert(0, 'sudo')
         output = subprocess.check_output(cmd, env=self.environ)
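The MAC address pattern changes from [a-f0-9] (hexadecimal digits only) to [a-z0-9]. The wider character class still matches every valid lowercase MAC, but it also accepts strings that are not hexadecimal at all; a tiny demonstration of the difference:

import re

hex_only = re.compile(r'(([a-f0-9]{2}:){5}[a-f0-9]{2})')
loose    = re.compile(r'(([a-z0-9]{2}:){5}[a-z0-9]{2})')

valid = 'de:ad:be:ef:00:01'
bogus = 'zz:zz:zz:zz:zz:zz'

print(bool(hex_only.fullmatch(valid)), bool(loose.fullmatch(valid)))  # True True
print(bool(hex_only.fullmatch(bogus)), bool(loose.fullmatch(bogus)))  # False True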

lib/nodelist.py

@@ -13,20 +13,12 @@ def export_nodelist(now, nodedb):
         node_out["status"] = dict()
         node_out["status"]["online"] = node["flags"]["online"]

-        if "firstseen" in node:
-            node_out["status"]["firstcontact"] = node["firstseen"]

         if "lastseen" in node:
             node_out["status"]["lastcontact"] = node["lastseen"]

         if "clients" in node["statistics"]:
             node_out["status"]["clients"] = node["statistics"]["clients"]

-        if "role" in node["nodeinfo"]["system"]:
-            node_out["role"] = node["nodeinfo"]["system"]["role"]
-        else:
-            node_out["role"] = "node"

         nodelist.append(node_out)

     return {"version": "1.0.1", "nodes": nodelist, "updated_at": now.isoformat()}
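With the firstcontact and role fields gone, a status block produced by export_nodelist only ever carries the fields still assigned in the hunk above. A hypothetical example of the resulting shape, values invented for illustration:

node_out = dict()
node_out["status"] = dict()
node_out["status"]["online"] = True
node_out["status"]["lastcontact"] = "2015-06-19T15:42:58"   # only if "lastseen" is present
node_out["status"]["clients"] = 12                          # only if a client count is known
# "firstcontact" and "role" are no longer emitted after this change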

lib/nodes.py

@@ -13,9 +13,19 @@ def build_mac_table(nodes):
             pass

         try:
-            for upper_if in node['nodeinfo']['network']['mesh'].values():
-                for lower_if in upper_if['interfaces'].values():
-                    for mac in lower_if:
+            for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['wireless']:
+                macs[mac] = node_id
+        except KeyError:
+            pass
+
+        try:
+            for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['tunnel']:
+                macs[mac] = node_id
+        except KeyError:
+            pass
+
+        try:
+            for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other']:
                 macs[mac] = node_id
         except KeyError:
             pass
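The two variants of build_mac_table expect the same nodeinfo subtree but walk it differently: the removed side iterates every mesh interface generically, while the restored side only reads the fixed bat0 / interfaces / wireless, tunnel and other lists. A hypothetical nodeinfo fragment and the compressed equivalent of the three restored try blocks; all addresses and the node id are invented:

nodeinfo = {
    'network': {'mesh': {
        'bat0': {'interfaces': {
            'wireless': ['02:ca:fe:00:00:01'],
            'tunnel':   ['02:ca:fe:00:00:02'],
            # 'other' may be absent; the KeyError is simply swallowed
        }},
    }},
}

macs = {}
node_id = 'deadbeef0001'
for kind in ('wireless', 'tunnel', 'other'):
    try:
        for mac in nodeinfo['network']['mesh']['bat0']['interfaces'][kind]:
            macs[mac] = node_id
    except KeyError:
        pass
print(macs)  # both MACs now map to the node id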

lib/rrddb.py

@@ -4,7 +4,7 @@ import os
 from lib.GlobalRRD import GlobalRRD
 from lib.NodeRRD import NodeRRD
-from lib.GateRRD import GateRRD

 class RRD(object):
     def __init__(self,
@@ -22,30 +22,23 @@ class RRD(object):
         self.currentTimeInt = (int(time.time()) / 60) * 60
         self.currentTime = str(self.currentTimeInt)

-    def update_database(self, nodes):
-        online_nodes = dict(filter(
-            lambda d: d[1]['flags']['online'], nodes.items()))
-        client_count = sum(map(
-            lambda d: d['statistics']['clients'], online_nodes.values()))
-
-        # Refresh global database
-        self.globalDb.update(len(online_nodes), client_count)
-        # Refresh databases for all single nodes
-        for node_id, node in online_nodes.items():
-            if node['flags']['gateway']:
-                rrd = GateRRD(os.path.join(self.dbPath, node_id + '.rrd'), node)
-            else:
-                rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node)
-            rrd.update()
-
-    def update_images(self):
-        # Create image path if it does not exist
         try:
             os.stat(self.imagePath)
         except OSError:
             os.mkdir(self.imagePath)

+    def update_database(self, nodes, graph):
+        online_nodes = dict(filter(
+            lambda d: d[1]['flags']['online'], nodes.items()))
+        client_count = sum(map(
+            lambda d: d['statistics']['clients'], online_nodes.values()))
+
+        self.globalDb.update(len(online_nodes), client_count)
+        for node_id, node in online_nodes.items():
+            rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node, graph)
+            rrd.update()
+
+    def update_images(self):
         self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"),
                             self.displayTimeGlobal)
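On both sides of the change, update_database first reduces the nodedb to online nodes and sums their client counters before touching any RRD file. The same two expressions applied to a tiny hypothetical nodes dict, with invented node ids and counts:

nodes = {
    'deadbeef0001': {'flags': {'online': True},  'statistics': {'clients': 5}},
    'deadbeef0002': {'flags': {'online': False}, 'statistics': {'clients': 0}},
    'deadbeef0003': {'flags': {'online': True},  'statistics': {'clients': 7}},
}

online_nodes = dict(filter(
    lambda d: d[1]['flags']['online'], nodes.items()))
client_count = sum(map(
    lambda d: d['statistics']['clients'], online_nodes.values()))

print(len(online_nodes), client_count)  # -> 2 12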