alfred: restructure code, add nodeinfo, statistics, vis

influxdb
Nils Schneider 2014-09-23 11:57:45 +02:00
parent 9257aa01a6
commit 41ee81d92c
14 changed files with 358 additions and 471 deletions

3
.gitignore vendored
View File

@ -1,3 +1,4 @@
*.pyc
aliases.json
aliases*.json
nodedb/
__pycache__/

View File

@ -1,6 +1,5 @@
import os
import subprocess
from node import Node
from RRD import RRD, DS, RRA
class NodeRRD(RRD):
@ -30,7 +29,7 @@ class NodeRRD(RRD):
return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png"
def update(self):
super().update({'upstate': 1, 'clients': self.node.clientcount})
super().update({'upstate': int(self.node['flags']['online']), 'clients': self.node['statistics']['clients']})
def graph(self, directory, timeframe):
"""

29
alfred.py Executable file → Normal file
View File

@ -2,15 +2,22 @@
import subprocess
import json
class alfred:
def __init__(self,request_data_type = 158):
self.request_data_type = request_data_type
def _fetch(data_type):
    """Fetch one alfred data type and return the decoded records.

    Runs the external ``alfred-json`` binary (``-z`` = gzip-compressed
    payloads, JSON output) and returns the values of the resulting
    JSON object, i.e. one record per originating node.
    """
    command = ["alfred-json", "-z", "-f", "json", "-r", str(data_type)]
    raw = subprocess.check_output(command)
    return json.loads(raw.decode("utf-8")).values()
def aliases(self):
output = subprocess.check_output(["alfred-json","-r",str(self.request_data_type),"-f","json","-z"])
alfred_data = json.loads(output.decode("utf-8"))
def nodeinfo():
    # alfred data type 158 carries the per-node "nodeinfo" records.
    return _fetch(158)


def statistics():
    # alfred data type 159 carries the per-node statistics records.
    return _fetch(159)


def vis():
    # alfred data type 160 carries the batman-adv vis data.
    return _fetch(160)
def aliases():
alias = {}
for mac,node in alfred_data.items():
for node in nodeinfo():
node_alias = {}
if 'location' in node:
try:
@ -33,10 +40,6 @@ class alfred:
elif 'name' in node:
node_alias['name'] = node['name']
if len(node_alias):
alias[mac] = node_alias
return alias
alias[node['network']['mac']] = node_alias
if __name__ == "__main__":
ad = alfred()
al = ad.aliases()
print(al)
return alias

View File

@ -1,9 +1,24 @@
{
"b0:48:7a:e7:d3:64" : {
"name" : "Meute-AP"
[
{
"node_id": "krtek",
"hostname": "krtek",
"location": {
"longitude": 10.74,
"latitude": 53.86
},
"8e:3d:c2:10:10:28" : {
"name" : "holstentor",
"vpn" : true
"network": {
"mesh_interfaces": [
"00:25:86:e6:f1:bf"
]
}
}
},
{
"node_id": "gw1",
"hostname": "burgtor",
"network": {
"mesh_interfaces": [
"52:54:00:f3:62:d9"
]
}
}
]

101
backend.py Executable file
View File

@ -0,0 +1,101 @@
#!/usr/bin/env python3
# Backend driver: collects alfred nodeinfo/statistics and batman-adv vis
# data, merges them into nodes.json / graph.json and updates the RRDs.
import argparse
import json
import os
import sys
import networkx as nx
from datetime import datetime
from networkx.readwrite import json_graph

import alfred
import nodes
import graph
from batman import batman
from rrddb import rrd

parser = argparse.ArgumentParser()

parser.add_argument('-a', '--aliases',
                    help='read aliases from FILE',
                    default=[],
                    action='append',
                    metavar='FILE')

parser.add_argument('-m', '--mesh', action='append',
                    help='batman mesh interface')

parser.add_argument('-d', '--destination-directory', action='store',
                    help='destination directory for generated files', required=True)

parser.add_argument('--vpn', action='append', metavar='MAC',
                    help='assume MAC to be part of the VPN')

parser.add_argument('--prune', metavar='DAYS',
                    help='forget nodes offline for at least DAYS')

args = parser.parse_args()
options = vars(args)

# Default mesh interface when none was given on the command line.
if not options['mesh']:
    options['mesh'] = ['bat0']

nodes_fn = os.path.join(options['destination_directory'], 'nodes.json')
graph_fn = os.path.join(options['destination_directory'], 'graph.json')

now = datetime.utcnow().replace(microsecond=0)

# Reload the previous nodes.json so firstseen/lastseen survive across runs.
try:
    nodedb = json.load(open(nodes_fn))

    # ignore if old format
    # (bare raise with no active exception raises itself and lands in the
    # catch-all below, which starts with an empty database)
    if 'links' in nodedb:
        raise
except:
    nodedb = {'nodes': dict()}

nodedb['timestamp'] = now.isoformat()

# Every known node is offline until this run proves otherwise.
for node_id, node in nodedb['nodes'].items():
    node['flags']['online'] = False

# alfred nodeinfo is live data, so those nodes are assumed online.
nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True)

# Static alias files supply nodeinfo without an online claim.
for aliases in options['aliases']:
    with open(aliases, 'r') as f:
        nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False)

nodes.reset_statistics(nodedb['nodes'])
nodes.import_statistics(nodedb['nodes'], alfred.statistics())

# One (vis_data, gateway_list) pair per configured mesh interface.
bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(batman, options['mesh'])))
for vis_data, gateway_list in bm:
    nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data)
    nodes.import_vis_clientcount(nodedb['nodes'], vis_data)
    nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now)
    nodes.mark_gateways(nodedb['nodes'], gateway_list)

# Drop nodes that have been offline longer than --prune days.
if options['prune']:
    nodes.prune_nodes(nodedb['nodes'], now, int(options['prune']))

batadv_graph = nx.DiGraph()
for vis_data, gateway_list in bm:
    graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data)

if options['vpn']:
    graph.mark_vpn(batadv_graph, frozenset(options['vpn']))

# Collapse interface-level nodes, then fold both directions of each link.
batadv_graph = graph.merge_nodes(batadv_graph)
batadv_graph = graph.to_undirected(batadv_graph)

with open(nodes_fn, 'w') as f:
    json.dump(nodedb, f)

with open(graph_fn, 'w') as f:
    json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f)

scriptdir = os.path.dirname(os.path.realpath(__file__))

# NOTE: rebinds the imported module name 'rrd' with an rrd instance.
rrd = rrd(scriptdir + '/nodedb/', options['destination_directory'] + '/nodes')
rrd.update_database(nodedb['nodes'])
rrd.update_images()

View File

@ -1,82 +0,0 @@
#!/usr/bin/env python3
# Legacy backend (removed in this commit): builds nodes.json via NodeDB +
# D3MapBuilder from batman vis data and optional alfred aliases.
import json
import fileinput
import argparse
import os
import time

from batman import batman
from alfred import alfred
from rrddb import rrd
from nodedb import NodeDB
from d3mapbuilder import D3MapBuilder

# Force encoding to UTF-8
import locale                                          # Ensures that subsequent open()s
locale.getpreferredencoding = lambda _=None: 'UTF-8'   # are UTF-8 encoded.

import sys

#sys.stdin = open('/dev/stdin', 'r')
#sys.stdout = open('/dev/stdout', 'w')
#sys.stderr = open('/dev/stderr', 'w')

parser = argparse.ArgumentParser()

parser.add_argument('-a', '--aliases',
                    help='read aliases from FILE',
                    action='append',
                    metavar='FILE')

parser.add_argument('-m', '--mesh', action='append',
                    default=["bat0"],
                    help='batman mesh interface')

parser.add_argument('-A', '--alfred', action='store_true',
                    help='retrieve aliases from alfred')

parser.add_argument('-d', '--destination-directory', action='store',
                    help='destination directory for generated files', required=True)

args = parser.parse_args()
options = vars(args)

db = NodeDB(int(time.time()))

# Feed vis data (and gateways) of every mesh interface into the database.
for mesh_interface in options['mesh']:
    bm = batman(mesh_interface)
    db.parse_vis_data(bm.vis_data(options['alfred']))
    for gw in bm.gateway_list():
        db.mark_gateway(gw)

if options['aliases']:
    for aliases in options['aliases']:
        db.import_aliases(json.load(open(aliases)))

if options['alfred']:
    af = alfred()
    db.import_aliases(af.aliases())

db.load_state("state.json")

# remove nodes that have been offline for more than 30 days
db.prune_offline(time.time() - 30*86400)

db.dump_state("state.json")

scriptdir = os.path.dirname(os.path.realpath(__file__))

m = D3MapBuilder(db)

#Write nodes json
nodes_json = open(options['destination_directory'] + '/nodes.json.new','w')
nodes_json.write(m.build())
nodes_json.close()

#Move to destination (atomic replace via rename)
os.rename(options['destination_directory'] + '/nodes.json.new',options['destination_directory'] + '/nodes.json')

# NOTE: rebinds the imported module name 'rrd' with an rrd instance.
rrd = rrd(scriptdir + "/nodedb/", options['destination_directory'] + "/nodes")
rrd.update_database(db)
rrd.update_images()

0
batman.py Executable file → Normal file
View File

View File

@ -1,35 +0,0 @@
import json
import datetime
class D3MapBuilder:
    """Serializes a node database into the d3 map's nodes.json document."""

    def __init__(self, db):
        # db must provide get_nodes() and get_links().
        self._db = db

    def build(self):
        """Return the JSON string with 'nodes', 'links' and 'meta' sections."""
        now = datetime.datetime.utcnow().replace(microsecond=0)

        node_entries = []
        for node in self._db.get_nodes():
            # gps is a "lat lon" string or None.
            geo = [float(part) for part in node.gps.split(" ")] if node.gps else None
            node_entries.append({'name': node.name, 'id': node.id,
                                 'geo': geo,
                                 'firmware': node.firmware,
                                 'flags': node.flags,
                                 'clientcount': node.clientcount})

        link_entries = []
        for link in self._db.get_links():
            link_entries.append({'source': link.source.id, 'target': link.target.id,
                                 'quality': link.quality,
                                 'type': link.type,
                                 'id': link.id})

        return json.dumps({'nodes': node_entries,
                           'links': link_entries,
                           'meta': {'timestamp': now.isoformat()}})

66
graph.py Normal file
View File

@ -0,0 +1,66 @@
import networkx as nx
from copy import deepcopy
from functools import reduce
from itertools import chain
from nodes import build_mac_table
def import_vis_data(graph, nodes, vis_data):
    """Add batman-adv vis data to *graph*, one graph node per interface MAC."""
    macs = build_mac_table(nodes)
    # 'primary' lines: the interface is its own primary -> pair (mac, mac).
    nodes_a = map(lambda d: 2*[d['primary']], filter(lambda d: 'primary' in d, vis_data))
    # 'secondary' lines: the secondary belongs to the primary named in 'of'.
    nodes_b = map(lambda d: [d['secondary'], d['of']], filter(lambda d: 'secondary' in d, vis_data))
    # The *zip(*chain(...)) transposes [(iface, primary), ...] into two
    # parallel streams for the two-argument lambda.
    # NOTE(review): this raises TypeError when vis_data yields no interface
    # pairs at all — confirm callers never pass empty vis data.
    graph.add_nodes_from(map(lambda a, b: (a, dict(primary=b, node_id=macs.get(b))), *zip(*chain(nodes_a, nodes_b))))

    # 'neighbor' lines become directed edges weighted by tq (the label).
    edges = filter(lambda d: 'neighbor' in d, vis_data)
    graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], dict(tq=float(d['label']))), edges))
def mark_vpn(graph, vpn_macs):
    """Set vpn=True on edges of all weak components containing a VPN MAC."""
    components = map(frozenset, nx.weakly_connected_components(graph))
    # Keep only components that contain at least one known VPN MAC.
    components = filter(vpn_macs.intersection, components)
    # Union of all member MACs of those components.
    nodes = reduce(lambda a, b: a | b, components, set())

    for node in nodes:
        # graph[node] yields the outgoing adjacency; each edge-data dict
        # is tagged in place.
        for k, v in graph[node].items():
            v['vpn'] = True
def to_multigraph(graph):
    """Collapse interface-level nodes onto their primary MAC.

    Returns a MultiDiGraph so parallel edges between the merged nodes
    survive for later reduction in merge_nodes().
    """
    def f(a):
        # Map an interface MAC to its primary MAC; nodes without data
        # (unknown interfaces) keep their own name.
        node = graph.node[a]
        return node['primary'] if node else a

    G = nx.MultiDiGraph()
    map_node = lambda node, data: (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict())
    # NOTE(review): nodes_iter/edges_iter are networkx 1.x APIs (removed in
    # 2.x); the *zip(*...) transposition also fails on an empty graph.
    G.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True))))
    G.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True))))

    return G
def merge_nodes(graph):
    """Reduce the interface graph to one directed edge per node pair.

    Parallel edge bundles keep the worst (minimum) tq; the bundle is vpn
    only if every parallel edge was vpn.
    """
    def merge_edges(data):
        tq = min(map(lambda d: d['tq'], data))
        vpn = all(map(lambda d: d.get('vpn', False), data))
        return dict(tq=tq, vpn=vpn)

    G = to_multigraph(graph)
    H = nx.DiGraph()
    H.add_nodes_from(G.nodes_iter(data=True))
    # For every source e and neighbor d, merge the parallel edges G[e][d].
    edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G])
    H.add_edges_from(edges)

    return H
def to_undirected(graph):
    """Fold the directed graph into an undirected one.

    The two directions of a link are merged: the best (maximum) tq wins,
    vpn only if both directions were vpn, and bidirect records whether
    both directions actually existed.
    """
    # Round-trip through a MultiGraph so both directions of a pair end up
    # as parallel edges under one key.
    G = nx.MultiGraph()
    G.add_nodes_from(graph.nodes_iter(data=True))
    G.add_edges_from(graph.edges_iter(data=True))

    def merge_edges(data):
        tq = max(map(lambda d: d['tq'], data))
        vpn = all(map(lambda d: d.get('vpn', False), data))
        return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2)

    H = nx.Graph()
    H.add_nodes_from(G.nodes_iter(data=True))
    edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G])
    H.add_edges_from(edges)

    return H

15
link.py
View File

@ -1,15 +0,0 @@
class Link:
    """A link between two nodes; fields are filled in by the vis parser."""

    def __init__(self):
        self.id = None          # canonical "mac-mac" identifier
        self.source = None      # LinkConnector
        self.target = None      # LinkConnector
        self.quality = None     # tq label(s)
        self.type = None        # e.g. "vpn"


class LinkConnector:
    """One endpoint of a Link: node index plus the interface MAC used."""

    def __init__(self):
        self.id = None          # index of the node in the database
        self.interface = None   # interface MAC

    def __repr__(self):
        return "LinkConnector(%d, %s)" % (self.id, self.interface)

32
node.py
View File

@ -1,32 +0,0 @@
class Node:
    """A mesh node aggregated from vis data, state files and aliases."""

    def __init__(self):
        self.name = ""
        self.id = ""                 # first MAC seen, unless overridden
        self.macs = set()            # all MACs (lowercased) of this node
        self.interfaces = dict()     # mac -> Interface
        self.flags = dict({
            "online": False,
            "gateway": False,
        })
        self.gps = None              # "lat lon" string or None
        self.firmware = None
        self.clientcount = 0
        self.lastseen = 0
        self.firstseen = 0

    def add_mac(self, mac):
        """Register a MAC (lowercased); the very first one becomes the id."""
        mac = mac.lower()
        if not self.macs:
            self.id = mac
        self.macs.add(mac)

        self.interfaces[mac] = Interface()

    def __repr__(self):
        return '<Node %s>' % self.macs.__repr__()


class Interface:
    """Per-MAC interface data; vpn is set while resolving VPN links."""

    def __init__(self):
        self.vpn = False

251
nodedb.py
View File

@ -1,251 +0,0 @@
import json
from functools import reduce
from collections import defaultdict
from node import Node, Interface
from link import Link, LinkConnector
class NodeDB:
    """In-memory database of mesh nodes and links built from batman vis data.

    NOTE(review): this block was recovered from an un-indented diff; the
    nesting of the try blocks in parse_vis_data was reconstructed and
    should be verified against version history.
    """

    def __init__(self, time=0):
        # Reference timestamp assigned as first/lastseen of newly seen nodes.
        self.time = time
        self._nodes = []
        self._links = []

    # fetch list of links
    def get_links(self):
        self.update_vpn_links()
        return self.reduce_links()

    # fetch list of nodes
    def get_nodes(self):
        return self._nodes

    # remove all offlines nodes with lastseen < timestamp
    def prune_offline(self, timestamp):
        self._nodes = list(filter(lambda x: x.lastseen >= timestamp, self._nodes))

    # write persistent state to file
    def dump_state(self, filename):
        obj = []

        for node in self._nodes:
            obj.append({ 'id': node.id
                       , 'name': node.name
                       , 'lastseen': node.lastseen
                       , 'firstseen': node.firstseen
                       , 'geo': node.gps
                       })

        with open(filename, "w") as f:
            json.dump(obj, f)

    # load persistent state from file
    def load_state(self, filename):
        try:
            with open(filename, "r") as f:
                obj = json.load(f)

                for n in obj:
                    try:
                        node = self.maybe_node_by_id(n['id'])
                    except KeyError:
                        # unknown node: recreate it offline from the state file
                        node = Node()
                        node.id = n['id']
                        node.name = n['name']
                        node.lastseen = n['lastseen']
                        node.gps = n['geo']
                        self._nodes.append(node)

                    if 'firstseen' in n:
                        node.firstseen = n['firstseen']
        except:
            # missing or unreadable state file: start with what we have
            pass

    def maybe_node_by_mac(self, macs):
        # Return the first node owning any of *macs*; raise KeyError if none.
        for node in self._nodes:
            for mac in macs:
                if mac.lower() in node.macs:
                    return node

        raise KeyError

    def maybe_node_by_id(self, mac):
        # Return the node whose id equals *mac* (case-insensitive).
        for node in self._nodes:
            if mac.lower() == node.id:
                return node

        raise KeyError

    def parse_vis_data(self, vis_data):
        # Pass 1: create nodes from secondary-interface announcements.
        for x in vis_data:
            if 'of' in x:
                try:
                    node = self.maybe_node_by_mac((x['of'], x['secondary']))
                except KeyError:
                    node = Node()
                    node.lastseen = self.time
                    node.firstseen = self.time
                    node.flags['online'] = True
                    self._nodes.append(node)

                node.add_mac(x['of'])
                node.add_mac(x['secondary'])

        # Pass 2: create nodes for routers and their neighbors.
        for x in vis_data:
            if 'router' in x:
                # TTs will be processed later
                if x['label'] == "TT":
                    continue

                try:
                    node = self.maybe_node_by_mac((x['router'], ))
                except KeyError:
                    node = Node()
                    node.lastseen = self.time
                    node.firstseen = self.time
                    node.flags['online'] = True
                    node.add_mac(x['router'])
                    self._nodes.append(node)

                try:
                    if 'neighbor' in x:
                        try:
                            node = self.maybe_node_by_mac((x['neighbor'], ))
                        except KeyError:
                            continue

                    if 'gateway' in x:
                        x['neighbor'] = x['gateway']

                    node = self.maybe_node_by_mac((x['neighbor'], ))
                except KeyError:
                    node = Node()
                    node.lastseen = self.time
                    node.firstseen = self.time
                    node.flags['online'] = True
                    node.add_mac(x['neighbor'])
                    self._nodes.append(node)

        # Pass 3: build Link records between the nodes created above.
        for x in vis_data:
            if 'router' in x:
                # TTs will be processed later
                if x['label'] == "TT":
                    continue

                try:
                    if 'gateway' in x:
                        x['neighbor'] = x['gateway']

                    router = self.maybe_node_by_mac((x['router'], ))
                    neighbor = self.maybe_node_by_mac((x['neighbor'], ))
                except KeyError:
                    continue

                # filter TT links merged in previous step
                if router == neighbor:
                    continue

                link = Link()
                link.source = LinkConnector()
                link.source.interface = x['router']
                link.source.id = self._nodes.index(router)
                link.target = LinkConnector()
                link.target.interface = x['neighbor']
                link.target.id = self._nodes.index(neighbor)
                link.quality = x['label']
                link.id = "-".join(sorted((link.source.interface, link.target.interface)))

                self._links.append(link)

        # Pass 4: promote the announced primary MAC to the node id.
        for x in vis_data:
            if 'primary' in x:
                try:
                    node = self.maybe_node_by_mac((x['primary'], ))
                except KeyError:
                    continue

                node.id = x['primary']

        # Pass 5: count clients from TT entries.
        for x in vis_data:
            if 'router' in x and x['label'] == 'TT':
                try:
                    node = self.maybe_node_by_mac((x['router'], ))
                    node.add_mac(x['gateway'])
                    node.clientcount += 1
                except KeyError:
                    pass

        # don't count node as its own client
        for node in self._nodes:
            if node.clientcount > 0:
                node.clientcount -= 1

    def reduce_links(self):
        # Collapse the per-direction link records into one Link per id.
        tmp_links = defaultdict(list)

        for link in self._links:
            tmp_links[link.id].append(link)

        links = []

        def reduce_link(a, b):
            a.id = b.id
            a.source = b.source
            a.target = b.target
            a.type = b.type
            # concatenate the per-direction quality labels
            a.quality = ", ".join([x for x in (a.quality, b.quality) if x])

            return a

        for k, v in tmp_links.items():
            new_link = reduce(reduce_link, v, Link())
            links.append(new_link)

        return links

    def import_aliases(self, aliases):
        # Apply alias data of the form {mac: {name, vpn, gps, firmware, id}}.
        for mac, alias in aliases.items():
            try:
                node = self.maybe_node_by_mac([mac])
            except KeyError:
                # create an offline node
                node = Node()
                node.add_mac(mac)
                self._nodes.append(node)

            if 'name' in alias:
                node.name = alias['name']

            if 'vpn' in alias and alias['vpn'] and mac and node.interfaces and mac in node.interfaces:
                node.interfaces[mac].vpn = True

            if 'gps' in alias:
                node.gps = alias['gps']

            if 'firmware' in alias:
                node.firmware = alias['firmware']

            if 'id' in alias:
                node.id = alias['id']

    def mark_gateway(self, gateway):
        # Flag the node owning *gateway* (a MAC) as a gateway.
        try:
            node = self.maybe_node_by_mac((gateway, ))
            node.flags['gateway'] = True
        except KeyError:
            print("WARNING: did not find gateway ", gateway, " in node list")

    def update_vpn_links(self):
        # Propagate the per-interface vpn flag across links until no link
        # changes any more (fixed point).
        changes = 1
        while changes > 0:
            changes = 0
            for link in self._links:
                source_interface = self._nodes[link.source.id].interfaces[link.source.interface]
                target_interface = self._nodes[link.target.id].interfaces[link.target.interface]
                if source_interface.vpn or target_interface.vpn:
                    source_interface.vpn = True
                    target_interface.vpn = True
                    if link.type != "vpn":
                        changes += 1

                    link.type = "vpn"

124
nodes.py Normal file
View File

@ -0,0 +1,124 @@
from collections import Counter, defaultdict
from datetime import datetime
from functools import reduce
def build_mac_table(nodes):
    """Map every known mesh-interface MAC to its owning node_id.

    Nodes lacking nodeinfo/network/mesh_interfaces are skipped silently.
    """
    mac_to_node = {}
    for node_id, node in nodes.items():
        try:
            interfaces = node['nodeinfo']['network']['mesh_interfaces']
        except KeyError:
            continue
        mac_to_node.update((mac, node_id) for mac in interfaces)
    return mac_to_node
def prune_nodes(nodes, now, days):
    """Remove nodes unseen for at least *days* days (or lacking 'lastseen').

    nodes: dict node_id -> node, modified in place.
    now:   naive datetime comparable to the stored 'lastseen' stamps.
    days:  offline age threshold in whole days.
    """
    prune = []
    for node_id, node in nodes.items():
        if 'lastseen' not in node:
            prune.append(node_id)
            continue

        lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S')

        # BUG FIX: timedelta.seconds is only the sub-day remainder
        # (0..86399), so the old comparison could never reach
        # days * 86400 for days >= 1.  total_seconds() includes the days.
        delta = (now - lastseen).total_seconds()

        if delta >= days * 86400:
            prune.append(node_id)

    for node_id in prune:
        del nodes[node_id]
def mark_online(node, now):
    """Flag *node* as online, recording the first sighting if unknown."""
    if 'firstseen' not in node:
        node['firstseen'] = now.isoformat()
    node['flags']['online'] = True
def import_nodeinfo(nodes, nodeinfos, now, assume_online=False):
    """Merge alfred nodeinfo records into *nodes*, keyed by node_id.

    Records without a node_id are ignored.  With assume_online=True the
    node is stamped as last seen now and flagged online.
    """
    for info in nodeinfos:
        if 'node_id' not in info:
            continue
        node = nodes.setdefault(info['node_id'], {'flags': {}})
        node['nodeinfo'] = info
        node['flags']['online'] = False
        node['flags']['gateway'] = False
        if assume_online:
            node['lastseen'] = now.isoformat()
            mark_online(node, now)
def reset_statistics(nodes):
    """Reinitialise every node's statistics to a zero client count."""
    for node in nodes.values():
        node['statistics'] = dict(clients=0)
def import_statistics(nodes, statistics):
    """Copy selected fields from alfred statistics records into the nodes.

    Records without a node_id, or with an unknown node_id, are skipped.
    Missing or malformed source fields are ignored field by field.
    """
    def copy_field(node, record, target, path, transform=lambda value: value):
        # Walk *path* into the record; any lookup failure skips the field.
        try:
            value = record
            for key in path:
                value = value[key]
            node['statistics'][target] = transform(value)
        except (KeyError, TypeError):
            pass

    macs = build_mac_table(nodes)
    for record in statistics:
        if 'node_id' not in record or record['node_id'] not in nodes:
            continue
        node = nodes[record['node_id']]
        copy_field(node, record, 'clients', ['clients', 'total'])
        # Translate the gateway MAC to a node_id where known.
        copy_field(node, record, 'gateway', ['gateway'], lambda mac: macs.get(mac, mac))
        copy_field(node, record, 'uptime', ['uptime'])
        copy_field(node, record, 'loadavg', ['loadavg'])
        copy_field(node, record, 'memory_usage', ['memory'],
                   lambda mem: 1 - mem['free'] / mem['total'])
        copy_field(node, record, 'rootfs_usage', ['rootfs_usage'])
def import_mesh_ifs_vis_data(nodes, vis_data):
    """Extend nodes' mesh_interfaces with secondaries seen in vis data."""
    macs = build_mac_table(nodes)

    # Group all interfaces (primary plus secondaries) by primary MAC.
    ifs_by_primary = defaultdict(set)
    for line in vis_data:
        if 'secondary' not in line:
            continue
        primary = line['of']
        ifs_by_primary[primary].update((primary, line['secondary']))

    # Attach each interface group to the first node owning one of its MACs;
    # groups without a known owner are dropped.
    for ifs in ifs_by_primary.values():
        owner = None
        for mac in ifs:
            if mac in macs:
                owner = nodes[macs[mac]]
                break
        if owner is None:
            continue
        known = set(owner['nodeinfo']['network']['mesh_interfaces'])
        owner['nodeinfo']['network']['mesh_interfaces'] = list(known | ifs)
def import_vis_clientcount(nodes, vis_data):
    """Derive per-node client counts from TT entries in the vis data.

    NOTE(review): statistics['clients'] is preset by reset_statistics, so
    the setdefault below never overwrites it — the vis-derived count only
    applies when that key is absent.  Confirm this precedence is intended.
    """
    macs = build_mac_table(nodes)
    routers = (line['router'] for line in vis_data
               if line.get('label', None) == 'TT' and line['router'] in macs)
    counts = Counter(macs[router] for router in routers)
    for node_id, clientcount in counts.items():
        nodes[node_id]['statistics'].setdefault('clients', clientcount)
def mark_gateways(nodes, gateways):
    """Set the gateway flag on every node owning one of *gateways*' MACs."""
    macs = build_mac_table(nodes)
    for gateway in gateways:
        if gateway in macs:
            nodes[macs[gateway]]['flags']['gateway'] = True
def mark_vis_data_online(nodes, vis_data, now):
    """Mark every node whose MAC appears in the vis data as online."""
    macs = build_mac_table(nodes)

    seen = set()
    for line in vis_data:
        if 'primary' in line:
            seen.add(line['primary'])
        elif 'secondary' in line:
            seen.add(line['secondary'])
        elif 'gateway' in line:
            # This matches clients' MACs.
            # On pre-Gluon nodes the primary MAC will be one of it.
            seen.add(line['gateway'])

    for mac in seen:
        if mac in macs:
            mark_online(nodes[macs[mac]], now)

19
rrddb.py Executable file → Normal file
View File

@ -26,23 +26,16 @@ class rrd:
except:
os.mkdir(self.imagePath)
def update_database(self,db):
nodes = db.get_nodes()
clientCount = sum(map(lambda d: d.clientcount, nodes))
def update_database(self, nodes):
online_nodes = dict(filter(lambda d: d[1]['flags']['online'], nodes.items()))
client_count = sum(map(lambda d: d['statistics']['clients'], online_nodes.values()))
curtime = time.time() - 60
self.globalDb.update(len(list(filter(lambda x: x.lastseen >= curtime, nodes))), clientCount)
for node in nodes:
rrd = NodeRRD(
os.path.join(self.dbPath, str(node.id).replace(':', '') + '.rrd'),
node
)
self.globalDb.update(len(online_nodes), client_count)
for node_id, node in online_nodes.items():
rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node)
rrd.update()
def update_images(self):
""" Creates an image for every rrd file in the database directory.
"""
self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal)
nodeDbFiles = os.listdir(self.dbPath)