2014-09-23 11:57:45 +02:00
|
|
|
#!/usr/bin/env python3
|
2015-03-24 16:49:37 +01:00
|
|
|
"""
|
|
|
|
backend.py - ffmap-backend runner
|
|
|
|
https://github.com/ffnord/ffmap-backend
|
|
|
|
"""
|
2014-09-23 11:57:45 +02:00
|
|
|
import argparse
|
|
|
|
import json
|
|
|
|
import os
|
2015-03-26 01:53:44 +01:00
|
|
|
import sys
|
2015-08-08 12:49:52 +02:00
|
|
|
import dateutil.parser
|
2014-09-23 11:57:45 +02:00
|
|
|
from datetime import datetime
|
2015-03-24 22:10:54 +01:00
|
|
|
|
|
|
|
import networkx as nx
|
2014-09-23 11:57:45 +02:00
|
|
|
from networkx.readwrite import json_graph
|
|
|
|
|
2015-03-24 22:48:00 +01:00
|
|
|
from lib import graph, nodes
|
|
|
|
from lib.alfred import Alfred
|
2015-03-24 22:10:54 +01:00
|
|
|
from lib.batman import Batman
|
|
|
|
from lib.rrddb import RRD
|
2015-04-08 12:54:46 +02:00
|
|
|
from lib.nodelist import export_nodelist
|
2015-05-03 13:11:22 +02:00
|
|
|
from lib.validate import validate_nodeinfos
|
2015-08-08 12:49:52 +02:00
|
|
|
from lib.respondc import request
|
2014-09-23 11:57:45 +02:00
|
|
|
|
2015-07-30 19:14:19 +02:00
|
|
|
# Schema version written into the generated nodes.json document.
NODES_VERSION = 2
# Schema version written into the generated graph.json document.
GRAPH_VERSION = 1
|
|
|
|
|
2015-08-08 12:49:52 +02:00
|
|
|
def recently_lost_nodes(now, nodesdict, maxage=600):
    """Return nodes last seen less than *maxage* seconds before *now*.

    Nodes whose age is exactly 0 (seen right now) are excluded — they are
    still online and do not need to be re-queried.

    now       -- timezone-naive datetime the ages are measured against
    nodesdict -- mapping of node_id -> node record with a 'lastseen'
                 ISO-8601 timestamp string
    maxage    -- cut-off in seconds (default: 10 minutes)
    """
    lost = []
    for record in nodesdict.values():
        last_seen = dateutil.parser.parse(record['lastseen'])
        seconds_gone = (now - last_seen).total_seconds()
        # Skip nodes seen just now and nodes gone for too long.
        if seconds_gone == 0 or seconds_gone >= maxage:
            continue
        lost.append(record)
    return lost
|
2014-09-23 11:57:45 +02:00
|
|
|
|
2015-03-24 16:49:37 +01:00
|
|
|
def main(params):
    """Run one ffmap-backend update cycle.

    Collects node information from alfred, respondd and batman-adv,
    merges it with the previous state and writes nodes.json, graph.json
    and nodelist.json into the destination directory.

    params -- dict of CLI options with keys 'dest_dir', 'mesh',
              'aliases', 'interface', 'hysteresis', 'prune', 'vpn'
              and 'rrd' (see the argparse setup below).
    """
    def node_to_ips(node):
        """Return the node's announced address list, or [] when unknown."""
        try:
            return node['nodeinfo']['network']['addresses']
        except KeyError:
            return []

    os.makedirs(params['dest_dir'], exist_ok=True)

    nodes_fn = os.path.join(params['dest_dir'], 'nodes.json')
    graph_fn = os.path.join(params['dest_dir'], 'graph.json')
    nodelist_fn = os.path.join(params['dest_dir'], 'nodelist.json')

    now = datetime.utcnow().replace(microsecond=0)

    # parse mesh param and instantiate Alfred/Batman instances
    alfred_instances = []
    batman_instances = []
    for value in params['mesh']:
        # (1) only batman-adv if, no alfred sock
        if ':' not in value:
            if len(params['mesh']) > 1:
                raise ValueError(
                    'Multiple mesh interfaces require the use of '
                    'alfred socket paths.')
            alfred_instances.append(Alfred(unix_sockpath=None))
            batman_instances.append(Batman(mesh_interface=value))
        else:
            # (2) batman-adv if + alfred socket
            try:
                batif, alfredsock = value.split(':')
                alfred_instances.append(Alfred(unix_sockpath=alfredsock))
                batman_instances.append(Batman(mesh_interface=batif,
                                               alfred_sockpath=alfredsock))
            except ValueError:
                raise ValueError(
                    'Unparseable value "{0}" in --mesh parameter.'.
                    format(value))

    # read nodedb state from node.json
    try:
        with open(nodes_fn, 'r', encoding=('UTF-8')) as nodedb_handle:
            nodedb = json.load(nodedb_handle)
    except IOError:
        # first run (or unreadable file): start with an empty node list
        nodedb = {'nodes': []}

    # set version we're going to output
    nodedb['version'] = NODES_VERSION

    # update timestamp and assume all nodes are offline
    nodedb['timestamp'] = now.isoformat()

    # index previous state by node_id for fast merge
    nodesdict = {}
    for node in nodedb['nodes']:
        nodesdict[node['nodeinfo']['node_id']] = node

    # integrate alfred nodeinfo
    for alfred in alfred_instances:
        nodeinfo = validate_nodeinfos(alfred.nodeinfo())
        nodes.import_nodeinfo(nodesdict, nodeinfo, now, assume_online=True)

    # acquire data from respondd (multicast query to all nodes)
    responses = list(request('nodeinfo statistics', ['ff02::2:1001'],
                             interface=params['interface']))

    nodeinfos = [r['nodeinfo'] for r in responses if 'nodeinfo' in r]
    nodes.import_nodeinfo(nodesdict, validate_nodeinfos(nodeinfos),
                          now, assume_online=True)

    # Re-query nodes that answered recently but missed the multicast,
    # via unicast to their first announced address.  Three rounds.
    for _ in range(3):
        # Skip nodes without any announced address; indexing [0] on an
        # empty node_to_ips() result would raise IndexError otherwise.
        ips = [addrs[0] for addrs
               in map(node_to_ips, recently_lost_nodes(now, nodesdict))
               if addrs]
        # Materialize once: the result is iterated twice below, which
        # would silently yield nothing if request() returns a generator.
        answers = list(request('nodeinfo statistics', ips,
                               interface=params['interface'], timeout=2))
        nodeinfos = [r['nodeinfo'] for r in answers if 'nodeinfo' in r]
        nodes.import_nodeinfo(nodesdict, validate_nodeinfos(nodeinfos),
                              now, assume_online=True)
        responses += answers

    # apply hysteresis: nodes unseen for longer than the configured
    # number of seconds are flagged offline and their statistics reset
    for node in nodesdict.values():
        lastseen = dateutil.parser.parse(node['lastseen'])
        age = (now - lastseen).total_seconds()

        online = age < params['hysteresis']
        node['flags']['online'] = online
        if not online:
            nodes.reset_statistics(node)

    # integrate static aliases data
    for aliases in params['aliases']:
        with open(aliases, 'r') as f:
            nodeinfo = validate_nodeinfos(json.load(f))
            nodes.import_nodeinfo(nodesdict, nodeinfo,
                                  now, assume_online=False)

    for alfred in alfred_instances:
        nodes.import_statistics(nodesdict, alfred.statistics())

    nodes.import_statistics(
        nodesdict, [r['statistics'] for r in responses if 'statistics' in r])

    # acquire visdata for each batman instance
    mesh_info = []
    for batman in batman_instances:
        vd = batman.vis_data()
        mesh_info.append(vd)

    # update nodedb from batman-adv data
    for vd in mesh_info:
        nodes.import_mesh_ifs_vis_data(nodesdict, vd)
        nodes.import_vis_clientcount(nodesdict, vd)

    # clear the nodedb from nodes that have not been online in $prune days
    if params['prune']:
        nodes.prune_nodes(nodesdict, now, params['prune'])

    # build nxnetworks graph from nodedb and visdata
    batadv_graph = nx.DiGraph()
    for vd in mesh_info:
        graph.import_vis_data(batadv_graph, nodesdict, vd)

    # force mac addresses to be vpn-link only (like gateways for example)
    if params['vpn']:
        graph.mark_vpn(batadv_graph, frozenset(params['vpn']))

    nodedb['nodes'] = list(nodesdict.values())

    def extract_tunnel(node_records):
        """Collect all MACs announced as bat0 'tunnel' interfaces."""
        macs = set()
        for node in node_records:
            try:
                for mac in node["nodeinfo"]["network"]["mesh"]["bat0"]["interfaces"]["tunnel"]:
                    macs.add(mac)
            except KeyError:
                # node does not announce tunnel interfaces
                pass
        return macs

    graph.mark_vpn(batadv_graph, extract_tunnel(nodedb['nodes']))

    batadv_graph = graph.merge_nodes(batadv_graph)
    batadv_graph = graph.to_undirected(batadv_graph)

    # write processed data to dest dir
    with open(nodes_fn, 'w') as f:
        json.dump(nodedb, f)

    graph_out = {'batadv': json_graph.node_link_data(batadv_graph),
                 'version': GRAPH_VERSION}

    with open(graph_fn, 'w') as f:
        json.dump(graph_out, f)

    with open(nodelist_fn, 'w') as f:
        json.dump(export_nodelist(now, nodedb), f)

    # optional rrd graphs (trigger with --rrd)
    if params['rrd']:
        script_directory = os.path.dirname(os.path.realpath(__file__))
        rrd = RRD(os.path.join(script_directory, 'nodedb'),
                  os.path.join(params['dest_dir'], 'nodes'))
        rrd.update_database(nodedb['nodes'])
        rrd.update_images()
|
2014-09-23 11:57:45 +02:00
|
|
|
|
|
|
|
|
2015-03-24 16:49:37 +01:00
|
|
|
if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    parser.add_argument('-a', '--aliases',
                        help='Read aliases from FILE',
                        nargs='+', default=[], metavar='FILE')
    # default=['bat0'] matches the documented behaviour; an empty default
    # would create no Alfred/Batman instance at all.
    parser.add_argument('-m', '--mesh',
                        default=['bat0'], nargs='+',
                        help='Use given batman-adv mesh interface(s) (defaults '
                             'to bat0); specify alfred unix socket like '
                             'bat0:/run/alfred0.sock.')
    # type=int is required: without it a CLI-supplied value arrives as str
    # and the "age < params['hysteresis']" comparison in main() raises
    # TypeError.
    parser.add_argument('--hysteresis', default=300, type=int,
                        help='Duration (seconds) after which a node is considered to be offline')
    parser.add_argument('-i', '--interface',
                        help='Interface for contacting respondd',
                        required=True)
    parser.add_argument('-d', '--dest-dir', action='store',
                        help='Write output to destination directory',
                        required=True)
    parser.add_argument('-V', '--vpn', nargs='+', metavar='MAC',
                        help='Assume MAC addresses are part of vpn')
    parser.add_argument('-p', '--prune', metavar='DAYS', type=int,
                        help='forget nodes offline for at least DAYS')
    parser.add_argument('--with-rrd', dest='rrd', action='store_true',
                        default=False,
                        help='enable the rendering of RRD graphs (cpu '
                             'intensive)')

    options = vars(parser.parse_args())
    main(options)
|