Compare commits

...

8 Commits

Author SHA1 Message Date
Your Name 238aaccabf early respondc support 2015-08-08 12:53:04 +02:00
Your Name d889c93f28 respondc.py 2015-08-08 12:53:03 +02:00
Nils Schneider c2e21b4f5b stop relying on batmanadv gateway feature 2015-07-31 11:57:44 +02:00
Nils Schneider 431d46e191 change nodes.json format to version 2 2015-07-30 19:21:56 +02:00
Nils Schneider 823b64b8ba memory_usage: assume buffers and cached to be free 2015-07-13 17:43:41 +02:00
Nils Schneider f2214ab130 Merge pull request #62 from kantorkel/firstseen
add firstseen to nodelist.json
2015-07-12 18:44:53 +02:00
kantorkel c5b321430e fixed 'blank line contains whitespace'
lib/nodelist.py:18:1: W293 blank line contains whitespace. fixed.
2015-07-12 18:08:17 +02:00
kantorkel ee84327b5c add firstseen to nodelist.json 2015-07-06 15:26:41 +02:00
5 changed files with 156 additions and 44 deletions

View File

@ -22,6 +22,7 @@ For the script's regular execution add the following to the crontab:
- Python 3 - Python 3
- Python 3 Package [Networkx](https://networkx.github.io/) - Python 3 Package [Networkx](https://networkx.github.io/)
- Python 3 Package dateutil
- [alfred-json](https://github.com/tcatm/alfred-json) - [alfred-json](https://github.com/tcatm/alfred-json)
- rrdtool (if run with `--with-rrd`) - rrdtool (if run with `--with-rrd`)
@ -115,3 +116,10 @@ This will remove owner information from nodes.json before copying the data
to your webserver. to your webserver.
[jq]: https://stedolan.github.io/jq/ [jq]: https://stedolan.github.io/jq/
# Convert from nodes.json version 1 to version 2
jq '.nodes = (.nodes | to_entries | map(.value)) | .version = 2' \
< nodes.json > nodes.json.new
mv nodes.json.new nodes.json

View File

@ -7,6 +7,7 @@ import argparse
import json import json
import os import os
import sys import sys
import dateutil.parser
from datetime import datetime from datetime import datetime
import networkx as nx import networkx as nx
@ -18,12 +19,29 @@ from lib.batman import Batman
from lib.rrddb import RRD from lib.rrddb import RRD
from lib.nodelist import export_nodelist from lib.nodelist import export_nodelist
from lib.validate import validate_nodeinfos from lib.validate import validate_nodeinfos
from lib.respondc import request
NODES_VERSION = 1 NODES_VERSION = 2
GRAPH_VERSION = 1 GRAPH_VERSION = 1
def recently_lost_nodes(now, nodesdict, maxage=600):
    """Collect nodes whose last sighting lies within *maxage* seconds.

    Nodes with an age of exactly zero (i.e. updated in the current run)
    are excluded, as are nodes not seen for *maxage* seconds or longer.
    Returns a list of node dicts.
    """
    def seconds_since_seen(node):
        lastseen = dateutil.parser.parse(node['lastseen'])
        return (now - lastseen).total_seconds()

    return [node for node in nodesdict.values()
            if 0 != seconds_since_seen(node) < maxage]
def main(params): def main(params):
def node_to_ips(node):
    # Return the node's advertised IP addresses from its nodeinfo record,
    # or an empty list when the network/addresses entry is missing.
    try:
        return node['nodeinfo']['network']['addresses']
    except KeyError:
        return []
os.makedirs(params['dest_dir'], exist_ok=True) os.makedirs(params['dest_dir'], exist_ok=True)
nodes_fn = os.path.join(params['dest_dir'], 'nodes.json') nodes_fn = os.path.join(params['dest_dir'], 'nodes.json')
@ -58,71 +76,108 @@ def main(params):
# read nodedb state from node.json # read nodedb state from node.json
try: try:
with open(nodes_fn, 'r') as nodedb_handle: with open(nodes_fn, 'r', encoding=('UTF-8')) as nodedb_handle:
nodedb = json.load(nodedb_handle) nodedb = json.load(nodedb_handle)
except IOError: except IOError:
nodedb = {'nodes': dict()} nodedb = {'nodes': []}
# flush nodedb if it uses the old format
if 'links' in nodedb:
nodedb = {'nodes': dict()}
# set version we're going to output # set version we're going to output
nodedb['version'] = NODES_VERSION nodedb['version'] = NODES_VERSION
# update timestamp and assume all nodes are offline # update timestamp and assume all nodes are offline
nodedb['timestamp'] = now.isoformat() nodedb['timestamp'] = now.isoformat()
for node_id, node in nodedb['nodes'].items():
node['flags']['online'] = False nodesdict = {}
nodesdict, graph = DO(params, nodesdict, graph)
for node in nodedb['nodes']:
nodesdict[node['nodeinfo']['node_id']] = node
# integrate alfred nodeinfo # integrate alfred nodeinfo
for alfred in alfred_instances: for alfred in alfred_instances:
nodeinfo = validate_nodeinfos(alfred.nodeinfo()) nodeinfo = validate_nodeinfos(alfred.nodeinfo())
nodes.import_nodeinfo(nodedb['nodes'], nodeinfo, nodes.import_nodeinfo(nodesdict, nodeinfo, now, assume_online=True)
now, assume_online=True)
# acquire data from respondd
responses = list(request('nodeinfo statistics', ['ff02::2:1001'], interface=params['interface']))
nodeinfos = list(map(lambda x: x['nodeinfo'], filter(lambda x: 'nodeinfo' in x, responses)))
nodes.import_nodeinfo(nodesdict, validate_nodeinfos(nodeinfos), now, assume_online=True)
ips = [i[0] for i in map(node_to_ips, recently_lost_nodes(now, nodesdict))]
a = request('nodeinfo statistics', ips, interface=params['interface'], timeout=2)
nodeinfos = list(map(lambda x: x['nodeinfo'], filter(lambda x: 'nodeinfo' in x, a)))
nodes.import_nodeinfo(nodesdict, validate_nodeinfos(nodeinfos), now, assume_online=True)
responses += a
ips = [i[0] for i in map(node_to_ips, recently_lost_nodes(now, nodesdict))]
a = request('nodeinfo statistics', ips, interface=params['interface'], timeout=2)
nodeinfos = list(map(lambda x: x['nodeinfo'], filter(lambda x: 'nodeinfo' in x, a)))
nodes.import_nodeinfo(nodesdict, validate_nodeinfos(nodeinfos), now, assume_online=True)
responses += a
ips = [i[0] for i in map(node_to_ips, recently_lost_nodes(now, nodesdict))]
a = request('nodeinfo statistics', ips, interface=params['interface'], timeout=2)
nodeinfos = list(map(lambda x: x['nodeinfo'], filter(lambda x: 'nodeinfo' in x, a)))
nodes.import_nodeinfo(nodesdict, validate_nodeinfos(nodeinfos), now, assume_online=True)
responses += a
for node in nodesdict.values():
lastseen = dateutil.parser.parse(node['lastseen'])
age = (now - lastseen).total_seconds()
online = age < params['hysteresis']
node['flags']['online'] = online
if not online:
nodes.reset_statistics(node)
# integrate static aliases data # integrate static aliases data
for aliases in params['aliases']: for aliases in params['aliases']:
with open(aliases, 'r') as f: with open(aliases, 'r') as f:
nodeinfo = validate_nodeinfos(json.load(f)) nodeinfo = validate_nodeinfos(json.load(f))
nodes.import_nodeinfo(nodedb['nodes'], nodeinfo, nodes.import_nodeinfo(nodesdict, nodeinfo,
now, assume_online=False) now, assume_online=False)
nodes.reset_statistics(nodedb['nodes'])
for alfred in alfred_instances: for alfred in alfred_instances:
nodes.import_statistics(nodedb['nodes'], alfred.statistics()) nodes.import_statistics(nodesdict, alfred.statistics())
# acquire gwl and visdata for each batman instance nodes.import_statistics(nodesdict, list(map(lambda x: x['statistics'], filter(lambda x: 'statistics' in x, responses))))
# acquire visdata for each batman instance
mesh_info = [] mesh_info = []
for batman in batman_instances: for batman in batman_instances:
vd = batman.vis_data() vd = batman.vis_data()
gwl = batman.gateway_list()
mesh_info.append((vd, gwl)) mesh_info.append(vd)
# update nodedb from batman-adv data # update nodedb from batman-adv data
for vd, gwl in mesh_info: for vd in mesh_info:
nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vd) nodes.import_mesh_ifs_vis_data(nodesdict, vd)
nodes.import_vis_clientcount(nodedb['nodes'], vd) nodes.import_vis_clientcount(nodesdict, vd)
nodes.mark_vis_data_online(nodedb['nodes'], vd, now)
nodes.mark_gateways(nodedb['nodes'], gwl)
# clear the nodedb from nodes that have not been online in $prune days # clear the nodedb from nodes that have not been online in $prune days
if params['prune']: if params['prune']:
nodes.prune_nodes(nodedb['nodes'], now, params['prune']) nodes.prune_nodes(nodesdict, now, params['prune'])
# build nxnetworks graph from nodedb and visdata # build nxnetworks graph from nodedb and visdata
batadv_graph = nx.DiGraph() batadv_graph = nx.DiGraph()
for vd, gwl in mesh_info: for vd in mesh_info:
graph.import_vis_data(batadv_graph, nodedb['nodes'], vd) graph.import_vis_data(batadv_graph, nodesdict, vd)
# force mac addresses to be vpn-link only (like gateways for example) # force mac addresses to be vpn-link only (like gateways for example)
if params['vpn']: if params['vpn']:
graph.mark_vpn(batadv_graph, frozenset(params['vpn'])) graph.mark_vpn(batadv_graph, frozenset(params['vpn']))
nodedb['nodes'] = list(nodesdict.values())
def extract_tunnel(nodes): def extract_tunnel(nodes):
macs = set() macs = set()
for id, node in nodes.items(): for node in nodes:
try: try:
for mac in node["nodeinfo"]["network"]["mesh"]["bat0"]["interfaces"]["tunnel"]: for mac in node["nodeinfo"]["network"]["mesh"]["bat0"]["interfaces"]["tunnel"]:
macs.add(mac) macs.add(mac)
@ -165,10 +220,15 @@ if __name__ == '__main__':
help='Read aliases from FILE', help='Read aliases from FILE',
nargs='+', default=[], metavar='FILE') nargs='+', default=[], metavar='FILE')
parser.add_argument('-m', '--mesh', parser.add_argument('-m', '--mesh',
default=['bat0'], nargs='+', default=[], nargs='+',
help='Use given batman-adv mesh interface(s) (defaults' help='Use given batman-adv mesh interface(s) (defaults'
'to bat0); specify alfred unix socket like ' 'to bat0); specify alfred unix socket like '
'bat0:/run/alfred0.sock.') 'bat0:/run/alfred0.sock.')
parser.add_argument('--hysteresis', default=300,
help='Duration (seconds) after which a node is considered to be offline')
parser.add_argument('-i', '--interface',
help='Interface for contacting respondd',
required=True)
parser.add_argument('-d', '--dest-dir', action='store', parser.add_argument('-d', '--dest-dir', action='store',
help='Write output to destination directory', help='Write output to destination directory',
required=True) required=True)

View File

@ -1,9 +1,9 @@
def export_nodelist(now, nodedb): def export_nodelist(now, nodedb):
nodelist = list() nodelist = list()
for node_id, node in nodedb["nodes"].items(): for node in nodedb["nodes"]:
node_out = dict() node_out = dict()
node_out["id"] = node_id node_out["id"] = node["nodeinfo"]["node_id"]
node_out["name"] = node["nodeinfo"]["hostname"] node_out["name"] = node["nodeinfo"]["hostname"]
if "location" in node["nodeinfo"]: if "location" in node["nodeinfo"]:
@ -13,6 +13,9 @@ def export_nodelist(now, nodedb):
node_out["status"] = dict() node_out["status"] = dict()
node_out["status"]["online"] = node["flags"]["online"] node_out["status"]["online"] = node["flags"]["online"]
if "firstseen" in node:
node_out["status"]["firstcontact"] = node["firstseen"]
if "lastseen" in node: if "lastseen" in node:
node_out["status"]["lastcontact"] = node["lastseen"] node_out["status"]["lastcontact"] = node["lastseen"]

View File

@ -53,7 +53,6 @@ def prune_nodes(nodes, now, days):
def mark_online(node, now): def mark_online(node, now):
node['lastseen'] = now.isoformat() node['lastseen'] = now.isoformat()
node.setdefault('firstseen', now.isoformat()) node.setdefault('firstseen', now.isoformat())
node['flags']['online'] = True
def import_nodeinfo(nodes, nodeinfos, now, assume_online=False): def import_nodeinfo(nodes, nodeinfos, now, assume_online=False):
@ -61,14 +60,12 @@ def import_nodeinfo(nodes, nodeinfos, now, assume_online=False):
node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()}) node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()})
node['nodeinfo'] = nodeinfo node['nodeinfo'] = nodeinfo
node['flags']['online'] = False node['flags']['online'] = False
node['flags']['gateway'] = False
if assume_online: if assume_online:
mark_online(node, now) mark_online(node, now)
def reset_statistics(nodes): def reset_statistics(node):
for node in nodes.values():
node['statistics'] = {'clients': 0} node['statistics'] = {'clients': 0}
@ -86,11 +83,10 @@ def import_statistics(nodes, stats):
stats = filter(lambda d: d['node_id'] in nodes, stats) stats = filter(lambda d: d['node_id'] in nodes, stats)
for node, stats in map(lambda d: (nodes[d['node_id']], d), stats): for node, stats in map(lambda d: (nodes[d['node_id']], d), stats):
add(node, stats, 'clients', ['clients', 'total']) add(node, stats, 'clients', ['clients', 'total'])
add(node, stats, 'gateway', ['gateway'], lambda d: macs.get(d, d))
add(node, stats, 'uptime', ['uptime']) add(node, stats, 'uptime', ['uptime'])
add(node, stats, 'loadavg', ['loadavg']) add(node, stats, 'loadavg', ['loadavg'])
add(node, stats, 'memory_usage', ['memory'], add(node, stats, 'memory_usage', ['memory'],
lambda d: 1 - d['free'] / d['total']) lambda d: 1 - (d['free'] + d['buffers'] + d['cached']) / d['total'])
add(node, stats, 'rootfs_usage', ['rootfs_usage']) add(node, stats, 'rootfs_usage', ['rootfs_usage'])
add(node, stats, 'traffic', ['traffic']) add(node, stats, 'traffic', ['traffic'])
@ -152,14 +148,6 @@ def import_vis_clientcount(nodes, vis_data):
nodes[node_id]['statistics'].setdefault('clients', clientcount) nodes[node_id]['statistics'].setdefault('clients', clientcount)
def mark_gateways(nodes, gateways):
macs = build_mac_table(nodes)
gateways = filter(lambda d: d in macs, gateways)
for node in map(lambda d: nodes[macs[d]], gateways):
node['flags']['gateway'] = True
def mark_vis_data_online(nodes, vis_data, now): def mark_vis_data_online(nodes, vis_data, now):
macs = build_mac_table(nodes) macs = build_mac_table(nodes)

53
lib/respondc.py Normal file
View File

@ -0,0 +1,53 @@
#!/usr/bin/env python3
import socket
import zlib
import json
import sys
def request(request, targets, interface, timeout=0.5, singleshot=False):
    """Query respondd daemons over UDP and collect their replies.

    Sends ``GET <request>`` to every address in *targets* (UDP port 1001,
    scoped to *interface*) and gathers raw-deflate-compressed JSON
    replies until *timeout* seconds pass without an answer, or — with
    *singleshot* — after the first valid reply.

    Returns an iterable of decoded response dicts, keyed internally by
    source address (so duplicates from one host collapse); returns []
    when *interface* does not exist. Progress is reported on stderr.
    """
    try:
        if_id = socket.if_nametoindex(interface)
    except OSError:
        # bug fix: this branch referenced the undefined name 'ifname'
        # and raised NameError instead of printing the diagnostic
        print('interface \'{}\' not found'.format(interface), file=sys.stderr)
        return []

    # close the socket deterministically instead of leaking it
    with socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) as sock:
        # request
        message = bytes('GET {}'.format(request), 'utf-8')
        for target in targets:
            sock.sendto(message, (target, 1001, 0, if_id))
            print('+ {:s}'.format(str(message)), file=sys.stderr)

        sock.settimeout(timeout)

        # receive
        responses = {}
        rsp, err = 0, 0
        while True:
            print('\r+ {rsp} responses, {err} errors'.format(**locals()), end='', file=sys.stderr)

            try:
                buffer, address = sock.recvfrom(2048)
            except socket.timeout:
                print('\n+ no replies for %f seconds, continuing...' % timeout, file=sys.stderr)
                break

            try:
                # strip the zone suffix ('%iface') from link-local sources
                source = address[0].split('%')[0]
                # -15: raw deflate stream (no zlib header/checksum)
                data = zlib.decompress(buffer, -15)
                nodeinfo = json.loads(data.decode('utf-8'))

                responses[source] = nodeinfo
                rsp += 1
            except (zlib.error, UnicodeDecodeError, ValueError):
                err += 1
                print('- unreadable answer from {addr}'.format(addr=source), file=sys.stderr)

            if singleshot:
                break

    return responses.values()