#!/usr/bin/env python2.5
# -*- encoding: utf-8 -*-
#
# MuninStatus.py: fetches the Munin page and filters it to keep only the
# interesting links
#
# Copyright (c) 2009, Nicolas Dandrimont
#
# This file is released under the GNU General Public License version 2
#

import sys
import urllib

try:
    import BeautifulSoup
except ImportError:
    print "munin.py nécessite BeautifulSoup (paquet python-beautifulsoup)"
    sys.exit(2)

URL = "http://munin.crans.org/"
ENCODING = "utf-8"

# Munin marks graphs in critical or warning state with these CSS classes.
ATTRS = {
    'class': ('crit', 'warn'),
}


def to_unicode(input, encodings=None):
    """Decode a byte string, trying each encoding in turn."""
    if not encodings:
        encodings = ['utf-8', 'iso-8859-15']
    for encoding in encodings:
        try:
            return input.decode(encoding)
        except UnicodeDecodeError:
            pass
    # Last resort: decode with the final encoding, dropping invalid bytes.
    return input.decode(encodings[-1], 'ignore')


def family(nodes):
    """Return the nodes along with all of their parents and children."""
    parents = sum((node.findParents() for node in nodes), [])
    children = sum((node.findChildren() for node in nodes), [])
    return nodes + parents + children


def keep_item_headings(headings, items, level=1):
    """Keep the headings whose ancestor, `level` steps up, contains an item."""
    kept = []

    def level_up(heading):
        parent = heading
        for _ in range(level):
            parent = parent.parent
        return parent

    for heading in headings:
        parent = level_up(heading)
        found_parent = False
        for parents in [item.findParents() for item in items]:
            if parent in parents:
                found_parent = True
                break
        if found_parent:
            kept.append(heading)
    return kept


def execute(macro, _):
    munin = BeautifulSoup.BeautifulSoup(urllib.urlopen(URL).read(),
                                        fromEncoding=ENCODING)

    # Graphs in warning or critical state, plus the host and domain headings
    # that contain at least one of them.
    warning_items = munin.findAll(attrs=ATTRS)
    warning_hosts = keep_item_headings(munin.findAll(attrs="host"),
                                       warning_items)
    warning_domains = keep_item_headings(munin.findAll(attrs="domain"),
                                         warning_items)

    keep = family(warning_items + warning_hosts + warning_domains)

    # Prune everything unrelated to a warning.
    for item in munin.findChildren():
        if item not in keep:
            item.extract()

    # Make the remaining links absolute.
    for tag in munin.findAll(href=True):
        tag["href"] = URL + tag["href"]

    if munin.html and munin.html.body:
        return to_unicode(munin.html.body.prettify())
    else:
        return u"Munin n'a rien à dire."
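

# A minimal standalone check, assuming munin.crans.org is reachable and the
# BeautifulSoup 3 package is installed; the execute(macro, _) signature looks
# like a wiki-macro entry point, and since the macro argument is unused here,
# passing None is enough to exercise the fetching and filtering logic.
if __name__ == "__main__":
    print execute(None, None).encode("utf-8")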