New upload-accounting and disconnection script.
* It counts incrementally and stores the intermediate tallies in a table named accounting * On later runs it only takes the deltas into account
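
In outline, each run merges three result sets: the rows that have aged out of the accounting window are subtracted, while the stored totals and the newly arrived rows are added. A minimal sketch of that merge (the helper name is hypothetical; it mirrors the account() function in the diff below):

    def merge_deltas(expired, stored, fresh):
        # expired: rows that left the sliding window since the last run
        # stored: rows already persisted in the accounting table
        # fresh: rows that entered the window since the last run
        totals = {}
        for row in expired:
            totals[(row['type'], row['id'])] = -row['up']
        for row in stored + fresh:
            key = (row['type'], row['id'])
            totals[key] = totals.get(key, 0) + row['up']
        return totals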
This commit is contained in:
parent
6093c61b42
commit
38cf8eb8e9
4 changed files with 520 additions and 2 deletions
498	surveillance/deconnexion2.py	Executable file
@@ -0,0 +1,498 @@
#!/bin/bash /usr/scripts/python.sh
# -*- coding: utf-8 -*-
"""
Script that accounts members' upload traffic, and throttles their upstream
bandwidth if they exceed the allotted quota
"""

################################################################################
#                                   Imports                                    #
################################################################################

import sys
import psycopg2
import psycopg2.extras
import datetime
import pytz
import cStringIO

import gestion.config.mails.upload as mails_upload
import gestion.affichage as affichage
import lc_ldap.shortcuts as shortcuts
import lc_ldap.objets as objets

from surveillance.fiche_deconnexion.generate import generate_ps
from gestion.config import NETs, plage_ens, prefix
from gestion.config import upload as upload
import surveillance.analyse2 as analyse
import gestion.mail as mail_module

# When this is True, the script only prints instead of doing any harm
DEBUG = False

EPOCH = pytz.utc.localize(datetime.datetime(1970, 1, 1))
TZ = pytz.timezone('Europe/Paris')
DELTA = datetime.timedelta(0, upload.interval * 3600, 0)
CUR_DATE = TZ.normalize(TZ.localize(datetime.datetime.now()))

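# QUERY totals, per machine owner (type, id), the bytes uploaded between
# now - %(delta_inf)s and now - %(delta_sup)s, skipping internal and exempted
# destinations; the two delta parameters let the same statement serve both
# the expired-window pass and the new-window pass in get_upload_data().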
QUERY = """WITH
    machines_sans_doublon AS (
        SELECT DISTINCT ON (mac_addr) *
        FROM machines
    )
SELECT
    SUM(agregat.total) AS up, machines_sans_doublon.type, machines_sans_doublon.id
FROM (
    SELECT
        'upload', sum(bytes) AS total, mac_src
    FROM upload
    WHERE
        stamp_updated > now() - %(delta_inf)s
        AND stamp_updated < now() - %(delta_sup)s
        AND NOT (
            ip_dst <<= inet%(plage_ens)s
            OR ip_dst <<= inet%(plage_ipv6)s
            OR ip_dst <<= inet%(appt)s
            OR ip_src <<= inet%(ipv6_local)s
            OR ip_src = inet'0.0.0.0'
            OR ip_src <<= inet%(plage_adm)s
            OR ip_dst <<= inet%(plage_adm)s
        )
        AND (
            ip_src <<= inet%(allone)s
            OR ip_src <<= inet%(alltwo)s
            OR ip_src <<= inet%(plage_ipv6)s
            OR ip_src <<= inet%(appt)s
        )
        AND NOT EXISTS (
            SELECT 1
            FROM exemptes
            WHERE upload.ip_src <<= exemptes.ip_crans
            AND upload.ip_dst <<= exemptes.ip_dest
        )
        AND NOT EXISTS (
            SELECT 1
            FROM exemptes6
            WHERE upload.mac_src = exemptes6.mac_crans
            AND upload.ip_dst <<= exemptes6.ip_dest
        )
    GROUP BY mac_src
) AS agregat
INNER JOIN machines_sans_doublon
    ON machines_sans_doublon.mac_addr = agregat.mac_src
GROUP BY machines_sans_doublon.type, machines_sans_doublon.id
ORDER BY up;
"""

def get_last_update(curseur):
    """Query the PostgreSQL database for the date of the
    last update"""

    requete = """SELECT DISTINCT date FROM accounting;"""
    curseur.execute(requete)
    result = curseur.fetchall()
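    # Several distinct dates (or an empty table) means the accounting state
    # is inconsistent: wipe it and restart the count from the Unix epoch.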
    if len(result) > 1 or not result:
        curseur.execute("TRUNCATE accounting;")
        result = [[pytz.utc.localize(datetime.datetime.utcfromtimestamp(0))]]
    return result[0][0]

def get_upload_data(curseur):
    """Run the accounting queries against the PostgreSQL database"""

    # Whether a subtraction pass must be performed
    substract = True
    to_substract = {}

    # Fetch the date of the database's last update.
    last_update = get_last_update(curseur)

    delta = TZ.normalize(TZ.localize(datetime.datetime.now())) - last_update

    # The delta counts double, so once past 12 hours it is better to flush and start over.
    if delta >= datetime.timedelta(0, upload.interval/2 * 3600, 0):
        curseur.execute("TRUNCATE accounting;")
        substract = False
        delta = DELTA

    # If this is an update (rather than a fresh full count)
    if substract:
        curseur.execute(QUERY, {
            'plage_ens': plage_ens,
            'allone': NETs['all'][0],
            'alltwo': NETs['all'][1],
            'plage_ipv6': prefix['subnet'][0],
            'appt': NETs['personnel-ens'][0],
            'ipv6_local': 'fe80::/8',
            'plage_adm': NETs['adm'][0],
            'delta_inf': DELTA + delta,
            'delta_sup': DELTA,
        })
        to_substract = curseur.fetchall()

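    # The second QUERY pass further below covers the complementary window
    # [now - delta, now): the rows that arrived since the last run.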
    # Also fetch what is already in the database (possibly nothing)
    curseur.execute("SELECT * FROM accounting;")
    already_logged = curseur.fetchall()

    # And count what has to be added (potentially everything)
    curseur.execute(QUERY, {
        'plage_ens': plage_ens,
        'allone': NETs['all'][0],
        'alltwo': NETs['all'][1],
        'plage_ipv6': prefix['subnet'][0],
        'appt': NETs['personnel-ens'][0],
        'ipv6_local': 'fe80::/8',
        'plage_adm': NETs['adm'][0],
        'delta_inf': delta,
        'delta_sup': datetime.timedelta(0),
    })
    to_add = curseur.fetchall()

    # Trace the three result sets when debugging
    if DEBUG:
        print to_substract
        print already_logged
        print to_add
    return (to_substract, already_logged, to_add)

def account(curseur):
    """Fetch the upload data and merge the result sets."""

    accounted = {}

    to_substract, already_logged, to_add = get_upload_data(curseur)

    # Entries that are now too old count negatively
    for entry in to_substract:
        accounted[(entry['type'], entry['id'])] = -entry['up']

    # Everything else counts positively.
    for entry in already_logged + to_add:
        accounted[(entry['type'], entry['id'])] = accounted.get((entry['type'], entry['id']), 0) + entry['up']

    return accounted

def upload_hard(proprio, elupload, elid, eltype, curseur, ldap):
    """Hard-blacklist the owner, and send the notification mails"""
    # Test: validation_url('upload')
    try:
        data = {
            'dn': proprio.dn.split(',')[0],
            'blid': len(proprio['blacklist'])
        }
        reco_url = mail_module.validation_url('upload', data)
        reco_url_error = u""
    except Exception as error:
        reco_url_error = u"[[erreur de génération: %r]]" % error
        reco_url = u""

    # Find out where and when the owner's machines
    # were last seen
    ####################################################
    machines = proprio.machines()
    macs_dates_chambres = []
    for machine in machines:
        if isinstance(machine, objets.machineFixe):
            mac = unicode(machine.get('macAddress', [u'<automatique>'])[0])

            # Skip automatically assigned addresses
            if mac == u'<automatique>':
                continue

            date, chambre = reperage_chambre(mac)
            macs_dates_chambres.append([mac, date, chambre])

    mdcf = affichage.tableau(macs_dates_chambres, ('mac', 'date', 'chambre'), (20, 21, 7), ('c', 'c', 'c'))

    mail_data = {
        'from': upload.expediteur,
        'to': unicode(proprio['mail'][0]),
        'upload': "%.2f" % (elupload,),
        'proprio': u" ".join([
            unicode(proprio.get('prenom', [u''])[0]),
            unicode(proprio.get('nom', [u''])[0]),
        ]),
        'lang_info': 'English version below',
        'mdc': mdcf,
        'chambre': unicode(proprio.get('chbre', [u'????'])[0]),
        'id': unicode(proprio.dn.split(',')[0]),
        'reco_url': reco_url,
        'reco_url_error': reco_url_error,
    }

    # Apply the sanction
    ###############
    debut = TZ.normalize(TZ.localize(datetime.datetime.now()))
    stamp_debut = int((debut - EPOCH).total_seconds())
    end = debut.strftime("%Y/%m/%d %H:%M:%S")
    hier = TZ.normalize(debut - datetime.timedelta(1)).strftime("%Y/%m/%d %H:%M:%S")
    # No automatic reconnection when a reconnection URL is available
    if reco_url:
        fin = "-"
    else:
        fin = int((TZ.normalize(debut + datetime.timedelta(1)) - EPOCH).total_seconds())

    self_call_type = proprio.dn.split(',')[0].split('=')[0]

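    # Everything below can fail (LDAP write, SQL insert, self_call):
    # any error aborts the sanction and is reported on stderr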
    try:
        proprio.blacklist('autodisc_upload', 'Upload %s Mo' % (elupload,), stamp_debut, fin)
        if not DEBUG:
            proprio.history_gen()
            proprio.save()
        else:
            print proprio['blacklist']
            proprio.cancel()

        # Record the instance in the avertis_hard table
        ######################################################
        if not DEBUG:
            curseur.execute("INSERT INTO avertis_upload_hard (type, id, date) VALUES (%s, %s, 'now')", (eltype, elid))
            analyse.self_call([
                "--%s" % (self_call_type,),
                "%s" % (elid,),
                "--dns",
                "--begin",
                "%s" % (hier,),
                "--end",
                "%s" % (end,),
                "--limit", upload.analyse_limit,
                "--fichier", upload.analyse_file_tpl % (end.replace("/", "_").replace(":", "_").replace(" ", "_"), self_call_type, elid)
            ])
    except Exception as error:
        sys.stderr.write("Blacklist de %s pour %s Mo échoué, %s\n" % (proprio.dn.split(',')[0], elupload, error))
        return

    # Send a mail to the member
    # Send a mail to disconnect
    ################################
    with mail_module.ServerConnection() as smtp_conn:
        corps = mail_module.generate('upload_hard', mail_data).as_string()
        if not DEBUG:
            smtp_conn.sendmail(upload.expediteur, proprio.email(), corps)
        else:
            print corps

        mail_data['to'] = upload.expediteur
        corps = mail_module.generate('upload_notif', mail_data).as_string()
        if not DEBUG:
            smtp_conn.sendmail(upload.expediteur, upload.expediteur, corps)
        else:
            print corps

    # Check the number of disconnections
    #########################################
    nb_decos = len([
        blacklist
        for blacklist in proprio['blacklist']
        if int(blacklist['debut']) > stamp_debut - upload.periode_watch
        and blacklist['type'] == u'autodisc_upload'
    ])
    if DEBUG:
        print nb_decos

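    # Repeated offenders: past upload.max_decos disconnections within the
    # watch period, also send the printable disconnection sheet to disconnect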
    if nb_decos >= upload.max_decos and not DEBUG:
        # Generate the PostScript file
        try:
            fichier_ps = generate_ps('upload', proprio, ldap)
        except Exception as error:
            fichier_ps = ("ERREUR lors de la génération. Merci de regénérer manuellement la fiche avec la commande :\n" + "/usr/scripts/surveillance/fiche_deconnexion/generate.py --upload %s" % (proprio.dn.split(',')[0],))

        # Send the mail to disconnect
        with mail_module.ServerConnection() as smtp_conn:
            corps = mails_upload.message_disconnect_multi % {
                'from': upload.expediteur,
                'to': upload.expediteur,
                'nbdeco': nb_decos,
                'proprio': u" ".join([
                    unicode(proprio.get('prenom', [u''])[0]),
                    unicode(proprio.get('nom', [u''])[0])
                ]),
                'ps': fichier_ps,
            }
            corps = corps.encode('utf-8')
            smtp_conn.sendmail(upload.expediteur, upload.expediteur, corps)

def upload_soft(proprio, elupload, elid, eltype, curseur):
    """Send a warning mail and record it"""
    # Record the warning in the soft-warning table
    ################################################
    if not DEBUG:
        curseur.execute("INSERT INTO avertis_upload_soft (type, id, date) VALUES (%s, %s, 'now')", (eltype, elid))

    # Send a mail to the member
    ################################
    with mail_module.ServerConnection() as smtp_connect:
        corps = mail_module.generate('upload_soft', {
            'from': upload.expediteur,
            'to': unicode(proprio['mail'][0]),
            'upload': "%.2f" % (elupload,),
            'proprio': u" ".join([
                unicode(proprio.get('prenom', [u''])[0]),
                unicode(proprio.get('nom', [u''])[0]),
            ]),
            'lang_info': 'English version below',
            'limite_soft': upload.soft,
            'limite_hard': upload.hard,
        }).as_string()
        if not DEBUG:
            smtp_connect.sendmail(upload.expediteur, proprio.email(), corps)
        else:
            print corps

        # Send a mail to disconnect (smtp_connect is still open here)
        ################################
        if upload.disconnect_mail_soft and not DEBUG:
            corps = mails_upload.message_disconnect_soft % {
                'from': upload.expediteur,
                'to': upload.expediteur,
                'upload': "%.2f" % (elupload,),
                'proprio': u" ".join([
                    unicode(proprio.get('prenom', [u''])[0]),
                    unicode(proprio.get('nom', [u''])[0]),
                ]),
            }
            corps = corps.encode('utf-8')
            smtp_connect.sendmail(upload.expediteur, upload.expediteur, corps)

def single_check(eltype, elid, elupload, ldap, curseur):
    """Check a single user, and apply a blacklist
    or send a warning."""

    if elupload >= upload.hard:
        # Has the member already been blacklisted?
        ####################################
        if (eltype, elid) in AVERTIS_UPLOAD_HARD and not DEBUG:
            return

        # Owner object from LDAP
        ###########################
        if eltype == 'club':
            proprio = ldap.search(u'(cid=%d)' % (int(elid),), 'w')
        elif eltype == 'adherent':
            proprio = ldap.search(u'(aid=%d)' % (int(elid),), 'w')
        else:
            return

        if len(proprio) != 1:
            print 'Erreur : Proprio non trouvé (%s) %d' % (eltype, elid)
            return

        proprio = proprio[0]
        upload_hard(proprio, elupload, elid, eltype, curseur, ldap)

    elif elupload >= upload.soft:
        # Has the member already been warned, or already disconnected?
        #################################################
        if ((eltype, elid) in AVERTIS_UPLOAD_SOFT or (eltype, elid) in AVERTIS_UPLOAD_HARD) and not DEBUG:
            return

        # LDAP objects
        #############
        if eltype == 'club':
            proprio = ldap.search(u'(cid=%d)' % (int(elid),), 'w')
        elif eltype == 'adherent':
            proprio = ldap.search(u'(aid=%d)' % (int(elid),), 'w')
        else:
            return

        if len(proprio) != 1:
            print 'Erreur : Proprio non trouvé (%s) %d' % (eltype, elid)
            return

        proprio = proprio[0]
        upload_soft(proprio, elupload, elid, eltype, curseur)

def check_and_blacklist(accounted, curseur, ldap):
    """Take the dict of uploaders, and block those who are
    over the limit"""

    for ((eltype, elid), elupload) in accounted.iteritems():
        # accounted is in bytes; convert to MiB before comparing to the limits
        single_check(eltype, elid, float(elupload)/1024/1024, ldap, curseur)

    # Purge the old warnings
    curseur.execute("DELETE FROM avertis_upload_hard WHERE date < timestamp 'now' - interval '85200 seconds'")  # 23h40min, so that blacklists still over the limit keep getting extended
    curseur.execute("DELETE FROM avertis_upload_soft WHERE date < timestamp 'now' - interval '1 day'")

def main(curseur, ldap):
    """Drive the calls to the other functions"""

    # Fetch the data
    accounted = account(curseur)

    # Build a stream that PostgreSQL will consume afterwards
    stream = cStringIO.StringIO("\n".join(["%s\t%s\t%s\t%s" % (key[0], key[1], value, CUR_DATE) for (key, value) in accounted.iteritems()]))
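    # copy_from reads tab-separated rows; each line carries (type, id, up, date)
    # in the accounting table's column order.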

    # Refresh the accounting table.
    curseur.execute("TRUNCATE accounting;")
    curseur.copy_from(stream, "accounting")

    # Check and blacklist
    check_and_blacklist(accounted, curseur, ldap)

# Find the room where the machine being disconnected was located.
# TODO: move this into annuaires_pg
def reperage_chambre(mac):
    """Find the room where the MAC address was last seen"""
    if mac == '<automatique>':
        return "Inconnue", "Inconnue"

    pgsql = psycopg2.connect(host="thot.adm.crans.org", database='mac_prises', user='crans')
    # Probably unnecessary, we only run SELECTs
    pgsql.set_session(autocommit=True)
    curseur = pgsql.cursor()
    requete = "SELECT date, chambre FROM correspondance WHERE mac=%s ORDER BY date DESC LIMIT 1;"
    curseur.execute(requete, (mac,))
    result = curseur.fetchall()
    if result:
        return result[0][0], result[0][1]
    else:
        return "Inconnue", "Inconnue"

if __name__ == "__main__":
    LDAP = shortcuts.lc_ldap_admin()

    # Connect to the SQL database via psycopg2
    PGSQL = psycopg2.connect(database='filtrage', user='crans')
    PGSQL.set_session(autocommit=True)
    CURSEUR = PGSQL.cursor(cursor_factory=psycopg2.extras.DictCursor)

    # Owners already hard-warned for upload
    REQUETE = "SELECT type, id FROM avertis_upload_hard WHERE date > now() - %(interval)s;"
    CURSEUR.execute(REQUETE, {
        'interval': DELTA,
    })
    # Convert the DictRows to plain tuples so that the (eltype, elid)
    # membership tests in single_check() can match them
    AVERTIS_UPLOAD_HARD = [tuple(row) for row in CURSEUR.fetchall()]

    # Owners already soft-warned for upload
    REQUETE = "SELECT type, id FROM avertis_upload_soft WHERE date > now() - %(interval)s;"
    CURSEUR.execute(REQUETE, {
        'interval': DELTA,
    })
    AVERTIS_UPLOAD_SOFT = [tuple(row) for row in CURSEUR.fetchall()]

    main(CURSEUR, LDAP)