diff --git a/squid/copy-to-squid.sh b/squid/copy-to-squid.sh
new file mode 100755
index 00000000..9edd6847
--- /dev/null
+++ b/squid/copy-to-squid.sh
@@ -0,0 +1,20 @@
+#! /bin/sh
+
+#
+# Copy the blacklist files from the NFS share to the squid error directories
+#
+
+echo -e "\033[1;32mSuppression des fichiers de squid...\033[1;0m"
+
+rm -v /usr/share/squid/errors/*/ERR_CUSTOM_*
+
+echo -e "\033[1;32mCopie des fichiers vers squid...\033[1;0m"
+
+for i in /usr/share/squid/errors/*
+do
+    cp -v /usr/scripts/squid/errors/* "$i"
+done
+
+echo -e "\033[1;32mRedémarrage de squid...\033[1;0m"
+
+/etc/init.d/squid restart
diff --git a/squid/dump-from-wiki.py b/squid/dump-from-wiki.py
new file mode 100755
index 00000000..4272a540
--- /dev/null
+++ b/squid/dump-from-wiki.py
@@ -0,0 +1,138 @@
+#! /usr/bin/env python
+# -*- coding: iso-8859-15 -*-
+#
+# Dump the disconnection pages from the wiki so they can be installed on squid
+# sudo /usr/scripts/wiki/dump-squid.py
+
+"""
+    MoinMoin - Dump a MoinMoin wiki to static pages
+
+    @copyright: 2002-2004 by Jürgen Hermann
+    @license: GNU GPL, see COPYING for details.
+"""
+
+__version__ = '20040329'
+
+# use this if your moin installation is not in sys.path:
+import sys
+sys.path.insert(0, '/etc/moin')
+
+url_prefix = "."
+
+url_base='VieCrans/PagesDeDeconnexion/'
+outputdir='/usr/scripts/squid/errors'
+
+page_template = u"""<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=%(charset)s">
+<title>%(pagename)s</title>
+<link rel="stylesheet" type="text/css" media="all" charset="utf-8" href="./%(theme)s/css/common.css">
+</head>
+<body>
+<div id="page">
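+<!-- breadcrumb: trail of links back to the page's parents on the wiki -->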
+<div id="pagelocation">
+%(pagenamewithlinks)s
+</div>
+
+%(pagehtml)s
+
+<hr>
+<p>
+Cette page a été extraite du wiki le %(timestamp)s. Vous pouvez l'éditer ou <a href="%(wikilink)s">voir la page originale</a>.
+</p>
+</div>
+</body>
+</html>
+"""
+
+import os, time, StringIO, codecs, shutil, re
+from MoinMoin import config, wikiutil, Page
+from MoinMoin.scripts import _util
+from MoinMoin.request import RequestCLI
+from MoinMoin.action import AttachFile
+
+# remove every previously dumped page (but keep subdirectories such as attach/)
+for f in os.listdir(outputdir):
+    path = os.path.join(outputdir, f)
+    if os.path.isfile(path):
+        os.remove(path)
+
+class MoinDump(_util.Script):
+    def __init__(self):
+        _util.Script.__init__(self, __name__, '')
+
+    def mainloop(self):
+        """ moin-dump's main code. """
+
+        # attachment URLs must point to the local copies made by get_attachment()
+        AttachFile.getAttachUrl = lambda pagename, filename, request, addts=0, escaped=0: (get_attachment(request, pagename, filename, outputdir))
+
+        # Dump the wiki
+        request = RequestCLI(u"wiki.crans.org/")
+        request.form = request.args = request.setup_args()
+
+        # fix url_prefix so we get relative paths in output html
+        request.cfg.url_prefix = url_prefix
+
+        # Get all existing pages in the wiki, keeping only the disconnection pages
+        pages = list(request.rootpage.getPageList(user=''))
+        pages = list(filter(lambda x: re.match(os.path.join(url_base, '.*'), x), pages))
+
+        pages.sort()
+
+        # quote page names in generated URLs the same way as on the filesystem
+        quoteWikinameOriUrl = wikiutil.quoteWikinameURL
+        wikiutil.quoteWikinameURL = lambda pagename, qfn=wikiutil.quoteWikinameFS: (qfn(pagename))
+
+        for pagename in pages:
+            file = wikiutil.quoteWikinameURL(pagename) # we have the same name in URL and FS
+            # Build the page name as a trail of links (could probably be done better)
+            originalpagename = pagename
+            pagename = pagename.replace("/PagesStatiques", "")
+            pagenamewithlinks = [u'']
+            for composant in pagename.split("/"):
+                pagenamewithlinks.append(pagenamewithlinks[-1] + '/' + composant)
+            pagenamewithlinks = u" / ".join(map(lambda x: u'<a href="%s">%s</a>' % (
+                wikiutil.quoteWikinameURL(x[1:]), x[1:].split("/")[-1]), pagenamewithlinks[1:]))
+            _util.log('Writing %s...' % file.split('(2f)')[-1])
+            try:
+                pagehtml = ''
+                page = Page.Page(request, originalpagename)
+                try:
+                    request.reset()
+                    out = StringIO.StringIO()
+                    request.redirect(out)
+                    request.page = page
+                    # render the page as if it were requested from this address
+                    request.remote_addr = '138.231.136.3'
+                    page.send_page(request, count_hit=0, content_only=1)
+                    pagehtml = out.getvalue()
+                    request.redirect()
+                except:
+                    print >>sys.stderr, "*** Caught exception while writing page!"
+                    import traceback
+                    traceback.print_exc()
+            finally:
+                timestamp = time.strftime("%Y-%m-%d %H:%M")
+                filepath = os.path.join(outputdir, file.split('(2f)')[-1])
+                fileout = codecs.open(filepath, 'w', config.charset)
+                fileout.write((page_template % {
+                    'charset': config.charset,
+                    'pagename': pagename,
+                    'pagenamewithlinks': pagenamewithlinks,
+                    'pagehtml': pagehtml.replace('./', 'http://wiki.crans.org/wiki/'),
+                    'timestamp': timestamp,
+                    'wikilink': u"http://wiki.crans.org/%s" % quoteWikinameOriUrl(originalpagename).encode("UTF-8"),
+                    'theme': request.cfg.theme_default,
+                }).replace("./monobook", ".."))
+                fileout.close()
+
+def run():
+    MoinDump().run()
+
+def get_attachment(request, pagename, filename, outputdir):
+    """Copy a page attachment next to the dumped pages and return its relative URL."""
+    source_dir = AttachFile.getAttachDir(request, pagename)
+    source_file = os.path.join(source_dir, filename)
+    if not os.path.isfile(source_file):
+        print "%s n'existe pas !" % source_file
+        return
+    dest_dir = os.path.join(outputdir, "attach")
+    if not os.path.isdir(dest_dir):
+        os.makedirs(dest_dir)
+    dest_file = os.path.join(dest_dir,
+                             "%s_%s" % (wikiutil.quoteWikinameFS(pagename), filename))
+    shutil.copyfile(source_file, dest_file)
+    return os.path.join("..", "attach",
+                        "%s_%s" % (wikiutil.quoteWikinameFS(pagename), filename))
+
+if __name__ == "__main__":
+    run()
diff --git a/squid/generate.sh b/squid/generate.sh
new file mode 100755
index 00000000..e8e3b274
--- /dev/null
+++ b/squid/generate.sh
@@ -0,0 +1,8 @@
+#! /bin/sh
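+
+# Regenerate the squid error pages: dump them from the wiki, then copy them to squid.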
+
+# dump the pages from the wiki
+echo -e "\033[1;32mDump des fichiers du wiki...\033[1;0m"
+/usr/scripts/squid/dump-from-wiki.py
+
+# copy to squid
+ssh -i /usr/scripts/squid/id-squid sila.adm.crans.org