scripts/squid/dump-from-wiki.py
chove c674a73699 Small scripts to regenerate squid's disconnection pages from the wiki.
On rouge, run:
sudo /usr/scripts/squid/generate.sh
 - dumps the wiki to the NFS share
 - connects to sila with a forced-command SSH key
 - copies the files into the right squid directory (all languages)
 - restarts squid
(see the pipeline sketch below)

darcs-hash:20060306220109-4ec08-5e35022d0d91f7339a3f2f3b99b51c2d49d1b2b0.gz
2006-03-06 23:01:09 +01:00
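
The commit message only names the steps; as a rough illustration, here is a
minimal sketch of such a driver, written in Python. The key path, the remote
host behaviour and the exit handling are assumptions drawn from the message
above, not the real generate.sh.

    #!/usr/bin/env python
    # Hypothetical sketch of the generate.sh pipeline described above;
    # paths, key names and the remote behaviour are assumptions, not the
    # real Crans configuration.
    import sys
    import subprocess

    def regenerate():
        # 1. dump the wiki pages to the NFS-shared dump directory
        if subprocess.call(["/usr/scripts/squid/dump-from-wiki.py"]) != 0:
            sys.exit("wiki dump failed")
        # 2-4. connect to sila using a forced-command key; the forced command
        # (assumed) copies the dumped files into squid's error-page
        # directories for every language, then restarts squid
        if subprocess.call(["ssh", "-i", "/root/.ssh/squid-dump", "sila"]) != 0:
            sys.exit("remote update on sila failed")

    if __name__ == "__main__":
        regenerate()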

138 lines | 5.2 KiB | Python | executable file

#! /usr/bin/env python
# -*- coding: iso-8859-15 -*-
#
# Dumps the disconnection pages so they can be installed on squid
# sudo /usr/scripts/wiki/dump-squid.py
"""
MoinMoin - Dump a MoinMoin wiki to static pages
@copyright: 2002-2004 by Jürgen Hermann <jh@web.de>
@license: GNU GPL, see COPYING for details.
"""
__version__ = '20040329'
# use this if your moin installation is not in sys.path:
import sys
sys.path.insert(0, '/etc/moin')
url_prefix = "."
url_base = 'VieCrans/PagesDeDeconnexion/'
outputdir = '/usr/scripts/squid/errors'
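# Only pages under url_base are dumped; outputdir is wiped and refilled
# on every run (see the cleanup loop below)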
page_template = u"""<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=%(charset)s">
<title>%(pagename)s</title>
<link rel="stylesheet" type="text/css" href="http://wiki.crans.org/wiki/blackliste/css/common.css">
<link rel="stylesheet" type="text/css" href="http://wiki.crans.org/wiki/blackliste/css/blackliste.css">
</head>
<body>
<h1>%(pagenamewithlinks)s</h1>
%(pagehtml)s
<p class="creation">
Cette page a été extraite du wiki le %(timestamp)s. Vous pouvez l'<a href="%(wikilink)s?action=edit">éditer</a> ou <a href="%(wikilink)s">voir</a> la page originale.
</p>
</body>
</html>
"""
import os, time, StringIO, codecs, shutil, re
from MoinMoin import config, wikiutil, Page
from MoinMoin.scripts import _util
from MoinMoin.request import RequestCLI
from MoinMoin.action import AttachFile
# remove all previously dumped pages
for f in os.listdir(outputdir):
    os.remove(os.path.join(outputdir, f))


class MoinDump(_util.Script):

    def __init__(self):
        _util.Script.__init__(self, __name__, '')

    def mainloop(self):
        """ moin-dump's main code. """
        AttachFile.getAttachUrl = lambda pagename, filename, request, addts=0, escaped=0: (
            get_attachment(request, pagename, filename, outputdir))
        # Dump the wiki
        request = RequestCLI(u"wiki.crans.org/")
        request.form = request.args = request.setup_args()
        # fix url_prefix so we get relative paths in output html
        request.cfg.url_prefix = url_prefix
        # Get all existing pages in the wiki
        pages = list(request.rootpage.getPageList(user=''))
        pages = list(filter(lambda x: re.match(os.path.join(url_base, '.*'), x), pages))
        pages.sort()
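        # Replace MoinMoin's URL quoting with filesystem quoting so that
        # links between dumped pages point at the dumped file names; keep
        # the original to build links back to the live wiki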
        quoteWikinameOriUrl = wikiutil.quoteWikinameURL
        wikiutil.quoteWikinameURL = lambda pagename, qfn=wikiutil.quoteWikinameFS: (qfn(pagename))
        for pagename in pages:
            file = wikiutil.quoteWikinameURL(pagename)  # we have the same name in URL and FS
            # Build the page name as a chain of links (could no doubt be done better)
            originalpagename = pagename
            pagename = pagename.replace("/PagesStatiques", "")
            pagenamewithlinks = [u'']
            for composant in pagename.split("/"):
                pagenamewithlinks.append(pagenamewithlinks[-1] + '/' + composant)
            pagenamewithlinks = u" / ".join(map(lambda x: u'<a href="/wiki/%s">%s</a>' % (
                wikiutil.quoteWikinameURL(x[1:]), x[1:].split("/")[-1]), pagenamewithlinks[1:]))
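            # '(2f)' is the filesystem quoting of '/', so only the last path
            # component is used for the output file name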
            _util.log('Writing %s...' % file.split('(2f)')[-1])
            try:
                pagehtml = ''
                page = Page.Page(request, originalpagename)
                try:
                    request.reset()
                    out = StringIO.StringIO()
                    request.redirect(out)
                    request.page = page
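                    # Pretend the request comes from this address, presumably so
                    # IP-based access checks on the wiki let the page render fully
                    # (an assumption; the script does not say why this IP is used)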
                    request.remote_addr = '138.231.136.3'
                    page.send_page(request, count_hit=0, content_only=1)
                    pagehtml = out.getvalue()
                    request.redirect()
                except:
                    print >>sys.stderr, "*** Caught exception while writing page!"
                    import traceback
                    traceback.print_exc()
            finally:
                timestamp = time.strftime("%Y-%m-%d %H:%M")
                filepath = os.path.join(outputdir, file.split('(2f)')[-1])
                fileout = codecs.open(filepath, 'w', config.charset)
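                # Fill in the template: relative links in the page body are
                # rewritten to absolute wiki URLs, and monobook theme paths
                # are pointed one directory up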
                fileout.write((page_template % {
                    'charset': config.charset,
                    'pagename': pagename,
                    'pagenamewithlinks': pagenamewithlinks,
                    'pagehtml': pagehtml.replace('./', 'http://wiki.crans.org/wiki/'),
                    'timestamp': timestamp,
                    'wikilink': u"http://wiki.crans.org/%s" % quoteWikinameOriUrl(originalpagename).encode("UTF-8"),
                    'theme': request.cfg.theme_default,
                }).replace("./monobook", ".."))
                fileout.close()


def run():
    MoinDump().run()


def get_attachment(request, pagename, filename, outputdir):
    """Copy a page attachment into the static dump"""
    source_dir = AttachFile.getAttachDir(request, pagename)
    source_file = os.path.join(source_dir, filename)
    if not os.path.isfile(source_file):
        print "%s n'existe pas !" % source_file
        return
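    # Copy the attachment under <outputdir>/attach and return the relative
    # URL that the dumped HTML should use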
    dest_file = os.path.join(outputdir, "attach",
                             "%s_%s" % (wikiutil.quoteWikinameFS(pagename), filename))
    shutil.copyfile(source_file, dest_file)
    return os.path.join("..", "attach",
                        "%s_%s" % (wikiutil.quoteWikinameFS(pagename), filename))


if __name__ == "__main__":
    run()