Add the pages for preview at

http://rouge.crans.org/squid/ (because I wanted to test w3c)
Fix the html and body tags that were closed twice

darcs-hash:20060307212004-4ec08-804006235ee967db2de4bfe75e22c2faf1dbb7d0.gz
chove 2006-03-07 22:20:04 +01:00
parent e204a5b9e1
commit b0c74b8f8d
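
A minimal sketch of the "closed twice" fix described above: the regular expression is the one added in the diff below, while the sample page string is hypothetical.

# -*- coding: utf-8 -*-
import re

# Hypothetical generated page: the template already closes body and html,
# so the copy written for Squid must not close them a second time.
page = u"<html>\n<head></head>\n<body>Access denied</body>\n</html>\n"

# Same substitution as in the diff: drop each closing tag and the
# newline that follows it.
squid_page = re.sub('</(body|html)>\n?', '', page)

print(squid_page)  # the closing </body> and </html> tags are gone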


@@ -21,6 +21,7 @@ url_prefix = "."
 url_base='VieCrans/PagesDeDeconnexion/'
 outputdir='/usr/scripts/squid/errors'
+outputdir_rouge='/var/www/rouge/squid'
 page_template = u"""<html>
 <head>
@@ -49,8 +50,9 @@ from MoinMoin.request import RequestCLI
 from MoinMoin.action import AttachFile
 # delete all the pages
-for f in os.listdir(outputdir):
-    os.remove(os.path.join(outputdir,f))
+for r in [outputdir, outputdir_rouge]:
+    for f in os.listdir(r):
+        os.remove(os.path.join(r,f))
 
 class MoinDump(_util.Script):
     def __init__(self):
@@ -105,9 +107,14 @@ class MoinDump(_util.Script):
             import traceback
         finally:
             timestamp = time.strftime("%Y-%m-%d %H:%M")
             filepath = os.path.join(outputdir, file.split('(2f)')[-1])
+            filepath_rouge = os.path.join(outputdir_rouge, file.split('(2f)')[-1]+'.html')
             fileout = codecs.open(filepath, 'w', config.charset)
-            fileout.write((page_template % {
+            fileout_rouge = codecs.open(filepath_rouge, 'w', config.charset)
+            contenu = page_template % {
                 'charset': config.charset,
                 'pagename': pagename,
                 'pagenamewithlinks': pagenamewithlinks,
@@ -115,8 +122,16 @@ class MoinDump(_util.Script):
                 'timestamp': timestamp,
                 'wikilink': u"http://wiki.crans.org/%s" % quoteWikinameOriUrl(originalpagename).encode("UTF-8"),
                 'theme': request.cfg.theme_default,
-                }).replace("./monobook", ".."))
+                }
+            contenu = contenu.replace("./monobook", "..")
+            fileout_rouge.write(contenu)
+            contenu = re.sub('</(body|html)>\n?', '', contenu)
+            fileout.write(contenu)
             fileout.close()
+            fileout_rouge.close()
 
 def run():
     MoinDump().run()
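
For completeness, a self-contained sketch of the resulting write path under assumed values; it is not the script itself. Temporary directories stand in for outputdir and outputdir_rouge, and the page name and content are placeholders.

# -*- coding: utf-8 -*-
import codecs
import os
import re
import tempfile

# Temporary directories stand in for the real output paths
# (/usr/scripts/squid/errors and /var/www/rouge/squid).
outputdir = tempfile.mkdtemp()
outputdir_rouge = tempfile.mkdtemp()
charset = 'utf-8'
name = 'ERR_EXAMPLE'   # hypothetical page name
contenu = u"<html>\n<head></head>\n<body>example</body>\n</html>\n"

# Full document for the preview copy, so it can be validated as-is.
fileout_rouge = codecs.open(os.path.join(outputdir_rouge, name + '.html'), 'w', charset)
fileout_rouge.write(contenu)
fileout_rouge.close()

# Squid copy: strip the closing tags so they are not emitted twice.
fileout = codecs.open(os.path.join(outputdir, name), 'w', charset)
fileout.write(re.sub('</(body|html)>\n?', '', contenu))
fileout.close()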