Tout modifié pour la nouvelle API

This commit is contained in:
redstorm45 2017-10-29 20:58:32 +01:00
parent d1de6ffe09
commit cb4ea4d419
5 changed files with 86 additions and 95 deletions

View file

@ -2,7 +2,6 @@
import piexel
import re
class Tokenizer:
def __init__(self, conf, api):
self.conf = conf
@ -17,22 +16,22 @@ class Tokenizer:
def get_tokens_step(self, step):
    """Return every token dict in self.tk whose 'step' field equals *step*."""
    return list(filter(lambda tok: tok['step'] == step, self.tk))
def tokenize(self, filename):
    """Scan *filename* against all configured tokens, one step at a time.

    For every token whose regex occurs in the (progressively blanked-out)
    filename, the token's associated language / quality / subtitle values
    are collected, and the matched text is replaced by a space so later
    tokens do not re-match it.

    Parameters:
        filename: the raw release/file name to analyse.

    Returns:
        (filename, found): the filename with all matched tokens blanked
        out, and a dict mapping 'lang', 'quality' and 'subtitle' to lists
        of matched values.
    """
    found = {'lang': [], 'quality': [], 'subtitle': []}
    for step in self.steps:
        for tok in self.get_tokens_step(step):
            # 'case_sensitive' arrives as a string flag (e.g. "0"/"1"),
            # so it must go through int() before its truthiness means
            # anything — bool("0") is True.
            if int(tok['case_sensitive']):
                reg = re.compile(tok['token'])
            else:
                reg = re.compile(tok['token'], re.IGNORECASE)
            # search() finds the token anywhere in the name (match()
            # would only anchor at the start).
            if reg.search(filename):
                for tok_lang in tok['languages']:
                    found['lang'].append(tok_lang['value'])
                for tok_qual in tok['qualities']:
                    found['quality'].append(tok_qual['value'])
                for tok_sub in tok['subtitle_languages']:
                    found['subtitle'].append(tok_sub['value'])
            # re.sub returns a new string — the result must be re-bound
            # (an earlier revision discarded it, leaving filename intact).
            filename = reg.sub(' ', filename)
    return filename, found