Modified everything for the new API
This commit is contained in:
parent d1de6ffe09
commit cb4ea4d419
5 changed files with 86 additions and 95 deletions
tokenizer.py (23 changed lines)
@@ -2,7 +2,6 @@
-import piexel
 import re


 class Tokenizer:
     def __init__(self, conf, api):
         self.conf = conf
@@ -17,22 +16,22 @@ class Tokenizer:
     def get_tokens_step(self, step):
         return [t for t in self.tk if t['step'] == step]

-    def tokenise(self, filename):
-        found = {}
+    def tokenize(self, filename):
+        found = {'lang':[], 'quality':[], 'subtitle':[]}
         for step in self.steps:
             for tok in self.get_tokens_step(step):
-                if(not bool(tk['case_sensitive'])):
+                if(not bool(int(tok['case_sensitive']))):
                     reg = re.compile(tok['token'], re.IGNORECASE)
                 else:
                     reg = re.compile(tok['token'])
-                if reg.match(filename):
-                    if(tok['lang']):
-                        found['lang'] = tok['lang']
-                    if(tok['quality']):
-                        found['quality'] = tok['quality']
-                    if(tok['subtitle']):
-                        found['subtitles'] = tok['subtitle']
-                reg.sub(' ', filename)
+                if reg.search(filename):
+                    for tok_lang in tok['languages']:
+                        found['lang'].append(tok_lang['value'])
+                    for tok_qual in tok['qualities']:
+                        found['quality'].append(tok_qual['value'])
+                    for tok_sub in tok['subtitle_languages']:
+                        found['subtitle'].append(tok_sub['value'])
+                filename = reg.sub(' ', filename)
         return filename, found
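For reference, a minimal stand-alone sketch of the patched tokenize() flow. It assumes token entries shaped like the new API output suggested by the diff: a regex in 'token', a 'step' number, 'case_sensitive' stored as a string flag, and lists of {'value': ...} dicts under 'languages', 'qualities' and 'subtitle_languages'. The SAMPLE_TOKENS data, the steps argument and the __main__ driver are illustrative only; the real class is wired to conf/api, which is not shown in this hunk.

import re

# Hypothetical token definitions mimicking the shape the new API appears to return.
# Field names follow the diff; the concrete values are made up for the example.
SAMPLE_TOKENS = [
    {
        'token': r'\bVOSTFR\b',
        'step': 1,
        'case_sensitive': '0',
        'languages': [{'value': 'ja'}],
        'qualities': [],
        'subtitle_languages': [{'value': 'fr'}],
    },
    {
        'token': r'\b1080p\b',
        'step': 2,
        'case_sensitive': '0',
        'languages': [],
        'qualities': [{'value': '1080p'}],
        'subtitle_languages': [],
    },
]

class Tokenizer:
    """Stand-alone rendition of the patched class; conf/api wiring omitted."""

    def __init__(self, tokens, steps):
        self.tk = tokens
        self.steps = steps

    def get_tokens_step(self, step):
        # Same filter as in the diff: only tokens registered for this step.
        return [t for t in self.tk if t['step'] == step]

    def tokenize(self, filename):
        found = {'lang': [], 'quality': [], 'subtitle': []}
        for step in self.steps:
            for tok in self.get_tokens_step(step):
                # 'case_sensitive' arrives as a string flag, hence int() first.
                if not bool(int(tok['case_sensitive'])):
                    reg = re.compile(tok['token'], re.IGNORECASE)
                else:
                    reg = re.compile(tok['token'])
                if reg.search(filename):
                    for tok_lang in tok['languages']:
                        found['lang'].append(tok_lang['value'])
                    for tok_qual in tok['qualities']:
                        found['quality'].append(tok_qual['value'])
                    for tok_sub in tok['subtitle_languages']:
                        found['subtitle'].append(tok_sub['value'])
                # Blank out the matched token so later steps see a cleaned name.
                filename = reg.sub(' ', filename)
        return filename, found

if __name__ == '__main__':
    tokenizer = Tokenizer(SAMPLE_TOKENS, steps=[1, 2])
    cleaned, meta = tokenizer.tokenize('Show.S01E01.VOSTFR.1080p.mkv')
    print(cleaned)  # 'Show.S01E01. . .mkv' (matched tokens replaced by spaces)
    print(meta)     # {'lang': ['ja'], 'quality': ['1080p'], 'subtitle': ['fr']}

Two behaviour changes from the old code are visible here: reg.search() matches a token anywhere in the filename instead of only at the start (reg.match()), and the result of reg.sub() is now assigned back to filename, so each step actually strips what the previous regexes consumed.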