# coding: utf-8
'''
Pierre Cadart
Script for a NAS running Linux.
Uses FTP access to the server.
'''
import posixpath
import re
import time
from ftplib import FTP

import config
import piexel
import tokenizer
import filerule
import file


def ftpwalk(directory, ftp):
    """
    Walks a directory tree on the FTP server (breadth-first),
    yielding (current_dir, dirs, files) tuples, similar to os.walk.
    """
    to_visit = [directory]
    while len(to_visit) > 0:
        current = to_visit.pop(0)
        ftp.cwd(current)
        Lfiles = []
        Ldirs = []
        for name, prop in ftp.mlsd():
            if name.startswith('.'):
                continue
            if prop['type'] == 'dir':
                Ldirs.append(name)
                to_visit.append(current + '/' + name)
            elif prop['type'] == 'file':
                Lfiles.append(name)
        # do not build the full list:
        # yield intermediate results instead
        yield (current, Ldirs, Lfiles)


def visit_folder(domain, api, rules, tok):
    """
    Visits a folder on a server and adds the files it finds to the API.
    """
    # Connect to the server
    print('connect to:', domain['server'])
    ftp = FTP(domain['server'][6:],  # drop the scheme prefix (6 chars, e.g. 'ftp://')
              user=domain['username'], passwd=domain['password'])
    ftp.encoding = 'UTF-8'

    # Initialise the update lists
    L_missing = []       # files not found on the FTP server
    L_unreferenced = []  # files not referenced in the API
    L_moved = []         # files moved on the FTP server

    # Read the files on the FTP server
    Lloc = []
    for path, _, files in ftpwalk(domain['path'], ftp):
        # Add the files matching the extensions
        for f in files:
            match = filerule.match_rules(path + '/' + f, rules)
            if match:
                #print('got match:', match[1], 'name:', path+'/'+f)
                F = file.File(path, f, match[1])
                F.extract_title(tok)
                Lloc.append(F)
    ftp.close()
    print('total:', len(Lloc))

    # Fetch the files from the API
    Lapi = []
    for info in api.get_files(path=domain['server'] + domain['path'], like=1, filable=1):
        nfo = {}
        if not info['filable']:
            print('nfo:', info)
        else:
            year = int(info['filable']['release_date'][:4])
            nfo['YEAR'] = year
        F = file.File(info['path'][len(domain['server']):], info['name'], nfo,
                      api_id=info['filable_id'], api_fileid=info['id'])
        F.extract_title(tok)
        Lapi.append(F)
    print('got api for ', domain['server'] + domain['path'], ':', len(Lapi))
    #print('loc titles:', '|'.join(sorted([f.title for f in Lloc])))
    #print('\n'*2)
    #print('api titles:', '|'.join(sorted([f.title for f in Lapi])))

    # Drop API entries that are not valid files or match no rule (they should not appear)
    Linvalid = [f for f in Lapi
                if (not tok.conf.is_valid_file(f.name))
                or not filerule.match_rules(f.path + '/' + f.name, rules)]
    Lapi = [f for f in Lapi if tok.conf.is_valid_file(f.name)]

    # Compare with the API list
    Lmissing = [f for f in Lapi if f not in Lloc]  # files not found on the FTP server
    Lunref = [f for f in Lloc if f not in Lapi]    # files not referenced in the API

    # Files moved (or copied) on the server
    Lrelink = []  # list of references to change
    for file2 in Lunref:
        for file1 in Lapi:
            if file1.filename_same(file2):
                Lrelink.append((file1, file2))
                break
    for fApi, fLoc in Lrelink:
        if fApi in Lmissing:
            Lmissing.remove(fApi)
        Lunref.remove(fLoc)
    print('moved/copied:', Lrelink)

    # Link files with identical titles to the same film
    Llink = []
    for file1 in Lunref:
        for file2 in Lapi:
            if file1.title == file2.title:
                Llink.append((file1, file2))
                print('D add:', file1, file2, file2.api_id)
                break
    print('doubles:', sorted(Llink, key=lambda f: str(f)))
    for f, fApi in Llink:
        if fApi in Lmissing:
            Lmissing.remove(fApi)
        Lunref.remove(f)

    # Link films by name when possible
    APIfilms = api.get_films()
    API_alltitles = []
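    # Build an index of normalized film titles (spaces removed, lower-cased) so
    # that remaining unreferenced local files can be matched to an existing API
    # film by title alone.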
    for f in APIfilms:
        if f['title']:
            t = f['title'].replace(' ', '').lower()
            if len(t) > 2:  # skip very short titles to avoid spurious matches
                if t not in [e[0] for e in API_alltitles]:
                    API_alltitles.append((t, f['id']))
        if f['title_vo']:
            t = f['title_vo'].replace(' ', '').lower()
            if len(t) > 2:
                if t not in [e[0] for e in API_alltitles]:
                    API_alltitles.append((t, f['id']))
    Llink2 = []
    for film in Lunref:
        for title, fid in API_alltitles:
            if title == film.title:
                Llink2.append((film, fid))
                break  # so the same file is not referenced twice
                #print(film, ' <-> ', [f for f in APIfilms if f['id'] == fid][0]['title'])
    print('easy ref:', sorted(Llink2, key=lambda f: str(f)))
    for f, _ in Llink2:
        Lunref.remove(f)

    print('invalid:\n' + '\n'.join(str(f.api_fileid) + ' ' + str(f) for f in Linvalid))
    print('missing (', len(Lmissing), '):\n',
          '\n'.join([str(e.api_id) + ':' + repr(e) + '(' + e.title + ')'
                     for e in sorted(Lmissing, key=lambda e: e.title)]))
    print('\n'*3)
    print('unreferenced:\n' + '\n'.join(str(f) for f in sorted(Lunref, key=lambda e: e.title)))
    print('\n'*3)
    #print('unreferenced titles:\n', '\n'.join(sorted([f.title for f in Lunref])))

    # Delete the invalid files (folders / files matching no rule)
    for i, film in enumerate(Linvalid):
        print('[' + str(i+1) + '/' + str(len(Linvalid)) + ']' + 'invalid:', film)
        try:
            resp = api.delete_file(id=film.api_fileid)
        except Exception as e:
            print(e)
            print('film ' + film.title + ' not deleted')
            raise Exception('end')
        time.sleep(1)

    # Delete the files that no longer exist
    for i, film in enumerate(Lmissing):
        print('[' + str(i+1) + '/' + str(len(Lmissing)) + ']' + 'missing:', film)
        try:
            resp = api.delete_file(id=film.api_fileid)
        except Exception as e:
            print(e)
            print('film ' + film.title + ' not deleted')
            raise Exception('end')
        time.sleep(1)

    # PUT the renames / moves
    i = 0
    for filmApi, filmLoc in Lrelink:
        i += 1
        print('[' + str(i) + '/' + str(len(Lrelink)) + ']' + 'relink:', filmApi.title)
        try:
            api.put_file(id=filmApi.api_fileid,
                         path=domain['server'] + filmLoc.path, name=filmLoc.name)
            time.sleep(1)
        except Exception as e:
            print(e)
            print('film ' + filmApi.title + ' not edited')
            raise Exception('end')

    # POST the duplicates (files linked to an already referenced film)
    i = 0
    for film, filmAPI in Llink:
        filmID = filmAPI.api_id
        i += 1
        print('[' + str(i) + '/' + str(len(Llink)) + ']' + 'link:', film.title)
        try:
            resp = api.post_file(path=domain['server'] + film.path, name=film.name,
                                 type='film', type_id=filmID)
            if 'id' in resp:
                post_markers(api, film, resp['id'])
            time.sleep(1)
        except Exception as e:
            print(e)
            print('film ' + film.title + ' not added')
            raise Exception('end')

    # POST the files matched to an existing API film by title
    i = 0
    for film, filmID in Llink2:
        i += 1
        print('[' + str(i) + '/' + str(len(Llink2)) + ']' + 'link2:', film.title)
        try:
            resp = api.post_file(path=domain['server'] + film.path, name=film.name,
                                 type='film', type_id=filmID, **film.additional_info())
            if 'id' in resp:
                post_markers(api, film, resp['id'])
            time.sleep(1)
        except Exception as e:
            print(e)
            print('film ' + film.title + ' not added')
            raise Exception('end')

    # POST all remaining local films (requires a TMDb request, which may fail)
    i = 0
    Lcannot_post = []
    for film in Lunref:
        i += 1
        print('[' + str(i) + '/' + str(len(Lunref)) + ']' + 'post:',
              film.title, str(film.info.get('YEAR')))
        try:
            posted = False
            if 'YEAR' in film.info:
                # try with the specified year first
                resp = api.post_film(title=film.title, year=film.info['YEAR'])
                if "id" in resp:  # film id
                    posted = True
            if not posted:
                resp = api.post_film(title=film.title)
                if "id" in resp:  # film id
                    posted = True
            if posted:
                print('post: path=', domain['server'] + film.path)
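                # the film now exists in the API (found or freshly created),
                # so register this file under it and then attach its markers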
                resp = api.post_file(path=domain['server'] + film.path, name=film.name,
                                     type='film', type_id=resp["id"])
                if 'id' in resp:  # file id
                    post_markers(api, film, resp['id'])
            else:
                Lcannot_post.append(film)
            print('response:', resp)
            time.sleep(1)
        except Exception as e:
            print(e)
            print('film ' + film.title + ' not posted')
            raise Exception('end')
    # TODO: handle the films that could not be posted
    print('visit finished')


def post_markers(api, file_, fileid):
    for lang in file_.markers['lang']:
        api.add_language(fileid, lang)
        time.sleep(0.5)
    for qual in file_.markers['quality']:
        api.add_quality(fileid, qual)
        time.sleep(0.5)
    for sub in file_.markers['subtitle']:
        api.add_subtitle(fileid, sub)
        time.sleep(0.5)


def main():
    conf = config.Config()
    api = piexel.Piexel(conf.server, conf.app, conf.token)
    tokens = tokenizer.Tokenizer(conf, api)
    folders = api.get_folders()
    rules = api.get_paths()
    for fold in folders:
        applicable = [filerule.FileRule(re.escape(fold['path']) + '\\/' + r['regex'], conf)
                      for r in rules if int(r['indexer_folder_id']) == fold['id']]
        visit_folder(fold, api, applicable, tokens)
    return


if __name__ == '__main__':
    main()
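
# --- Usage sketch (not executed) ---------------------------------------------
# A minimal way to exercise ftpwalk() on its own, assuming an FTP server that
# supports MLSD; the host, credentials and path below are placeholders.
#
#   ftp = FTP('nas.example.com', user='user', passwd='secret')
#   ftp.encoding = 'UTF-8'
#   for path, dirs, files in ftpwalk('/videos', ftp):
#       print(path, '->', len(files), 'files')
#   ftp.close()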