patch
parent 0ceebb5a57
commit 55e8693a3f

animecli.py | 114 lines changed
@@ -10,13 +10,11 @@ import concurrent.futures
 import configparser
 import os
 
-# Default paths
 DB_DIR = Path.home() / ".animecli"
 DB_PATH = DB_DIR / "animes.db"
 DOWNLOAD_DIR = Path.home() / "MesAnimes"
 CONFIG_PATH = DB_DIR / "config.ini"
 
-# Default values
 DEFAULT_CONFIG = {
     "vitesse_max": "0",
     "telechargements_simultanes": "1",
@@ -85,7 +83,23 @@ def get_all_animes(conn):
     c = conn.cursor()
     return c.execute("SELECT id, titre, url, saison, dernier_episode FROM animes").fetchall()
 
+def extract_master_m3u8_url(page_url):
+    try:
+        resp = requests.get(page_url)
+        resp.raise_for_status()
+        urls = re.findall(r'https?://[^\s\'"]+\.m3u8[^\s\'"]*', resp.text)
+        for url in urls:
+            if "master.m3u8" in url:
+                return url
+        return urls[0] if urls else None
+    except Exception:
+        return None
+
 def get_source_info(url):
+    if url.endswith('.html'):
+        m3u8_url = extract_master_m3u8_url(url)
+        if m3u8_url:
+            url = m3u8_url
     ydl_opts = {
         'quiet': True,
         'skip_download': True,
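
For context, the new helper scans an embed page for any absolute URL ending in .m3u8 (with an optional query string) and prefers the master playlist. A standalone sketch of that matching logic, with a made-up page body:

import re

# Hypothetical embed-page HTML; animecli fetches the real page with requests.
html = '''<script>
player.src = "https://cdn.example.com/v/123/master.m3u8?token=abc";
backup = "https://cdn.example.com/v/123/index-720p.m3u8";
</script>'''

urls = re.findall(r'https?://[^\s\'"]+\.m3u8[^\s\'"]*', html)
master = next((u for u in urls if "master.m3u8" in u), urls[0] if urls else None)
print(master)  # -> https://cdn.example.com/v/123/master.m3u8?token=abc
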
@@ -98,39 +112,63 @@ def get_source_info(url):
         if not formats:
             return None
         best = max(formats, key=lambda f: f.get('height', 0) or 0)
-        taille = best.get('filesize') or best.get('filesize_approx')
-        taille_mo = f"{taille // (1024*1024)} Mo" if taille else "?"
         qualite = f"{best.get('height', '?')}p"
         return {
             'qualite': qualite,
-            'taille': taille_mo,
-            'title': info.get('title', ''),
             'url': url
         }
     except Exception:
         return None
 
-def choisir_source(stdscr, sources_infos):
-    sel = 0
-    while True:
-        stdscr.clear()
-        stdscr.addstr(0, 0, "Choisissez la source pour cet épisode :")
-        for idx, info in enumerate(sources_infos):
-            line = f"{idx+1}. {info['url']} ({info['qualite']}, {info['taille']})"
-            if idx == sel:
-                stdscr.attron(curses.color_pair(1))
-                safe_addstr(stdscr, 2 + idx, 2, line)
-                stdscr.attroff(curses.color_pair(1))
-            else:
-                safe_addstr(stdscr, 2 + idx, 2, line)
-        stdscr.refresh()
-        k = stdscr.getch()
-        if k == curses.KEY_UP and sel > 0:
-            sel -= 1
-        elif k == curses.KEY_DOWN and sel < len(sources_infos) - 1:
-            sel += 1
-        elif k in [curses.KEY_ENTER, 10, 13]:
-            return sel
+def choisir_source_globale(stdscr, episode_data):
+    # Group sources by index (site)
+    sources_by_site = []
+    for i in range(len(episode_data[0][0])):  # number of sources per episode
+        urls = []
+        for ep in episode_data:
+            if len(ep[0]) > i:
+                urls.append(ep[0][i])
+        if urls:
+            sources_by_site.append(urls)
+    # Use the first URL of each site for the quality probe
+    preview_urls = [urls[0] for urls in sources_by_site]
+    infos = [None] * len(preview_urls)
+    with concurrent.futures.ThreadPoolExecutor() as executor:
+        future_to_idx = {executor.submit(get_source_info, url): i for i, url in enumerate(preview_urls)}
+        done = set()
+        sel = 0
+        while True:
+            stdscr.clear()
+            stdscr.addstr(0, 0, "Choisissez la source à utiliser pour tous les épisodes :")
+            for idx, url in enumerate(preview_urls):
+                info = infos[idx]
+                domain = re.sub(r'^https?://(www\.)?', '', url).split('/')[0]
+                if info:
+                    line = f"{idx+1}. {domain} ({info['qualite']})"
+                else:
+                    line = f"{idx+1}. {domain} (chargement...)"
+                if idx == sel:
+                    stdscr.attron(curses.color_pair(1))
+                    safe_addstr(stdscr, 2 + idx, 2, line)
+                    stdscr.attroff(curses.color_pair(1))
+                else:
+                    safe_addstr(stdscr, 2 + idx, 2, line)
+            stdscr.refresh()
+            try:
+                for future in concurrent.futures.as_completed(future_to_idx, timeout=0.1):
+                    idx = future_to_idx[future]
+                    if idx not in done:
+                        infos[idx] = future.result()
+                        done.add(idx)
+            except concurrent.futures.TimeoutError:
+                pass
+            k = stdscr.getch()
+            if k == curses.KEY_UP and sel > 0:
+                sel -= 1
+            elif k == curses.KEY_DOWN and sel < len(preview_urls) - 1:
+                sel += 1
+            elif k in [curses.KEY_ENTER, 10, 13]:
+                return sel, sources_by_site[sel]
 
 def extract_episode_sources(url_page):
     resp = requests.get(url_page)
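
The selector redraws while the quality probes are still running: each pass over the UI loop drains whatever futures as_completed can yield within 0.1 s and swallows the TimeoutError for the rest. A minimal sketch of that polling pattern outside curses (probe and its delays are invented stand-ins for get_source_info):

import concurrent.futures
import time

def probe(i):  # hypothetical stand-in for get_source_info
    time.sleep(0.2 * i)
    return f"info-{i}"

infos = {}
with concurrent.futures.ThreadPoolExecutor() as executor:
    future_to_idx = {executor.submit(probe, i): i for i in range(4)}
    while len(infos) < len(future_to_idx):
        # In animecli this is where the menu is redrawn and a key is read.
        try:
            for fut in concurrent.futures.as_completed(future_to_idx, timeout=0.1):
                infos[future_to_idx[fut]] = fut.result()
        except concurrent.futures.TimeoutError:
            pass
print(infos)  # fills in as results arrive: {0: 'info-0', 1: 'info-1', ...}

Note that stdscr.getch() blocks by default, so the pending "(chargement...)" entries only refresh on the next keypress unless curses input is made non-blocking (e.g. via stdscr.timeout).
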
@@ -170,6 +208,10 @@ def format_dernier(dernier_url, saison):
     return f"Saison {saison}, épisode {ep}, {dernier_url}"
 
 def telecharger_episode(url, saison_folder, filename, qualite):
+    if url.endswith('.html'):
+        m3u8_url = extract_master_m3u8_url(url)
+        if m3u8_url:
+            url = m3u8_url
     ydl_opts = {
         "outtmpl": str(saison_folder / filename),
         "format": f"bestvideo[height<={qualite.rstrip('p')}]+bestaudio/best",
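
telecharger_episode now resolves .html pages to their master.m3u8 first, then hands the URL to the downloader with a format selector that caps video height at the chosen quality and merges in the best audio, falling back to the best single stream. A minimal sketch of the equivalent call, assuming the project drives yt-dlp (the ydl_opts shape matches its YoutubeDL API; paths and URL are placeholders):

from pathlib import Path
from yt_dlp import YoutubeDL

qualite = "720p"  # as reported by get_source_info
saison_folder = Path("/tmp/MesAnimes/Saison 01")
ydl_opts = {
    "outtmpl": str(saison_folder / "S01E01.%(ext)s"),
    # '720p' -> '720'; selects streams no taller than 720 pixels
    "format": f"bestvideo[height<={qualite.rstrip('p')}]+bestaudio/best",
}
with YoutubeDL(ydl_opts) as ydl:
    ydl.download(["https://cdn.example.com/v/123/master.m3u8"])
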
@@ -224,12 +266,15 @@ def handle_multi_download(stdscr, conn):
         stdscr.addstr(0, 0, f"Erreur récupération des sources: {e}")
         stdscr.getch()
         return
-    if not episode_data:
+    if not episode_data or not episode_data[0][0]:
         stdscr.clear()
-        stdscr.addstr(0, 0, "Aucun épisode trouvé.")
+        stdscr.addstr(0, 0, "Aucune source trouvée.")
         stdscr.getch()
         return
 
+    # Global source (site) selection
+    sel_src, urls_par_source = choisir_source_globale(stdscr, episode_data)
+
     selected = [False] * len(episode_data)
     cursor = 0
     scroll_offset = 0
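
For reference, episode_data is a list of per-episode tuples whose first element is that episode's list of source URLs, one per mirror site, which is why the emptiness guard now also checks episode_data[0][0]. choisir_source_globale effectively transposes that structure before probing. A toy illustration with invented URLs (the second tuple element stands in for whatever extract_episode_sources returns alongside the sources; None here):

episode_data = [
    (["https://site-a.tld/ep1.html", "https://site-b.tld/ep1.html"], None),
    (["https://site-a.tld/ep2.html", "https://site-b.tld/ep2.html"], None),
    (["https://site-a.tld/ep3.html"], None),  # site B lacks episode 3
]

sources_by_site = []
for i in range(len(episode_data[0][0])):
    urls = [ep[0][i] for ep in episode_data if len(ep[0]) > i]
    if urls:
        sources_by_site.append(urls)

print(sources_by_site[0])  # all three episodes on site A
print(sources_by_site[1])  # only episodes 1 and 2 exist on site B
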
@@ -302,16 +347,9 @@ def handle_multi_download(stdscr, conn):
     base_folder = Path(CONFIG["download_dir"]) / titre
     download_queue = []
     for idx in to_download:
-        sources, _ = episode_data[idx]
-        infos = []
-        for src in sources:
-            info = get_source_info(src)
-            if info:
-                infos.append(info)
-        if not infos:
+        if idx >= len(urls_par_source):
             continue
-        sel_src = choisir_source(stdscr, infos) if len(infos) > 1 else 0
-        chosen_url = infos[sel_src]['url']
+        chosen_url = urls_par_source[idx]
         saison_str = f"S{int(saison):02d}"
         ep_str = f"E{idx+1:02d}"
         saison_folder = base_folder / f"Saison {int(saison):02d}"
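
The rewritten queue loop pairs episode index idx directly with urls_par_source[idx] (the chosen site's URL list), skipping indices beyond that list; since the grouping step appends compactly, this pairing assumes any missing episodes sit at the tail. A quick sketch of how the shown naming pieces format, with placeholder values (the final filename assembly lies outside this hunk):

from pathlib import Path

saison, idx = "1", 4  # hypothetical: season 1, fifth selected episode
base_folder = Path("/tmp/MesAnimes/Mon Anime")
saison_str = f"S{int(saison):02d}"
ep_str = f"E{idx+1:02d}"
saison_folder = base_folder / f"Saison {int(saison):02d}"
print(saison_str, ep_str, saison_folder)  # S01 E05 /tmp/MesAnimes/Mon Anime/Saison 01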