Projekt aus Docker-Image videoconverter:2.9 extrahiert. Enthält zweiphasigen Import-Workflow mit Serien-Zuordnung. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1033 lines
38 KiB
Python
1033 lines
38 KiB
Python
"""TheTVDB API v4 Integration fuer Serien-Metadaten"""
|
|
import logging
|
|
import os
|
|
from typing import Optional, TYPE_CHECKING
|
|
|
|
import aiohttp
|
|
import aiomysql
|
|
|
|
from app.config import Config
|
|
|
|
if TYPE_CHECKING:
|
|
from app.services.library import LibraryService
|
|
|
|
# tvdb-v4-official is an optional dependency - the service degrades
# gracefully (TVDB features disabled) when it is missing.
try:
    import tvdb_v4_official
except ImportError:
    TVDB_AVAILABLE = False
    logging.warning("tvdb-v4-official nicht installiert - TVDB deaktiviert")
else:
    TVDB_AVAILABLE = True


# Maps TVDB artwork type ids to human-readable type names.
ARTWORK_TYPE_MAP = {
    1: "banner",
    2: "poster",
    3: "fanart",
    5: "icon",
    6: "season_poster",
    7: "season_banner",
    22: "clearlogo",
    23: "clearart",
}
|
|
|
|
|
|
class TVDBService:
    """Client for the TVDB API v4 providing series and movie metadata."""

    def __init__(self, config: Config):
        # Application configuration (supplies the "library" settings section).
        self.config = config
        # Lazily instantiated tvdb_v4_official.TVDB client; see _get_client().
        self._client = None
        # MySQL pool shared with LibraryService; injected via set_db_pool().
        self._db_pool: Optional[aiomysql.Pool] = None
|
|
|
|
@property
|
|
def _api_key(self) -> str:
|
|
return self.config.settings.get("library", {}).get("tvdb_api_key", "")
|
|
|
|
@property
|
|
def _pin(self) -> str:
|
|
return self.config.settings.get("library", {}).get("tvdb_pin", "")
|
|
|
|
@property
|
|
def _language(self) -> str:
|
|
"""Konfigurierte TVDB-Sprache (Standard: deu)"""
|
|
return self.config.settings.get("library", {}).get(
|
|
"tvdb_language", "deu"
|
|
)
|
|
|
|
@property
|
|
def is_configured(self) -> bool:
|
|
return TVDB_AVAILABLE and bool(self._api_key)
|
|
|
|
def set_db_pool(self, pool: aiomysql.Pool) -> None:
|
|
"""Setzt den DB-Pool (geteilt mit LibraryService)"""
|
|
self._db_pool = pool
|
|
|
|
def _get_client(self):
|
|
"""Erstellt oder gibt TVDB-Client zurueck"""
|
|
if not TVDB_AVAILABLE:
|
|
return None
|
|
if not self._api_key:
|
|
return None
|
|
|
|
if self._client is None:
|
|
try:
|
|
if self._pin:
|
|
self._client = tvdb_v4_official.TVDB(
|
|
self._api_key, pin=self._pin
|
|
)
|
|
else:
|
|
self._client = tvdb_v4_official.TVDB(self._api_key)
|
|
logging.info("TVDB Client verbunden")
|
|
except Exception as e:
|
|
logging.error(f"TVDB Verbindung fehlgeschlagen: {e}")
|
|
return None
|
|
|
|
return self._client
|
|
|
|
# === DB-Tabellen ===
|
|
|
|
async def init_db(self) -> None:
|
|
"""Erstellt TVDB-Cache-Tabellen"""
|
|
if not self._db_pool:
|
|
return
|
|
try:
|
|
async with self._db_pool.acquire() as conn:
|
|
async with conn.cursor() as cur:
|
|
await cur.execute("""
|
|
CREATE TABLE IF NOT EXISTS tvdb_cast_cache (
|
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
|
series_tvdb_id INT NOT NULL,
|
|
person_name VARCHAR(256) NOT NULL,
|
|
character_name VARCHAR(256),
|
|
sort_order INT DEFAULT 0,
|
|
image_url VARCHAR(512),
|
|
person_image_url VARCHAR(512),
|
|
cached_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
INDEX idx_series (series_tvdb_id)
|
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
|
|
""")
|
|
|
|
await cur.execute("""
|
|
CREATE TABLE IF NOT EXISTS tvdb_artwork_cache (
|
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
|
series_tvdb_id INT NOT NULL,
|
|
artwork_type VARCHAR(32) NOT NULL,
|
|
image_url VARCHAR(512) NOT NULL,
|
|
thumbnail_url VARCHAR(512),
|
|
width INT DEFAULT 0,
|
|
height INT DEFAULT 0,
|
|
is_primary TINYINT DEFAULT 0,
|
|
local_path VARCHAR(1024) NULL,
|
|
cached_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
INDEX idx_series (series_tvdb_id),
|
|
INDEX idx_type (artwork_type)
|
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
|
|
""")
|
|
|
|
# Neue Spalten in library_series (falls noch nicht vorhanden)
|
|
try:
|
|
await cur.execute(
|
|
"ALTER TABLE library_series "
|
|
"ADD COLUMN metadata_path VARCHAR(1024) NULL"
|
|
)
|
|
except Exception:
|
|
pass # Spalte existiert bereits
|
|
try:
|
|
await cur.execute(
|
|
"ALTER TABLE library_series "
|
|
"ADD COLUMN genres VARCHAR(512) NULL"
|
|
)
|
|
except Exception:
|
|
pass
|
|
|
|
logging.info("TVDB-Cache-Tabellen initialisiert")
|
|
except Exception as e:
|
|
logging.error(f"TVDB-Tabellen erstellen fehlgeschlagen: {e}")
|
|
|
|
@staticmethod
|
|
def _serialize_row(row: dict) -> dict:
|
|
"""DB-Row JSON-kompatibel machen (datetime -> str)"""
|
|
result = {}
|
|
for k, v in row.items():
|
|
if hasattr(v, "isoformat"):
|
|
result[k] = str(v)
|
|
else:
|
|
result[k] = v
|
|
return result
|
|
|
|
# === Suche ===
|
|
|
|
def _localize_search_result(self, item: dict) -> tuple[str, str]:
|
|
"""Lokalisierten Namen + Overview aus Suchergebnis extrahieren.
|
|
Nutzt konfigurierte Sprache, Fallback auf Englisch."""
|
|
lang = self._language
|
|
name = item.get("name", "")
|
|
overview = item.get("overview", "")
|
|
|
|
# translations = {"deu": "Deutscher Titel", "eng": "English", ...}
|
|
trans = item.get("translations") or {}
|
|
if isinstance(trans, dict):
|
|
name = trans.get(lang) or name
|
|
|
|
# overviews = {"deu": "Deutsche Beschreibung", "eng": "English", ...}
|
|
overviews = item.get("overviews") or {}
|
|
if isinstance(overviews, dict):
|
|
overview = (overviews.get(lang)
|
|
or overviews.get("eng")
|
|
or overview)
|
|
|
|
return name, overview
|
|
|
|
async def search_series(self, query: str,
|
|
language: Optional[str] = None) -> list[dict]:
|
|
"""Sucht Serien auf TVDB.
|
|
|
|
Args:
|
|
query: Suchbegriff
|
|
language: Sprache fuer Ergebnisse (z.B. 'deu', 'eng').
|
|
None = konfigurierte Sprache verwenden.
|
|
"""
|
|
client = self._get_client()
|
|
if not client:
|
|
return []
|
|
|
|
# Sprache fuer Lokalisierung
|
|
display_lang = language or self._language
|
|
|
|
try:
|
|
results = client.search(query, type="series")
|
|
if not results:
|
|
return []
|
|
|
|
series_list = []
|
|
for item in results[:20]: # 20 statt 10 Ergebnisse
|
|
# Lokalisierung mit gewaehlter Sprache
|
|
name = item.get("name", "")
|
|
overview = item.get("overview", "")
|
|
|
|
trans = item.get("translations") or {}
|
|
if isinstance(trans, dict):
|
|
# Gewaehlte Sprache oder Original
|
|
name = trans.get(display_lang) or name
|
|
|
|
overviews = item.get("overviews") or {}
|
|
if isinstance(overviews, dict):
|
|
overview = (overviews.get(display_lang)
|
|
or overviews.get("eng")
|
|
or overview)
|
|
|
|
# Original-Name fuer Anzeige wenn anders
|
|
original_name = item.get("name", "")
|
|
|
|
series_list.append({
|
|
"tvdb_id": item.get("tvdb_id") or item.get("objectID"),
|
|
"name": name,
|
|
"original_name": original_name if original_name != name else "",
|
|
"overview": overview,
|
|
"first_air_date": item.get("first_air_time")
|
|
or item.get("firstAirDate", ""),
|
|
"year": item.get("year", ""),
|
|
"status": item.get("status", ""),
|
|
"poster": item.get("thumbnail")
|
|
or item.get("image_url", ""),
|
|
})
|
|
return series_list
|
|
except Exception as e:
|
|
logging.error(f"TVDB Suche fehlgeschlagen: {e}")
|
|
return []
|
|
|
|
async def search_movies(self, query: str) -> list[dict]:
|
|
"""Sucht Filme auf TVDB"""
|
|
client = self._get_client()
|
|
if not client:
|
|
return []
|
|
|
|
try:
|
|
results = client.search(query, type="movie")
|
|
if not results:
|
|
return []
|
|
|
|
movie_list = []
|
|
for item in results[:10]:
|
|
name, overview = self._localize_search_result(item)
|
|
movie_list.append({
|
|
"tvdb_id": item.get("tvdb_id") or item.get("objectID"),
|
|
"name": name,
|
|
"overview": overview,
|
|
"year": item.get("year", ""),
|
|
"poster": item.get("thumbnail")
|
|
or item.get("image_url", ""),
|
|
})
|
|
return movie_list
|
|
except Exception as e:
|
|
logging.error(f"TVDB Film-Suche fehlgeschlagen: {e}")
|
|
return []
|
|
|
|
# === Serien-Info ===
|
|
|
|
async def get_series_info(self, tvdb_id: int) -> Optional[dict]:
|
|
"""Holt Serien-Details von TVDB (Dict-basiert)"""
|
|
client = self._get_client()
|
|
if not client:
|
|
return None
|
|
|
|
try:
|
|
series = client.get_series_extended(tvdb_id)
|
|
if not series:
|
|
return None
|
|
|
|
# API gibt Dict zurueck
|
|
poster_url = series.get("image", "")
|
|
name = series.get("name", "")
|
|
overview = series.get("overview", "")
|
|
|
|
# Lokalisierte Uebersetzung holen (Fallback Englisch)
|
|
pref_lang = self._language
|
|
for lang in (pref_lang, "eng"):
|
|
try:
|
|
trans = client.get_series_translation(tvdb_id, lang)
|
|
if trans:
|
|
if not overview and trans.get("overview"):
|
|
overview = trans["overview"]
|
|
if lang == pref_lang:
|
|
if trans.get("overview"):
|
|
overview = trans["overview"]
|
|
if trans.get("name"):
|
|
name = trans["name"]
|
|
if overview:
|
|
break
|
|
except Exception:
|
|
pass
|
|
|
|
artworks = series.get("artworks") or []
|
|
for art in artworks:
|
|
art_type = art.get("type", {})
|
|
type_id = art_type.get("id", 0) if isinstance(art_type, dict) else art_type
|
|
if type_id == 2:
|
|
poster_url = art.get("image", poster_url)
|
|
break
|
|
|
|
# Staffeln zaehlen
|
|
seasons = []
|
|
for s in (series.get("seasons") or []):
|
|
s_type = s.get("type", {})
|
|
type_id = s_type.get("id", 0) if isinstance(s_type, dict) else 0
|
|
# Typ 1 = official
|
|
if type_id == 1 or not s_type:
|
|
s_num = s.get("number", 0)
|
|
if s_num and s_num > 0:
|
|
seasons.append(s_num)
|
|
|
|
# Genres
|
|
genres = []
|
|
for g in (series.get("genres") or []):
|
|
gname = g.get("name", "")
|
|
if gname:
|
|
genres.append(gname)
|
|
|
|
# Status
|
|
status_obj = series.get("status", {})
|
|
status_name = ""
|
|
if isinstance(status_obj, dict):
|
|
status_name = status_obj.get("name", "")
|
|
elif isinstance(status_obj, str):
|
|
status_name = status_obj
|
|
|
|
return {
|
|
"tvdb_id": tvdb_id,
|
|
"name": name,
|
|
"overview": overview,
|
|
"first_aired": series.get("firstAired", ""),
|
|
"status": status_name,
|
|
"poster_url": poster_url,
|
|
"total_seasons": len(seasons),
|
|
"genres": ", ".join(genres),
|
|
}
|
|
except Exception as e:
|
|
logging.error(f"TVDB Serien-Info fehlgeschlagen (ID {tvdb_id}): {e}")
|
|
return None
|
|
|
|
# === Film-Info ===
|
|
|
|
async def get_movie_info(self, tvdb_id: int) -> Optional[dict]:
|
|
"""Holt Film-Details von TVDB"""
|
|
client = self._get_client()
|
|
if not client:
|
|
return None
|
|
|
|
try:
|
|
movie = client.get_movie_extended(tvdb_id)
|
|
if not movie:
|
|
return None
|
|
|
|
poster_url = movie.get("image", "")
|
|
name = movie.get("name", "")
|
|
overview = movie.get("overview", "")
|
|
|
|
# Lokalisierte Uebersetzung holen (Fallback Englisch)
|
|
pref_lang = self._language
|
|
for lang in (pref_lang, "eng"):
|
|
try:
|
|
trans = client.get_movie_translation(tvdb_id, lang)
|
|
if trans:
|
|
if not overview and trans.get("overview"):
|
|
overview = trans["overview"]
|
|
if lang == pref_lang and trans.get("name"):
|
|
name = trans["name"]
|
|
if overview:
|
|
break
|
|
except Exception:
|
|
pass
|
|
|
|
# Genres
|
|
genres = []
|
|
for g in (movie.get("genres") or []):
|
|
gname = g.get("name", "")
|
|
if gname:
|
|
genres.append(gname)
|
|
|
|
# Status
|
|
status_obj = movie.get("status", {})
|
|
status_name = ""
|
|
if isinstance(status_obj, dict):
|
|
status_name = status_obj.get("name", "")
|
|
elif isinstance(status_obj, str):
|
|
status_name = status_obj
|
|
|
|
# Jahr
|
|
year = movie.get("year")
|
|
if not year:
|
|
first_release = movie.get("first_release", {})
|
|
if isinstance(first_release, dict):
|
|
date_str = first_release.get("date", "")
|
|
if date_str and len(date_str) >= 4:
|
|
try:
|
|
year = int(date_str[:4])
|
|
except ValueError:
|
|
pass
|
|
|
|
return {
|
|
"tvdb_id": tvdb_id,
|
|
"name": name,
|
|
"overview": overview,
|
|
"year": year,
|
|
"poster_url": poster_url,
|
|
"genres": ", ".join(genres),
|
|
"runtime": movie.get("runtime"),
|
|
"status": status_name,
|
|
}
|
|
except Exception as e:
|
|
logging.error(
|
|
f"TVDB Film-Info fehlgeschlagen (ID {tvdb_id}): {e}"
|
|
)
|
|
return None
|
|
|
|
async def match_and_update_movie(self, movie_id: int,
|
|
tvdb_id: int,
|
|
library_service: 'LibraryService'
|
|
) -> dict:
|
|
"""TVDB-ID einem Film zuordnen und Infos aktualisieren"""
|
|
info = await self.get_movie_info(tvdb_id)
|
|
if not info:
|
|
return {"error": "TVDB Film-Info nicht gefunden"}
|
|
|
|
pool = self._db_pool
|
|
if not pool:
|
|
return {"error": "Keine DB-Verbindung"}
|
|
|
|
try:
|
|
async with pool.acquire() as conn:
|
|
async with conn.cursor() as cur:
|
|
await cur.execute("""
|
|
UPDATE library_movies SET
|
|
tvdb_id = %s,
|
|
title = %s,
|
|
overview = %s,
|
|
year = %s,
|
|
poster_url = %s,
|
|
genres = %s,
|
|
runtime = %s,
|
|
status = %s,
|
|
last_updated = NOW()
|
|
WHERE id = %s
|
|
""", (
|
|
tvdb_id,
|
|
info["name"],
|
|
info.get("overview", ""),
|
|
info.get("year"),
|
|
info.get("poster_url", ""),
|
|
info.get("genres", ""),
|
|
info.get("runtime"),
|
|
info.get("status", ""),
|
|
movie_id,
|
|
))
|
|
|
|
return {
|
|
"success": True,
|
|
"name": info["name"],
|
|
"year": info.get("year"),
|
|
}
|
|
except Exception as e:
|
|
logging.error(f"TVDB Film-Match fehlgeschlagen: {e}")
|
|
return {"error": str(e)}
|
|
|
|
# === Cast / Characters ===
|
|
|
|
async def get_series_characters(self, tvdb_id: int) -> list[dict]:
|
|
"""Holt Cast/Darsteller von TVDB.
|
|
Prueft zuerst DB-Cache, dann TVDB-API."""
|
|
# Cache pruefen
|
|
cached = await self._get_cached_cast(tvdb_id)
|
|
if cached:
|
|
return cached
|
|
|
|
client = self._get_client()
|
|
if not client:
|
|
return []
|
|
|
|
try:
|
|
series = client.get_series_extended(tvdb_id)
|
|
if not series:
|
|
return []
|
|
|
|
characters = []
|
|
for c in (series.get("characters") or []):
|
|
# type=3 ist Actor
|
|
if c.get("type") != 3:
|
|
continue
|
|
characters.append({
|
|
"person_name": c.get("personName", ""),
|
|
"character_name": c.get("name", ""),
|
|
"sort_order": c.get("sort", 0),
|
|
"image_url": c.get("image", ""),
|
|
"person_image_url": c.get("personImgURL", ""),
|
|
})
|
|
|
|
# Sortieren
|
|
characters.sort(key=lambda x: x.get("sort_order", 999))
|
|
|
|
# In DB cachen
|
|
await self._cache_cast(tvdb_id, characters)
|
|
|
|
return characters
|
|
except Exception as e:
|
|
logging.error(f"TVDB Cast laden fehlgeschlagen (ID {tvdb_id}): {e}")
|
|
return []
|
|
|
|
async def _get_cached_cast(self, tvdb_id: int) -> Optional[list[dict]]:
|
|
"""Cast aus DB-Cache laden"""
|
|
if not self._db_pool:
|
|
return None
|
|
try:
|
|
async with self._db_pool.acquire() as conn:
|
|
async with conn.cursor(aiomysql.DictCursor) as cur:
|
|
await cur.execute(
|
|
"SELECT * FROM tvdb_cast_cache "
|
|
"WHERE series_tvdb_id = %s ORDER BY sort_order",
|
|
(tvdb_id,)
|
|
)
|
|
rows = await cur.fetchall()
|
|
if rows:
|
|
return [self._serialize_row(r) for r in rows]
|
|
except Exception:
|
|
pass
|
|
return None
|
|
|
|
async def _cache_cast(self, tvdb_id: int,
|
|
characters: list[dict]) -> None:
|
|
"""Darsteller in DB cachen"""
|
|
if not self._db_pool:
|
|
return
|
|
try:
|
|
async with self._db_pool.acquire() as conn:
|
|
async with conn.cursor() as cur:
|
|
await cur.execute(
|
|
"DELETE FROM tvdb_cast_cache "
|
|
"WHERE series_tvdb_id = %s", (tvdb_id,)
|
|
)
|
|
for c in characters:
|
|
await cur.execute(
|
|
"INSERT INTO tvdb_cast_cache "
|
|
"(series_tvdb_id, person_name, character_name, "
|
|
"sort_order, image_url, person_image_url) "
|
|
"VALUES (%s, %s, %s, %s, %s, %s)",
|
|
(tvdb_id, c["person_name"],
|
|
c["character_name"], c["sort_order"],
|
|
c["image_url"], c["person_image_url"])
|
|
)
|
|
except Exception as e:
|
|
logging.error(f"TVDB Cast cachen fehlgeschlagen: {e}")
|
|
|
|
# === Artworks ===
|
|
|
|
async def get_series_artworks(self, tvdb_id: int) -> list[dict]:
|
|
"""Holt Artworks einer Serie.
|
|
Prueft zuerst DB-Cache, dann TVDB-API."""
|
|
cached = await self._get_cached_artworks(tvdb_id)
|
|
if cached:
|
|
return cached
|
|
|
|
client = self._get_client()
|
|
if not client:
|
|
return []
|
|
|
|
try:
|
|
series = client.get_series_extended(tvdb_id)
|
|
if not series:
|
|
return []
|
|
|
|
artworks = []
|
|
for a in (series.get("artworks") or []):
|
|
art_type_obj = a.get("type", {})
|
|
type_id = art_type_obj.get("id", 0) if isinstance(
|
|
art_type_obj, dict
|
|
) else art_type_obj
|
|
type_name = ARTWORK_TYPE_MAP.get(type_id)
|
|
if not type_name:
|
|
continue
|
|
|
|
artworks.append({
|
|
"artwork_type": type_name,
|
|
"image_url": a.get("image", ""),
|
|
"thumbnail_url": a.get("thumbnail", ""),
|
|
"width": a.get("width", 0),
|
|
"height": a.get("height", 0),
|
|
})
|
|
|
|
# Cachen
|
|
await self._cache_artworks(tvdb_id, artworks)
|
|
|
|
return artworks
|
|
except Exception as e:
|
|
logging.error(f"TVDB Artworks laden fehlgeschlagen (ID {tvdb_id}): {e}")
|
|
return []
|
|
|
|
async def _get_cached_artworks(self, tvdb_id: int) -> Optional[list[dict]]:
|
|
"""Artworks aus DB-Cache laden"""
|
|
if not self._db_pool:
|
|
return None
|
|
try:
|
|
async with self._db_pool.acquire() as conn:
|
|
async with conn.cursor(aiomysql.DictCursor) as cur:
|
|
await cur.execute(
|
|
"SELECT * FROM tvdb_artwork_cache "
|
|
"WHERE series_tvdb_id = %s "
|
|
"ORDER BY artwork_type, is_primary DESC",
|
|
(tvdb_id,)
|
|
)
|
|
rows = await cur.fetchall()
|
|
if rows:
|
|
return [self._serialize_row(r) for r in rows]
|
|
except Exception:
|
|
pass
|
|
return None
|
|
|
|
async def _cache_artworks(self, tvdb_id: int,
|
|
artworks: list[dict]) -> None:
|
|
"""Artworks in DB cachen"""
|
|
if not self._db_pool:
|
|
return
|
|
try:
|
|
async with self._db_pool.acquire() as conn:
|
|
async with conn.cursor() as cur:
|
|
await cur.execute(
|
|
"DELETE FROM tvdb_artwork_cache "
|
|
"WHERE series_tvdb_id = %s", (tvdb_id,)
|
|
)
|
|
seen_primary = set()
|
|
for a in artworks:
|
|
art_type = a["artwork_type"]
|
|
is_primary = 1 if art_type not in seen_primary else 0
|
|
seen_primary.add(art_type)
|
|
await cur.execute(
|
|
"INSERT INTO tvdb_artwork_cache "
|
|
"(series_tvdb_id, artwork_type, image_url, "
|
|
"thumbnail_url, width, height, is_primary) "
|
|
"VALUES (%s, %s, %s, %s, %s, %s, %s)",
|
|
(tvdb_id, art_type, a["image_url"],
|
|
a["thumbnail_url"], a["width"],
|
|
a["height"], is_primary)
|
|
)
|
|
except Exception as e:
|
|
logging.error(f"TVDB Artworks cachen fehlgeschlagen: {e}")
|
|
|
|
# === Metadaten herunterladen ===
|
|
|
|
async def download_metadata(self, series_id: int, tvdb_id: int,
|
|
series_folder: str) -> dict:
|
|
"""Laedt Poster, Fanart, Cast-Bilder in .metadata/ Ordner"""
|
|
if not series_folder or not os.path.isdir(series_folder):
|
|
return {"error": "Serien-Ordner nicht gefunden"}
|
|
|
|
meta_dir = os.path.join(series_folder, ".metadata")
|
|
os.makedirs(meta_dir, exist_ok=True)
|
|
cast_dir = os.path.join(meta_dir, "cast")
|
|
os.makedirs(cast_dir, exist_ok=True)
|
|
|
|
downloaded = 0
|
|
errors = 0
|
|
|
|
# Artworks holen (ggf. von API)
|
|
artworks = await self.get_series_artworks(tvdb_id)
|
|
# Cast holen (ggf. von API)
|
|
cast = await self.get_series_characters(tvdb_id)
|
|
|
|
async with aiohttp.ClientSession() as session:
|
|
# Primaere Bilder herunterladen (Poster, Fanart, Banner)
|
|
seen_types = set()
|
|
for art in artworks:
|
|
art_type = art["artwork_type"]
|
|
if art_type in seen_types:
|
|
continue
|
|
if art_type not in ("poster", "fanart", "banner"):
|
|
continue
|
|
seen_types.add(art_type)
|
|
|
|
url = art["image_url"]
|
|
if not url:
|
|
continue
|
|
ext = os.path.splitext(url)[1] or ".jpg"
|
|
target = os.path.join(meta_dir, f"{art_type}{ext}")
|
|
|
|
ok = await self._download_file(session, url, target)
|
|
if ok:
|
|
downloaded += 1
|
|
else:
|
|
errors += 1
|
|
|
|
# Cast-Bilder herunterladen
|
|
for c in cast:
|
|
url = c.get("person_image_url") or c.get("image_url", "")
|
|
name = c.get("person_name", "")
|
|
if not url or not name:
|
|
continue
|
|
# Sicherer Dateiname
|
|
safe_name = "".join(
|
|
ch if ch.isalnum() or ch in " _-" else "_"
|
|
for ch in name
|
|
).strip()
|
|
ext = os.path.splitext(url)[1] or ".jpg"
|
|
target = os.path.join(cast_dir, f"{safe_name}{ext}")
|
|
|
|
ok = await self._download_file(session, url, target)
|
|
if ok:
|
|
downloaded += 1
|
|
else:
|
|
errors += 1
|
|
|
|
# metadata_path in DB setzen
|
|
if self._db_pool:
|
|
try:
|
|
async with self._db_pool.acquire() as conn:
|
|
async with conn.cursor() as cur:
|
|
await cur.execute(
|
|
"UPDATE library_series SET metadata_path = %s "
|
|
"WHERE id = %s",
|
|
(meta_dir, series_id)
|
|
)
|
|
except Exception:
|
|
pass
|
|
|
|
logging.info(
|
|
f"TVDB Metadaten heruntergeladen fuer Serie {series_id}: "
|
|
f"{downloaded} Dateien, {errors} Fehler"
|
|
)
|
|
return {
|
|
"success": True,
|
|
"downloaded": downloaded,
|
|
"errors": errors,
|
|
"metadata_path": meta_dir,
|
|
}
|
|
|
|
@staticmethod
|
|
async def _download_file(session: aiohttp.ClientSession,
|
|
url: str, target: str) -> bool:
|
|
"""Einzelne Datei herunterladen"""
|
|
try:
|
|
async with session.get(url, timeout=aiohttp.ClientTimeout(total=30)) as resp:
|
|
if resp.status != 200:
|
|
return False
|
|
data = await resp.read()
|
|
with open(target, "wb") as f:
|
|
f.write(data)
|
|
return True
|
|
except Exception as e:
|
|
logging.warning(f"Download fehlgeschlagen: {url}: {e}")
|
|
return False
|
|
|
|
# === Episoden ===
|
|
|
|
async def fetch_episodes(self, tvdb_id: int) -> list[dict]:
|
|
"""Holt alle Episoden einer Serie von TVDB und cached sie in DB.
|
|
Nutzt konfigurierte Sprache fuer Episodennamen."""
|
|
client = self._get_client()
|
|
if not client:
|
|
return []
|
|
|
|
pref_lang = self._language
|
|
|
|
try:
|
|
episodes = []
|
|
page = 0
|
|
while True:
|
|
# Mit Sprach-Parameter abrufen fuer lokalisierte Namen
|
|
result = client.get_series_episodes(
|
|
tvdb_id, season_type="official", page=page,
|
|
lang=pref_lang,
|
|
)
|
|
if not result:
|
|
break
|
|
# API gibt Dict zurueck, kein Objekt
|
|
if isinstance(result, dict):
|
|
eps = result.get("episodes", [])
|
|
elif hasattr(result, "episodes"):
|
|
eps = result.episodes
|
|
else:
|
|
break
|
|
if not eps:
|
|
break
|
|
for ep in eps:
|
|
if isinstance(ep, dict):
|
|
s_num = ep.get("seasonNumber", 0)
|
|
e_num = ep.get("number", 0)
|
|
ep_name = ep.get("name", "")
|
|
ep_aired = ep.get("aired")
|
|
ep_runtime = ep.get("runtime")
|
|
else:
|
|
s_num = getattr(ep, "seasonNumber", 0)
|
|
e_num = getattr(ep, "number", 0)
|
|
ep_name = getattr(ep, "name", "")
|
|
ep_aired = getattr(ep, "aired", None)
|
|
ep_runtime = getattr(ep, "runtime", None)
|
|
if s_num and s_num > 0 and e_num and e_num > 0:
|
|
episodes.append({
|
|
"season_number": s_num,
|
|
"episode_number": e_num,
|
|
"episode_name": ep_name or "",
|
|
"aired": ep_aired,
|
|
"runtime": ep_runtime,
|
|
})
|
|
page += 1
|
|
if page > 50:
|
|
break
|
|
|
|
if episodes and self._db_pool:
|
|
await self._cache_episodes(tvdb_id, episodes)
|
|
|
|
logging.info(f"TVDB: {len(episodes)} Episoden fuer "
|
|
f"Serie {tvdb_id} geladen ({pref_lang})")
|
|
return episodes
|
|
except Exception as e:
|
|
logging.error(f"TVDB Episoden laden fehlgeschlagen "
|
|
f"(ID {tvdb_id}): {e}")
|
|
return []
|
|
|
|
async def _cache_episodes(self, tvdb_id: int,
|
|
episodes: list[dict]) -> None:
|
|
"""Episoden in DB cachen"""
|
|
if not self._db_pool:
|
|
return
|
|
try:
|
|
async with self._db_pool.acquire() as conn:
|
|
async with conn.cursor() as cur:
|
|
await cur.execute(
|
|
"DELETE FROM tvdb_episode_cache "
|
|
"WHERE series_tvdb_id = %s",
|
|
(tvdb_id,)
|
|
)
|
|
for ep in episodes:
|
|
await cur.execute(
|
|
"INSERT INTO tvdb_episode_cache "
|
|
"(series_tvdb_id, season_number, episode_number, "
|
|
"episode_name, aired, runtime) "
|
|
"VALUES (%s, %s, %s, %s, %s, %s)",
|
|
(
|
|
tvdb_id, ep["season_number"],
|
|
ep["episode_number"], ep["episode_name"],
|
|
ep["aired"], ep["runtime"],
|
|
)
|
|
)
|
|
except Exception as e:
|
|
logging.error(f"TVDB Episode cachen fehlgeschlagen: {e}")
|
|
|
|
# === Match & Update ===
|
|
|
|
async def match_and_update_series(self, series_id: int,
|
|
tvdb_id: int,
|
|
library_service: 'LibraryService'
|
|
) -> dict:
|
|
"""TVDB-ID zuordnen, Infos holen, Episoden + Cast + Artworks cachen"""
|
|
info = await self.get_series_info(tvdb_id)
|
|
if not info:
|
|
return {"error": "TVDB-Serien-Info nicht gefunden"}
|
|
|
|
episodes = await self.fetch_episodes(tvdb_id)
|
|
|
|
# Cast und Artworks cachen (im Hintergrund)
|
|
await self.get_series_characters(tvdb_id)
|
|
await self.get_series_artworks(tvdb_id)
|
|
|
|
pool = self._db_pool
|
|
if not pool:
|
|
return {"error": "Keine DB-Verbindung"}
|
|
|
|
try:
|
|
async with pool.acquire() as conn:
|
|
async with conn.cursor() as cur:
|
|
await cur.execute("""
|
|
UPDATE library_series SET
|
|
tvdb_id = %s,
|
|
title = %s,
|
|
overview = %s,
|
|
first_aired = %s,
|
|
poster_url = %s,
|
|
status = %s,
|
|
total_seasons = %s,
|
|
total_episodes = %s,
|
|
genres = %s,
|
|
last_updated = NOW()
|
|
WHERE id = %s
|
|
""", (
|
|
tvdb_id,
|
|
info["name"],
|
|
info.get("overview", ""),
|
|
info.get("first_aired") or None,
|
|
info.get("poster_url", ""),
|
|
info.get("status", ""),
|
|
info.get("total_seasons", 0),
|
|
len(episodes),
|
|
info.get("genres", ""),
|
|
series_id,
|
|
))
|
|
|
|
await self._update_episode_titles(series_id, tvdb_id)
|
|
await library_service._update_series_counts(series_id)
|
|
|
|
return {
|
|
"success": True,
|
|
"name": info["name"],
|
|
"total_episodes": len(episodes),
|
|
}
|
|
except Exception as e:
|
|
logging.error(f"TVDB Match fehlgeschlagen: {e}")
|
|
return {"error": str(e)}
|
|
|
|
async def _update_episode_titles(self, series_id: int,
|
|
tvdb_id: int) -> None:
|
|
"""Episoden-Titel aus TVDB-Cache in lokale Videos uebertragen"""
|
|
if not self._db_pool:
|
|
return
|
|
try:
|
|
async with self._db_pool.acquire() as conn:
|
|
async with conn.cursor() as cur:
|
|
await cur.execute("""
|
|
UPDATE library_videos v
|
|
JOIN tvdb_episode_cache tc
|
|
ON tc.series_tvdb_id = %s
|
|
AND tc.season_number = v.season_number
|
|
AND tc.episode_number = v.episode_number
|
|
SET v.episode_title = tc.episode_name
|
|
WHERE v.series_id = %s
|
|
""", (tvdb_id, series_id))
|
|
except Exception as e:
|
|
logging.error(f"Episoden-Titel aktualisieren fehlgeschlagen: {e}")
|
|
|
|
# === Auto-Match ===
|
|
|
|
@staticmethod
|
|
def _clean_search_title(title: str) -> tuple[str, str]:
|
|
"""Bereinigt Titel fuer bessere TVDB-Suche.
|
|
Entfernt fuehrende Sortier-Nummern, Aufloesung-Suffixe, Klammern."""
|
|
import re
|
|
t = title.strip()
|
|
# Fuehrende Sortier-Nummern entfernen ("1 X-Men" -> "X-Men")
|
|
t = re.sub(r'^\d{1,2}\s+', '', t)
|
|
# Aufloesung/Qualitaets-Suffixe entfernen
|
|
t = re.sub(
|
|
r'\s*(720p|1080p|2160p|4k|bluray|bdrip|webrip|web-dl|hdtv|'
|
|
r'x264|x265|hevc|aac|dts|remux)\s*',
|
|
' ', t, flags=re.IGNORECASE
|
|
).strip()
|
|
# Klammern mit Inhalt entfernen "(2020)" etc.
|
|
t_no_parens = re.sub(r'\s*\([^)]*\)\s*', ' ', t).strip()
|
|
# Trailing-Nummern entfernen ("X-Men 1" -> "X-Men")
|
|
t_no_num = re.sub(r'\s+\d{1,2}$', '', t)
|
|
# Variante 1: bereinigt, Variante 2: ohne Klammern und Trailing-Nummern
|
|
clean1 = t.strip()
|
|
clean2 = t_no_parens.strip() if t_no_parens != clean1 else t_no_num.strip()
|
|
return clean1, clean2
|
|
|
|
async def collect_suggestions(
|
|
self,
|
|
media_type: str,
|
|
progress_callback=None,
|
|
) -> list[dict]:
|
|
"""Sammelt TVDB-Vorschlaege fuer alle Serien oder Filme ohne TVDB.
|
|
media_type: 'series' oder 'movies'
|
|
Gibt Liste von Vorschlaegen zurueck: [{
|
|
id, local_name, year, type,
|
|
suggestions: [{tvdb_id, name, year, poster, overview}]
|
|
}]"""
|
|
if not self._db_pool:
|
|
return []
|
|
|
|
# Items ohne TVDB laden
|
|
async with self._db_pool.acquire() as conn:
|
|
async with conn.cursor(aiomysql.DictCursor) as cur:
|
|
if media_type == "series":
|
|
await cur.execute(
|
|
"SELECT id, folder_name, title "
|
|
"FROM library_series WHERE tvdb_id IS NULL "
|
|
"ORDER BY title"
|
|
)
|
|
else:
|
|
await cur.execute(
|
|
"SELECT id, folder_name, title, year "
|
|
"FROM library_movies WHERE tvdb_id IS NULL "
|
|
"ORDER BY title"
|
|
)
|
|
items = await cur.fetchall()
|
|
|
|
total = len(items)
|
|
proposals = []
|
|
|
|
for i, item in enumerate(items):
|
|
name = item.get("title") or item["folder_name"]
|
|
t_full, t_clean = self._clean_search_title(name)
|
|
|
|
try:
|
|
search_fn = (self.search_series if media_type == "series"
|
|
else self.search_movies)
|
|
results = await search_fn(t_full)
|
|
if not results and t_clean != t_full:
|
|
results = await search_fn(t_clean)
|
|
except Exception as e:
|
|
logging.warning(f"TVDB-Suche fehlgeschlagen: {name}: {e}")
|
|
results = []
|
|
|
|
# Top 3 Vorschlaege sammeln
|
|
suggestions = []
|
|
for r in (results or [])[:3]:
|
|
suggestions.append({
|
|
"tvdb_id": r.get("tvdb_id"),
|
|
"name": r.get("name", ""),
|
|
"year": r.get("year", ""),
|
|
"poster": r.get("poster", ""),
|
|
"overview": (r.get("overview") or "")[:150],
|
|
})
|
|
|
|
proposals.append({
|
|
"id": item["id"],
|
|
"local_name": name,
|
|
"year": item.get("year"),
|
|
"type": media_type,
|
|
"suggestions": suggestions,
|
|
})
|
|
|
|
if progress_callback:
|
|
await progress_callback(
|
|
i + 1, total, name, len(proposals)
|
|
)
|
|
|
|
return proposals
|