diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1e93d6a..f547001 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,102 @@
 All relevant changes to the VideoKonverter project.
 
+## [2.3.0] - 2026-02-24
+
+### Import System Improvements
+
+**Load existing import jobs**
+- New `GET /api/library/import` API returns all import jobs
+- The import modal now lists open jobs at the top (buttons with status)
+- Clicking a job loads its preview so the import can be resumed
+- Prevents duplicate imports of the same source
+
+**Byte-level import progress**
+- New DB fields: `current_file_name`, `current_file_bytes`, `current_file_total`
+- Files are copied in 64 MB chunks with progress updates every 50 MB
+- The UI shows the current file and its byte progress
+
+**Targeted rescan after import**
+- After an import, only the target library path is scanned
+- `imported_series` list in the job status names the affected folders
+- `loadSectionData(targetPathId)` instead of `reloadAllSections()`
+
+### Folder Management
+
+**Folder delete button**
+- New trash-can button (SVG icon) at the top right of each folder
+- Appears only on hover, turns red on mouse-over
+- Proper confirmation dialog instead of the browser's confirm()
+- Toast notification instead of alert()
+
+**Delete-folder API**
+- `POST /api/library/delete-folder` with a safety check
+- Verifies that the path lies under a configured library path
+- Deletes the folder plus all DB entries (library_videos)
+- Returns the counts of deleted files/folders/DB entries
+
+### Convert Series
+
+**Batch conversion for series**
+- New "Convert series" button in the series modal
+- Modal with codec selection (AV1/HEVC/H.264)
+- Option: re-convert everything (including already matching files)
+- Option: delete source files after conversion
+- `POST /api/library/series/{id}/convert` API
+- `GET /api/library/series/{id}/convert-status` for codec statistics
+
+**Cleanup function for series**
+- "Delete old files" button in the series modal
+- Deletes everything except registered videos, .metadata, .nfo files, and images
+- `POST /api/library/series/{id}/cleanup` API
+
+### Server Log System
+
+**Notification bell**
+- Bell icon at the bottom left of every page
+- Badge shows the number of unread errors (red)
+- Log panel listing all server messages
+- Errors/warnings highlighted in color
+
+**Log API**
+- `GET /api/logs?since=ID` returns log entries newer than ID
+- In-memory buffer (max. 200 entries)
+- Polled every 2 seconds (see the sketch below)
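+
+A minimal polling sketch (illustrative, not part of the shipped UI): it assumes the
+default `http://localhost:8080` base URL from the README and uses only the response
+shape produced by `get_logs` (`{"logs": [{"id", "level", "message", "time"}, ...]}`):
+
+```python
+import json
+import time
+import urllib.request
+
+BASE = "http://localhost:8080"  # assumption: default Web-UI address
+
+def poll_logs() -> None:
+    last_id = 0
+    while True:
+        with urllib.request.urlopen(f"{BASE}/api/logs?since={last_id}") as resp:
+            entries = json.load(resp)["logs"]
+        for entry in entries:
+            print(f'[{entry["level"]}] {entry["message"]}')
+            last_id = max(last_id, entry["id"])  # remember the newest seen ID
+        time.sleep(2)  # same 2-second cadence as the web UI
+
+if __name__ == "__main__":
+    poll_logs()
+```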
+
+### UI Improvements
+
+**Toast notifications**
+- Improved styling with a slide-in animation
+- Colored left border (success/error/info)
+- Shown for 4 seconds
+
+**TVDB search**
+- "Search English titles" checkbox in the TVDB modal
+- Allows searching by the original English titles
+
+### Technical Changes
+
+**New/Changed Files**
+- `app/routes/library_api.py` - 6 new endpoints (+200 lines)
+- `app/services/importer.py` - get_all_jobs(), progress tracking (+100 lines)
+- `app/services/queue.py` - delete_source option for add_paths()
+- `app/static/js/library.js` - dialog system, toasts, import jobs (+150 lines)
+- `app/static/css/style.css` - toast, delete-button, and dialog styles (+50 lines)
+- `app/templates/library.html` - confirm modal, convert modal (+50 lines)
+- `app/templates/base.html` - notification bell + log panel (+100 lines)
+- `app/routes/api.py` - /api/logs endpoint, WebLogHandler (+40 lines)
+- `app/models/job.py` - delete_source flag
+
+**New API Endpoints**
+- `GET /api/library/import` - list all import jobs
+- `POST /api/library/delete-folder` - delete a folder
+- `POST /api/library/series/{id}/convert` - convert a series
+- `GET /api/library/series/{id}/convert-status` - codec status
+- `POST /api/library/series/{id}/cleanup` - delete old files
+- `GET /api/logs` - fetch server logs
+
+---
+
 ## [2.2.0] - 2026-02-21
 
 ### Bugfixes
diff --git a/README.md b/README.md
index ca42cf0..3a78478 100644
--- a/README.md
+++ b/README.md
@@ -224,8 +224,12 @@ Web-UI: http://localhost:8080
 | GET | `/api/library/series/{id}` | Series with episodes |
 | GET | `/api/library/series/{id}/missing` | Missing episodes |
 | POST | `/api/library/series/{id}/tvdb-match` | Assign a TVDB ID |
+| POST | `/api/library/series/{id}/convert` | Convert all episodes |
+| GET | `/api/library/series/{id}/convert-status` | Codec status of the series |
+| POST | `/api/library/series/{id}/cleanup` | Delete old files |
 | GET | `/api/library/duplicates` | Find duplicates |
 | POST | `/api/library/videos/{id}/convert` | Convert directly |
+| POST | `/api/library/delete-folder` | Delete an entire folder |
 | GET | `/api/library/stats` | Library statistics |
 | GET | `/api/library/movies` | List movies |
 | POST | `/api/library/movies/{id}/tvdb-match` | Movie TVDB match |
@@ -236,6 +240,21 @@ Web-UI: http://localhost:8080
 | GET | `/api/tvdb/language` | Read the TVDB language |
 | PUT | `/api/tvdb/language` | Change the TVDB language |
 
+### Import
+| Method | Path | Description |
+|---------|------|-------------|
+| GET | `/api/library/import` | List all import jobs |
+| POST | `/api/library/import` | Create a new import job |
+| GET | `/api/library/import/{id}` | Import job status with items |
+| POST | `/api/library/import/{id}/analyze` | Analyze an import |
+| POST | `/api/library/import/{id}/execute` | Execute an import |
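+
+A typical resume flow, sketched with stdlib HTTP calls (illustrative; it relies only
+on the response fields visible in `get_all_jobs` and `execute_import` later in this
+diff, and assumes a job that was created earlier via `POST /api/library/import`):
+
+```python
+import json
+import urllib.request
+
+BASE = "http://localhost:8080"  # assumption: default Web-UI address
+
+def _get(path: str) -> dict:
+    with urllib.request.urlopen(BASE + path) as resp:
+        return json.load(resp)
+
+def _post(path: str) -> dict:
+    req = urllib.request.Request(BASE + path, data=b"", method="POST")
+    with urllib.request.urlopen(req) as resp:
+        return json.load(resp)
+
+# Pick the newest open import job and run it to completion
+jobs = _get("/api/library/import")["jobs"]
+open_jobs = [j for j in jobs if j["status"] not in ("done", "error")]
+if open_jobs:
+    job_id = open_jobs[0]["id"]
+    _post(f"/api/library/import/{job_id}/analyze")   # re-analyze the source
+    result = _post(f"/api/library/import/{job_id}/execute")
+    print(result)  # e.g. {"done": 12, "errors": 0, "tvdb_linked": 1}
+```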
+
+### System
+| Method | Path | Description |
+|---------|------|-------------|
+| GET | `/api/logs` | Fetch server logs |
+| GET | `/api/system` | System info (GPU, jobs) |
+
 ### Video-Filter (`/api/library/videos`)
 ```
 ?video_codec=hevc   # h264, hevc, av1, mpeg4
diff --git a/app/models/job.py b/app/models/job.py
index 2bbcdd4..d432708 100644
--- a/app/models/job.py
+++ b/app/models/job.py
@@ -33,6 +33,9 @@ class ConversionJob:
     target_filename: str = ""
     target_container: str = "webm"
 
+    # Options
+    delete_source: bool = False  # delete the source file after conversion
+
     # ffmpeg process
     ffmpeg_cmd: list[str] = field(default_factory=list)
     process: Optional[asyncio.subprocess.Process] = field(default=None, repr=False)
@@ -198,4 +201,5 @@ class ConversionJob:
             "preset_name": self.preset_name,
             "status": self.status.value,
             "created_at": self.created_at,
+            "delete_source": self.delete_source,
         }
diff --git a/app/routes/api.py b/app/routes/api.py
index 353d9fb..d198fbe 100644
--- a/app/routes/api.py
+++ b/app/routes/api.py
@@ -335,7 +335,42 @@ def setup_api_routes(app: web.Application, config: Config,
         "jobs": [{"id": j.id, "file": j.media.source_filename} for j in jobs],
     })
 
+    # --- Logs ---
+
+    # In-memory log buffer
+    _log_buffer = []
+    _log_id = 0
+    _MAX_LOGS = 200
+
+    class WebLogHandler(logging.Handler):
+        """Handler that forwards log records to the in-memory buffer"""
+        def emit(self, record):
+            nonlocal _log_id
+            _log_id += 1
+            entry = {
+                "id": _log_id,
+                "level": record.levelname,
+                "message": record.getMessage(),
+                "time": record.created,
+            }
+            _log_buffer.append(entry)
+            # Trim the buffer
+            while len(_log_buffer) > _MAX_LOGS:
+                _log_buffer.pop(0)
+
+    # Register the handler
+    web_handler = WebLogHandler()
+    web_handler.setLevel(logging.INFO)
+    logging.getLogger().addHandler(web_handler)
+
+    async def get_logs(request: web.Request) -> web.Response:
+        """GET /api/logs?since=123 - log entries newer than the given ID"""
+        try:
+            since = int(request.query.get("since", 0))
+        except ValueError:
+            since = 0  # tolerate a malformed ?since= instead of raising a 500
+        logs = [l for l in _log_buffer if l["id"] > since]
+        return web.json_response({"logs": logs})
+
     # --- Register routes ---
+    app.router.add_get("/api/logs", get_logs)
     app.router.add_get("/api/browse", get_browse)
     app.router.add_post("/api/upload", post_upload)
     app.router.add_post("/api/convert", post_convert)
diff --git a/app/routes/library_api.py b/app/routes/library_api.py
index 9e861c5..5798976 100644
--- a/app/routes/library_api.py
+++ b/app/routes/library_api.py
@@ -234,8 +234,12 @@ def setup_library_routes(app: web.Application, config: Config,
         return web.json_response(result)
 
     async def get_tvdb_search(request: web.Request) -> web.Response:
-        """GET /api/tvdb/search?q=Breaking+Bad"""
+        """GET /api/tvdb/search?q=Breaking+Bad&lang=eng
+
+        lang: language of the results (deu, eng, etc.)
+        Default: the configured language
+        """
         query = request.query.get("q", "").strip()
+        lang = request.query.get("lang", "").strip() or None
         if not query:
             return web.json_response(
                 {"error": "Suchbegriff erforderlich"}, status=400
@@ -245,7 +249,7 @@
                 {"error": "TVDB nicht konfiguriert (API Key fehlt)"},
                 status=400,
             )
-        results = await tvdb_service.search_series(query)
+        results = await tvdb_service.search_series(query, language=lang)
         return web.json_response({"results": results})
 
     # === TVDB metadata ===
@@ -638,6 +642,326 @@ def setup_library_routes(app: web.Application, config: Config,
             {"error": "Job konnte nicht erstellt werden"}, status=500
         )
 
+    # === Batch conversion for a series ===
+
+    async def post_convert_series(request: web.Request) -> web.Response:
+        """POST /api/library/series/{series_id}/convert
+
+        Converts every episode of a series that is not yet in the target format.
+        Body: {preset, target_codec, force_all, delete_old}
+        - preset: encoding preset (optional, falls back to the default)
+        - target_codec: target codec to compare against (e.g. 'av1', 'hevc')
+        - force_all: true = convert everything, false = only non-target formats
+        - delete_old: true = delete the source files after conversion
+        """
+        series_id = int(request.match_info["series_id"])
+
+        try:
+            data = await request.json()
+        except Exception:
+            data = {}
+
+        preset = data.get("preset")
+        target_codec = data.get("target_codec", "av1").lower()
+        force_all = data.get("force_all", False)
+        delete_old = data.get("delete_old", False)
+
+        pool = await library_service._get_pool()
+        if not pool:
+            return web.json_response(
+                {"error": "Keine DB-Verbindung"}, status=500
+            )
+
+        try:
+            async with pool.acquire() as conn:
+                async with conn.cursor() as cur:
+                    # Load all videos of the series
+                    await cur.execute(
+                        "SELECT id, file_path, video_codec "
+                        "FROM library_videos WHERE series_id = %s",
+                        (series_id,)
+                    )
+                    videos = await cur.fetchall()
+        except Exception as e:
+            return web.json_response({"error": str(e)}, status=500)
+
+        if not videos:
+            return web.json_response(
+                {"error": "Keine Videos gefunden"}, status=404
+            )
+
+        # Codec aliases for the comparison, e.g. a stored codec of
+        # "hevc_vaapi" still counts as the target codec "hevc"
+        codec_aliases = {
+            "av1": ["av1", "libaom-av1", "libsvtav1", "av1_vaapi"],
+            "hevc": ["hevc", "h265", "libx265", "hevc_vaapi"],
+            "h264": ["h264", "avc", "libx264", "h264_vaapi"],
+        }
+        target_codecs = codec_aliases.get(target_codec, [target_codec])
+
+        to_convert = []
+        already_done = 0
+
+        for _vid_id, file_path, current_codec in videos:
+            current = (current_codec or "").lower()
+            is_target = any(tc in current for tc in target_codecs)
+
+            if force_all or not is_target:
+                to_convert.append(file_path)
+            else:
+                already_done += 1
+
+        if not to_convert:
+            return web.json_response({
+                "message": "Alle Episoden sind bereits im Zielformat",
+                "already_done": already_done,
+                "queued": 0,
+            })
+
+        # Create the jobs with the delete_source option
+        jobs = await queue_service.add_paths(
+            to_convert, preset, delete_source=delete_old
+        )
+
+        return web.json_response({
+            "message": f"{len(jobs)} Episoden zur Konvertierung hinzugefuegt",
+            "queued": len(jobs),
+            "already_done": already_done,
+            "skipped": len(videos) - len(jobs) - already_done,
+            "delete_old": delete_old,
+        })
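+
+    # Illustrative request/response for the endpoint above (values made up):
+    #
+    #   POST /api/library/series/42/convert
+    #   {"target_codec": "hevc", "force_all": false, "delete_old": true}
+    #
+    #   -> {"message": "8 Episoden zur Konvertierung hinzugefuegt",
+    #       "queued": 8, "already_done": 4, "skipped": 0, "delete_old": true}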
+
+    async def post_cleanup_series_folder(request: web.Request) -> web.Response:
+        """POST /api/library/series/{series_id}/cleanup
+
+        Deletes every file in the series folder EXCEPT:
+        - videos that are registered in the library
+        - the .metadata directory and its contents
+        - .nfo files and images (see protected_extensions)
+        """
+        import os
+        series_id = int(request.match_info["series_id"])
+
+        pool = await library_service._get_pool()
+        if not pool:
+            return web.json_response(
+                {"error": "Keine DB-Verbindung"}, status=500
+            )
+
+        try:
+            async with pool.acquire() as conn:
+                async with conn.cursor() as cur:
+                    # Series folder
+                    await cur.execute(
+                        "SELECT folder_path FROM library_series WHERE id = %s",
+                        (series_id,)
+                    )
+                    row = await cur.fetchone()
+                    if not row:
+                        return web.json_response(
+                            {"error": "Serie nicht gefunden"}, status=404
+                        )
+                    series_folder = row[0]
+
+                    # All videos of the series (these are kept)
+                    await cur.execute(
+                        "SELECT file_path FROM library_videos WHERE series_id = %s",
+                        (series_id,)
+                    )
+                    keep_files = {r[0] for r in await cur.fetchall()}
+        except Exception as e:
+            return web.json_response({"error": str(e)}, status=500)
+
+        if not series_folder or not os.path.isdir(series_folder):
+            return web.json_response(
+                {"error": "Serien-Ordner nicht gefunden"}, status=404
+            )
+
+        # Protected paths/files
+        protected_dirs = {".metadata", "@eaDir", ".AppleDouble"}
+        protected_extensions = {".nfo", ".jpg", ".jpeg", ".png", ".xml"}
+
+        deleted = 0
+        errors = []
+
+        for root, dirs, files in os.walk(series_folder, topdown=True):
+            # Skip protected directories
+            dirs[:] = [d for d in dirs if d not in protected_dirs]
+
+            for f in files:
+                file_path = os.path.join(root, f)
+                ext = os.path.splitext(f)[1].lower()
+
+                # Keep the file if it is:
+                # - registered in the library
+                # - a protected extension
+                # - a hidden file
+                if file_path in keep_files:
+                    continue
+                if ext in protected_extensions:
+                    continue
+                if f.startswith("."):
+                    continue
+
+                # Delete
+                try:
+                    os.remove(file_path)
+                    deleted += 1
+                    logging.info(f"Cleanup geloescht: {file_path}")
+                except Exception as e:
+                    errors.append(f"{f}: {e}")
+
+        return web.json_response({
+            "deleted": deleted,
+            "errors": len(errors),
+            "error_details": errors[:10],  # show at most 10 errors
+        })
+
+    async def post_delete_folder(request: web.Request) -> web.Response:
+        """POST /api/library/delete-folder
+
+        Deletes a complete folder (season folder etc.) including its DB entries.
+        Body: {folder_path: "/mnt/.../Season 01"}
+        WARNING: irreversible!
+        """
+        import os
+        import shutil
+        try:
+            data = await request.json()
+        except Exception:
+            return web.json_response(
+                {"error": "Ungueltiges JSON"}, status=400
+            )
+
+        folder_path = data.get("folder_path", "").strip()
+        if not folder_path:
+            return web.json_response(
+                {"error": "folder_path erforderlich"}, status=400
+            )
+
+        # Safety check: the folder must lie under a library path
+        pool = await library_service._get_pool()
+        if not pool:
+            return web.json_response(
+                {"error": "Keine DB-Verbindung"}, status=500
+            )
+
+        allowed = False
+        norm_path = os.path.normpath(folder_path)
+        try:
+            async with pool.acquire() as conn:
+                async with conn.cursor() as cur:
+                    await cur.execute(
+                        "SELECT path FROM library_paths WHERE enabled = 1"
+                    )
+                    paths = await cur.fetchall()
+                    for (lib_path,) in paths:
+                        lib_norm = os.path.normpath(lib_path)
+                        # Compare path components, not raw prefixes: a plain
+                        # startswith() would let "/mnt/media2" pass for the
+                        # library path "/mnt/media". Deleting the library
+                        # root itself is also rejected.
+                        if norm_path != lib_norm and norm_path.startswith(lib_norm + os.sep):
+                            allowed = True
+                            break
+        except Exception as e:
+            return web.json_response({"error": str(e)}, status=500)
+
+        if not allowed:
+            return web.json_response(
+                {"error": "Ordner liegt nicht in einem Bibliothekspfad"},
+                status=403
+            )
+
+        if not os.path.isdir(folder_path):
+            return web.json_response(
+                {"error": "Ordner nicht gefunden"}, status=404
+            )
+
+        # Count what is going to be deleted
+        deleted_files = 0
+        deleted_dirs = 0
+        errors = []
+
+        for root, dirs, files in os.walk(folder_path):
+            deleted_files += len(files)
+            deleted_dirs += len(dirs)
+
+        # Remove the DB entries (videos inside this folder)
+        db_removed = 0
+        try:
+            async with pool.acquire() as conn:
+                async with conn.cursor() as cur:
+                    # Delete videos whose file_path lies under folder_path;
+                    # the trailing separator keeps "Season 1" from also
+                    # matching "Season 10"
+                    await cur.execute(
+                        "DELETE FROM library_videos "
+                        "WHERE file_path LIKE %s",
+                        (folder_path.rstrip(os.sep) + os.sep + "%",)
+                    )
+                    db_removed = cur.rowcount
+        except Exception as e:
+            errors.append(f"DB-Fehler: {e}")
+
+        # Delete the folder itself
+        try:
+            shutil.rmtree(folder_path)
+            logging.info(f"Ordner geloescht: {folder_path}")
+        except Exception as e:
+            logging.error(f"Ordner loeschen fehlgeschlagen: {e}")
+            return web.json_response(
+                {"error": f"Loeschen fehlgeschlagen: {e}"}, status=500
+            )
+
+        return web.json_response({
+            "deleted_files": deleted_files,
+            "deleted_dirs": deleted_dirs,
+            "db_removed": db_removed,
+            "errors": errors,
+        })
+
+    async def get_series_convert_status(request: web.Request) -> web.Response:
+        """GET /api/library/series/{series_id}/convert-status
+
+        Returns the codec status of every episode of a series."""
+        series_id = int(request.match_info["series_id"])
+
+        pool = await library_service._get_pool()
+        if not pool:
+            return web.json_response(
+                {"error": "Keine DB-Verbindung"}, status=500
+            )
+
+        try:
+            async with pool.acquire() as conn:
+                async with conn.cursor() as cur:
+                    await cur.execute(
+                        "SELECT id, file_name, video_codec, season_number, "
+                        "episode_number FROM library_videos "
+                        "WHERE series_id = %s ORDER BY season_number, episode_number",
+                        (series_id,)
+                    )
+                    videos = await cur.fetchall()
+        except Exception as e:
+            return web.json_response({"error": str(e)}, status=500)
+
+        # Codec statistics
+        codec_counts = {}
+        episodes = []
+        for vid_id, name, codec, season, episode in videos:
+            codec_lower = (codec or "unknown").lower()
+            codec_counts[codec_lower] = codec_counts.get(codec_lower, 0) + 1
+            episodes.append({
+                "id": vid_id,
+                "name": name,
+                "codec": codec,
+                "season": season,
+                "episode": episode,
+            })
+
+        return web.json_response({
+            "total": len(videos),
+            "codec_counts": codec_counts,
+            "episodes": episodes,
+        })
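+
+    # Illustrative response shape (values made up):
+    #   {"total": 10,
+    #    "codec_counts": {"hevc": 8, "h264": 2},
+    #    "episodes": [{"id": 1, "name": "S01E01.mkv", "codec": "hevc",
+    #                  "season": 1, "episode": 1}, ...]}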
+
     # === Statistics ===
 
     async def get_library_stats(request: web.Request) -> web.Response:
@@ -823,6 +1147,15 @@ def setup_library_routes(app: web.Application, config: Config,
         result = await importer_service.analyze_job(job_id)
         return web.json_response(result)
 
+    async def get_import_jobs(request: web.Request) -> web.Response:
+        """GET /api/library/import - list all import jobs"""
+        if not importer_service:
+            return web.json_response(
+                {"error": "Import-Service nicht verfuegbar"}, status=500
+            )
+        jobs = await importer_service.get_all_jobs()
+        return web.json_response({"jobs": jobs})
+
     async def get_import_status(request: web.Request) -> web.Response:
         """GET /api/library/import/{job_id}"""
         if not importer_service:
@@ -952,6 +1285,18 @@ def setup_library_routes(app: web.Application, config: Config,
     app.router.add_post(
         "/api/library/videos/{video_id}/convert", post_convert_video
     )
+    app.router.add_post(
+        "/api/library/series/{series_id}/convert", post_convert_series
+    )
+    app.router.add_get(
+        "/api/library/series/{series_id}/convert-status",
+        get_series_convert_status
+    )
+    app.router.add_post(
+        "/api/library/series/{series_id}/cleanup",
+        post_cleanup_series_folder
+    )
+    app.router.add_post("/api/library/delete-folder", post_delete_folder)
     # Statistics
     app.router.add_get("/api/library/stats", get_library_stats)
     # Clean
@@ -963,6 +1308,7 @@ def setup_library_routes(app: web.Application, config: Config,
     # Filesystem browser
     app.router.add_get("/api/library/browse-fs", get_browse_fs)
     # Import
+    app.router.add_get("/api/library/import", get_import_jobs)
     app.router.add_post("/api/library/import", post_create_import)
     app.router.add_post(
         "/api/library/import/{job_id}/analyze", post_analyze_import
diff --git a/app/services/importer.py b/app/services/importer.py
index 82ac3e7..1254b9f 100644
--- a/app/services/importer.py
+++ b/app/services/importer.py
@@ -503,7 +503,7 @@ class ImporterService:
         return ""
 
     async def execute_import(self, job_id: int) -> dict:
-        """Executes the import (copy/move)"""
+        """Executes the import (copy/move + TVDB link)"""
         if not self._db_pool:
             return {"error": "Keine DB-Verbindung"}
@@ -537,10 +537,16 @@
             errors = 0
             mode = job.get("mode", "copy")
 
+            # Collect TVDB IDs so the series can be linked afterwards
+            tvdb_links = {}  # series_name -> tvdb_id
+
             for item in items:
-                ok = await self._process_item(item, mode)
+                ok = await self._process_item(item, mode, job_id)
                 if ok:
                     done += 1
+                    # Remember the TVDB link
+                    if item.get("tvdb_series_id") and item.get("tvdb_series_name"):
+                        tvdb_links[item["tvdb_series_name"]] = item["tvdb_series_id"]
                 else:
                     errors += 1
 
@@ -561,14 +567,62 @@
                     "WHERE id = %s", (status, job_id)
                 )
 
-            return {"done": done, "errors": errors}
+            # Apply the collected TVDB matches to library_series
+            linked_series = 0
+            if tvdb_links:
+                linked_series = await self._link_tvdb_to_series(tvdb_links)
+
+            return {
+                "done": done,
+                "errors": errors,
+                "tvdb_linked": linked_series,
+            }
 
         except Exception as e:
             logging.error(f"Import ausfuehren fehlgeschlagen: {e}")
             return {"error": str(e)}
 
-    async def _process_item(self, item: dict, mode: str) -> bool:
-        """Imports a single item (copy/move)"""
+    async def _link_tvdb_to_series(self, tvdb_links: dict) -> int:
+        """Links imported series to their TVDB entries in library_series"""
+        if not self._db_pool or not self.tvdb:
+            return 0
+
+        linked = 0
+        for series_name, tvdb_id in tvdb_links.items():
+            try:
+                async with self._db_pool.acquire() as conn:
+                    async with conn.cursor() as cur:
+                        # Find the series in library_series (by name)
+                        await cur.execute(
+                            "SELECT id, tvdb_id FROM library_series "
+                            "WHERE (folder_name = %s OR title = %s) "
+                            "AND tvdb_id IS NULL "
+                            "LIMIT 1",
+                            (series_name, series_name)
+                        )
+                        row = await cur.fetchone()
+                        if row:
+                            series_id = row[0]
+                            # Load the TVDB data and link it
+                            result = await self.tvdb.match_and_update_series(
+                                series_id, int(tvdb_id), self.library
+                            )
+                            if not result.get("error"):
+                                linked += 1
+                                logging.info(
+                                    f"Import: TVDB verknuepft - "
+                                    f"{series_name} -> {tvdb_id}"
+                                )
+            except Exception as e:
+                logging.warning(
+                    f"TVDB-Link fehlgeschlagen fuer {series_name}: {e}"
+                )
+
+        return linked
+
+    async def _process_item(self, item: dict, mode: str,
+                            job_id: int = 0) -> bool:
+        """Imports a single item (copy/move + metadata)"""
         src = item["source_file"]
         target_dir = item["target_path"]
         target_file = item["target_filename"]
@@ -578,19 +632,39 @@
             return False
 
         target = os.path.join(target_dir, target_file)
+        src_size = item.get("source_size", 0) or os.path.getsize(src)
 
         try:
             # Create the target folder
             os.makedirs(target_dir, exist_ok=True)
 
+            # Initialize the progress tracking in the DB
+            if job_id and self._db_pool:
+                await self._update_file_progress(
+                    job_id, target_file, 0, src_size
+                )
+
             if mode == "move":
                 shutil.move(src, target)
+                # A move finishes immediately
+                if job_id and self._db_pool:
+                    await self._update_file_progress(
+                        job_id, target_file, src_size, src_size
+                    )
             else:
-                shutil.copy2(src, target)
+                # Copy with progress updates
+                await self._copy_with_progress(
+                    src, target, job_id, target_file, src_size
+                )
 
             logging.info(
                 f"Import: {os.path.basename(src)} -> {target}"
             )
+
+            # Embed metadata into the file (if TVDB info is available)
+            if item.get("tvdb_series_name") or item.get("detected_series"):
+                await self._embed_metadata(target, item)
+
             await self._update_item_status(item["id"], "done")
             return True
 
@@ -599,6 +673,79 @@
             await self._update_item_status(item["id"], "error")
             return False
 
+    async def _embed_metadata(self, file_path: str, item: dict) -> bool:
+        """Embeds metadata into the file via ffmpeg"""
+        import asyncio
+
+        series_name = (item.get("tvdb_series_name")
+                       or item.get("detected_series") or "")
+        season = item.get("detected_season") or 0
+        episode = item.get("detected_episode") or 0
+        episode_title = item.get("tvdb_episode_title") or ""
+
+        if not series_name:
+            return False
+
+        # Temporary output file
+        base, ext = os.path.splitext(file_path)
+        temp_file = f"{base}_temp{ext}"
+
+        # ffmpeg metadata command; fall back to "SxxEyy" as the title
+        # when TVDB did not provide an episode title
+        title = episode_title or f"S{season:02d}E{episode:02d}"
+        cmd = [
+            "ffmpeg", "-y", "-i", file_path,
+            "-map", "0",
+            "-c", "copy",
+            "-metadata", f"title={title}",
+            "-metadata", f"show={series_name}",
+            "-metadata", f"season_number={season}",
+            "-metadata", f"episode_sort={episode}",
+            "-metadata", f"episode_id=S{season:02d}E{episode:02d}",
+        ]
+
+        # Additional tags for MKV
+        if file_path.lower().endswith(".mkv"):
+            cmd.extend([
+                "-metadata:s:v:0",
+                f"title={series_name} - S{season:02d}E{episode:02d}",
+            ])
+
+        cmd.append(temp_file)
+
+        try:
+            process = await asyncio.create_subprocess_exec(
+                *cmd,
+                stdout=asyncio.subprocess.PIPE,
+                stderr=asyncio.subprocess.PIPE,
+            )
+            _, stderr = await asyncio.wait_for(
+                process.communicate(), timeout=600  # 10 min for large files
+            )
+
+            if process.returncode == 0:
+                # Move the temporary file over the original
+                os.replace(temp_file, file_path)
+                logging.info(f"Metadaten eingebettet: {os.path.basename(file_path)}")
+                return True
+            else:
+                logging.warning(
+                    f"Metadaten einbetten fehlgeschlagen: "
+                    f"{stderr.decode()[:200]}"
+                )
+                # Remove the temp file if it exists
+                if os.path.exists(temp_file):
+                    os.remove(temp_file)
+                return False
+
+        except asyncio.TimeoutError:
+            logging.warning(f"Metadaten einbetten Timeout: {file_path}")
+            if os.path.exists(temp_file):
+                os.remove(temp_file)
+            return False
+        except Exception as e:
+            logging.warning(f"Metadaten einbetten Fehler: {e}")
+            if os.path.exists(temp_file):
+                os.remove(temp_file)
+            return False
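+
+    # For a file "Foo S01E02.mkv" with TVDB episode title "Pilot", the call
+    # above builds the equivalent of (illustrative, filenames made up):
+    #   ffmpeg -y -i "Foo S01E02.mkv" -map 0 -c copy \
+    #       -metadata "title=Pilot" -metadata "show=Foo" \
+    #       -metadata season_number=1 -metadata episode_sort=2 \
+    #       -metadata episode_id=S01E02 \
+    #       -metadata:s:v:0 "title=Foo - S01E02" "Foo S01E02_temp.mkv"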
+
 
     async def _update_item_status(self, item_id: int, status: str) -> None:
         if not self._db_pool:
@@ -613,6 +760,79 @@
         except Exception:
             pass
 
+    async def _update_file_progress(self, job_id: int, filename: str,
+                                    bytes_done: int, bytes_total: int) -> None:
+        """Updates the byte progress of the file currently being copied"""
+        if not self._db_pool:
+            return
+        try:
+            async with self._db_pool.acquire() as conn:
+                async with conn.cursor() as cur:
+                    await cur.execute(
+                        "UPDATE import_jobs SET "
+                        "current_file_name = %s, "
+                        "current_file_bytes = %s, "
+                        "current_file_total = %s "
+                        "WHERE id = %s",
+                        (filename, bytes_done, bytes_total, job_id)
+                    )
+        except Exception:
+            pass
+
+    async def _copy_with_progress(self, src: str, dst: str,
+                                  job_id: int, filename: str,
+                                  total_size: int) -> None:
+        """Copies a file while writing progress updates to the DB"""
+        import asyncio
+
+        chunk_size = 64 * 1024 * 1024  # 64 MB chunks
+        bytes_copied = 0
+        last_update = 0
+
+        loop = asyncio.get_event_loop()
+
+        # Copy chunk by chunk; reads and writes run in the default
+        # executor so the event loop is never blocked
+        with open(src, 'rb') as fsrc, open(dst, 'wb') as fdst:
+            while True:
+                chunk = await loop.run_in_executor(
+                    None, fsrc.read, chunk_size
+                )
+                if not chunk:
+                    break
+
+                await loop.run_in_executor(None, fdst.write, chunk)
+                bytes_copied += len(chunk)
+
+                # Update the progress only every 50 MB (less DB load)
+                if bytes_copied - last_update >= 50 * 1024 * 1024:
+                    await self._update_file_progress(
+                        job_id, filename, bytes_copied, total_size
+                    )
+                    last_update = bytes_copied
+
+        # Final update
+        await self._update_file_progress(
+            job_id, filename, total_size, total_size
+        )
+
+        # Copy file metadata (timestamps etc.)
+        shutil.copystat(src, dst)
+
     async def resolve_conflict(self, item_id: int, action: str) -> bool:
         """Resolve a conflict: overwrite, skip, rename"""
@@ -685,6 +905,24 @@
             logging.error(f"Import-Item aktualisieren fehlgeschlagen: {e}")
             return False
 
+    async def get_all_jobs(self) -> list:
+        """Lists all import jobs (newest first)"""
+        if not self._db_pool:
+            return []
+        try:
+            async with self._db_pool.acquire() as conn:
+                async with conn.cursor(aiomysql.DictCursor) as cur:
+                    await cur.execute(
+                        "SELECT id, source_path, status, total_files, "
+                        "processed_files, created_at FROM import_jobs "
+                        "ORDER BY id DESC LIMIT 20"
+                    )
+                    jobs = await cur.fetchall()
+                    return [self._serialize(j) for j in jobs]
+        except Exception as e:
+            logging.error(f"Import-Jobs laden fehlgeschlagen: {e}")
+            return []
+
     async def get_job_status(self, job_id: int) -> dict:
         """Status of an import job including all of its items"""
         if not self._db_pool:
@@ -706,9 +944,19 @@
                     )
                     items = await cur.fetchall()
 
+                    # For finished jobs: collect the imported series folders
+                    imported_series = []
+                    if job.get("status") in ("done", "error"):
+                        series_folders = set()
+                        for item in items:
+                            if item.get("status") == "done" and item.get("target_path"):
+                                series_folders.add(item["target_path"])
+                        imported_series = list(series_folders)
+
                     return {
                         "job": self._serialize(job),
                         "items": [self._serialize(i) for i in items],
+                        "imported_series": imported_series,
                     }
         except Exception as e:
             return {"error": str(e)}
diff --git a/app/services/queue.py b/app/services/queue.py
index cbb7bff..031c66e 100644
--- a/app/services/queue.py
+++ b/app/services/queue.py
@@ -66,7 +66,8 @@ class QueueService:
         logging.info("Queue gestoppt")
 
     async def add_job(self, media: MediaFile,
-                      preset_name: Optional[str] = None) -> Optional[ConversionJob]:
+                      preset_name: Optional[str] = None,
+                      delete_source: bool = False) -> Optional[ConversionJob]:
         """Adds a new job to the queue"""
         if self._is_duplicate(media.source_path):
             logging.info(f"Duplikat uebersprungen: {media.source_filename}")
@@ -76,6 +77,7 @@
             preset_name = self.config.default_preset_name
 
         job = ConversionJob(media=media, preset_name=preset_name)
+        job.delete_source = delete_source
         job.build_target_path(self.config)
         self.jobs[job.id] = job
         self._save_queue()
@@ -83,6 +85,7 @@
         logging.info(
             f"Job hinzugefuegt: {media.source_filename} "
             f"-> {job.target_filename} (Preset: {preset_name})"
+            f"{' [delete_source]' if delete_source else ''}"
         )
 
         await self.ws_manager.broadcast_queue_update()
@@ -90,7 +93,8 @@
 
     async def add_paths(self, paths: list[str],
                         preset_name: Optional[str] = None,
-                        recursive: Optional[bool] = None) -> list[ConversionJob]:
+                        recursive: Optional[bool] = None,
+                        delete_source: bool = False) -> list[ConversionJob]:
         """Adds multiple paths (files and folders)"""
         jobs = []
         all_files = []
@@ -107,7 +111,7 @@
         for file_path in all_files:
             media = await ProbeService.analyze(file_path)
             if media:
-                job = await self.add_job(media, preset_name)
+                job = await self.add_job(media, preset_name, delete_source)
                 if job:
                     jobs.append(job)
 
@@ -281,7 +285,10 @@
         """Cleanup after a successful conversion"""
         files_cfg = self.config.files_config
 
-        if files_cfg.get("delete_source", False):
+        # Delete the source file: set globally in the config OR per job option
+        should_delete = files_cfg.get("delete_source", False) or job.delete_source
+
+        if should_delete:
             target_exists = os.path.exists(job.target_path)
             target_size = os.path.getsize(job.target_path) if target_exists else 0
             if target_exists and target_size > 0:
diff --git a/app/services/tvdb.py b/app/services/tvdb.py
index d897227..4a1fc90 100644
--- a/app/services/tvdb.py
+++ b/app/services/tvdb.py
@@ -179,23 +179,51 @@ class TVDBService:
 
         return name, overview
 
-    async def search_series(self, query: str) -> list[dict]:
-        """Searches TVDB for series"""
+    async def search_series(self, query: str,
+                            language: Optional[str] = None) -> list[dict]:
+        """Searches TVDB for series.
+
+        Args:
+            query: search term
+            language: language of the results (e.g. 'deu', 'eng').
+                      None = use the configured language.
+        """
         client = self._get_client()
         if not client:
             return []
 
+        # Language used for localization
+        display_lang = language or self._language
+
         try:
             results = client.search(query, type="series")
             if not results:
                 return []
 
             series_list = []
-            for item in results[:10]:
-                name, overview = self._localize_search_result(item)
+            for item in results[:20]:  # 20 instead of 10 results
+                # Localize with the selected language
+                name = item.get("name", "")
+                overview = item.get("overview", "")
+
+                trans = item.get("translations") or {}
+                if isinstance(trans, dict):
+                    # The selected language, or the original
+                    name = trans.get(display_lang) or name
+
+                overviews = item.get("overviews") or {}
+                if isinstance(overviews, dict):
+                    overview = (overviews.get(display_lang)
+                                or overviews.get("eng")
+                                or overview)
+
+                # Original name, shown when it differs
+                original_name = item.get("name", "")
+
                 series_list.append({
                     "tvdb_id": item.get("tvdb_id") or item.get("objectID"),
                     "name": name,
+                    "original_name": original_name if original_name != name else "",
                     "overview": overview,
                     "first_air_date": item.get("first_air_time")
                                       or item.get("firstAirDate", ""),
diff --git a/app/static/css/style.css b/app/static/css/style.css
index 0883f50..4e734ee 100644
--- a/app/static/css/style.css
+++ b/app/static/css/style.css
@@ -369,14 +369,22 @@ legend {
 }
 
 .toast {
-  padding: 0.6rem 1rem;
-  border-radius: 6px;
-  font-size: 0.8rem;
+  padding: 0.7rem 1.2rem;
+  border-radius: 8px;
+  font-size: 0.85rem;
   margin-bottom: 0.5rem;
-  animation: fadeIn 0.3s ease, fadeOut 0.3s ease 2.7s;
+  opacity: 0;
+  transform: translateX(20px);
+  transition: opacity 0.3s ease, transform 0.3s ease;
+  box-shadow: 0 4px 12px rgba(0,0,0,0.3);
 }
-.toast.success { background: #1b5e20; color: #81c784; }
-.toast.error { background: #b71c1c; color: #ef9a9a; }
+.toast.show {
+  opacity: 1;
+  transform: translateX(0);
+}
+.toast-success { background: #1b5e20; color: #a5d6a7; border-left: 3px solid #4caf50; }
+.toast-error { background: #b71c1c; color: #ef9a9a; border-left: 3px solid #f44336; }
+.toast-info { background: #1565c0; color: #90caf9; border-left: 3px solid #2196f3; }
 
 @keyframes fadeIn { from { opacity: 0; transform: translateY(-10px); } to { opacity: 1; } }
 @keyframes fadeOut { from { opacity: 1; } to { opacity: 0; } }
@@ -1107,6 +1115,40 @@ legend {
   font-size: 0.75rem;
   color: #888;
 }
+.folder-main {
+  display: flex;
+  align-items: center;
+  gap: 0.6rem;
+  flex: 1;
+  min-width: 0;
+  cursor: pointer;
+}
+.btn-folder-delete {
+  position: absolute;
+  top: 0.4rem;
+  right: 0.4rem;
+  background: rgba(0,0,0,0.5);
+  border: none;
+  color: #888;
+  padding: 0.35rem;
+  border-radius: 4px;
+  cursor: pointer;
+  opacity: 0;
+  transition: opacity 0.15s, color 0.15s, background 0.15s;
+  display: flex;
+  align-items: center;
+  justify-content: center;
+}
+.browser-folder {
+  position: relative;
+}
+.browser-folder:hover .btn-folder-delete {
+  opacity: 1;
+}
+.btn-folder-delete:hover {
+  color: #e74c3c;
+  background: rgba(231, 76, 60, 0.2);
+}
 .browser-videos {
   margin-top: 0.5rem;
 }
@@ -1534,6 +1576,142 @@ legend {
   margin-top: 0.3rem;
 }
 
+/* === Codec stats (conversion) === */
+.codec-stats {
+  display: flex;
+  flex-wrap: wrap;
+  gap: 0.3rem;
+  margin: 0.5rem 0;
+}
+.codec-stats .tag {
+  font-size: 0.75rem;
+  padding: 0.2rem 0.5rem;
+}
+
+/* === Notification bell === */
+.notification-bell {
+  position: fixed;
+  bottom: 20px;
+  left: 20px;
+  width: 48px;
+  height: 48px;
+  background: #2a2a2a;
+  border: 1px solid #444;
+  border-radius: 50%;
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  cursor: pointer;
+  color: #888;
+  transition: all 0.2s ease;
+  z-index: 1000;
+  box-shadow: 0 2px 8px rgba(0,0,0,0.3);
+}
+.notification-bell:hover {
+  background: #333;
+  color: #fff;
+  transform: scale(1.05);
+}
+.notification-bell.has-error {
+  color: #ff6b6b;
+  animation: bell-shake 0.5s ease;
+}
+@keyframes bell-shake {
+  0%, 100% { transform: rotate(0); }
+  25% { transform: rotate(-10deg); }
+  75% { transform: rotate(10deg); }
+}
+.notification-badge {
+  position: absolute;
+  top: -4px;
+  right: -4px;
+  background: #e74c3c;
+  color: #fff;
+  font-size: 0.65rem;
+  font-weight: bold;
+  min-width: 18px;
+  height: 18px;
+  border-radius: 9px;
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  padding: 0 4px;
+}
+
+/* === Log panel === */
+.notification-panel {
+  position: fixed;
+  bottom: 80px;
+  left: 20px;
+  width: 400px;
+  max-height: 50vh;
+  background: #1e1e1e;
+  border: 1px solid #444;
+  border-radius: 8px;
+  display: flex;
+  flex-direction: column;
+  z-index: 1001;
+  box-shadow: 0 4px 20px rgba(0,0,0,0.4);
+}
+.notification-header {
+  display: flex;
+  justify-content: space-between;
+  align-items: center;
+  padding: 0.6rem 0.8rem;
+  border-bottom: 1px solid #333;
+  font-weight: 500;
+  color: #ddd;
+}
+.notification-header > div {
+  display: flex;
+  gap: 0.3rem;
+  align-items: center;
+}
+.notification-list {
+  flex: 1;
+  overflow-y: auto;
+  max-height: 45vh;
+}
+.notification-item {
+  padding: 0.5rem 0.8rem;
+  border-bottom: 1px solid #2a2a2a;
+  font-size: 0.8rem;
+  display: flex;
+  gap: 0.5rem;
+  align-items: flex-start;
+}
+.notification-item:hover {
+  background: #252525;
+}
+.notification-item.error {
+  background: rgba(231, 76, 60, 0.1);
+  border-left: 3px solid #e74c3c;
+}
+.notification-item.warning {
+  background: rgba(241, 196, 15, 0.1);
+  border-left: 3px solid #f1c40f;
+}
+.notification-time {
+  color: #666;
+  font-size: 0.7rem;
+  white-space: nowrap;
+  min-width: 55px;
+}
+.notification-msg {
+  color: #ccc;
+  word-break: break-word;
+  flex: 1;
+}
+.notification-item.error .notification-msg {
+  color: #ff8a8a;
+}
+.notification-empty {
+  padding: 2rem;
+  text-align: center;
+  color: #666;
+  font-size: 0.85rem;
+}
+
 /* === Responsive === */
 @media (max-width: 768px) {
   header { flex-direction: column; gap: 0.5rem; }
diff --git a/app/static/js/library.js b/app/static/js/library.js
index 72d3912..ae14cd0 100644
--- a/app/static/js/library.js
+++ b/app/static/js/library.js
@@ -412,12 +412,23 @@ function renderBrowser(folders, videos, pathId) {
     html += '
${escapeHtml((r.overview || "").substring(0, 150))}