diff --git a/Dockerfile b/Dockerfile index 61be73f..5f17a33 100644 --- a/Dockerfile +++ b/Dockerfile @@ -17,6 +17,16 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ ENV LIBVA_DRIVER_NAME=iHD ENV LIBVA_DRIVERS_PATH=/usr/lib/x86_64-linux-gnu/dri +# VideoKonverter Defaults (ueberschreibbar per docker run -e / Unraid UI) +ENV VK_DB_HOST=localhost +ENV VK_DB_PORT=3306 +ENV VK_DB_USER=video +ENV VK_DB_PASSWORD="" +ENV VK_DB_NAME=video_converter +ENV VK_MODE=cpu +ENV VK_PORT=8080 +ENV VK_LOG_LEVEL=INFO + WORKDIR /opt/video-konverter # Python-Abhaengigkeiten diff --git a/app/config.py b/app/config.py index f4a8b05..a24d37e 100644 --- a/app/config.py +++ b/app/config.py @@ -1,4 +1,16 @@ -"""Konfigurationsmanagement - Singleton fuer Settings und Presets""" +"""Konfigurationsmanagement - Singleton fuer Settings und Presets + +Alle wichtigen Settings koennen per Umgebungsvariable ueberschrieben werden. +ENV-Variablen haben IMMER Vorrang vor settings.yaml. + +Mapping (VK_ Prefix): + Datenbank: VK_DB_HOST, VK_DB_PORT, VK_DB_USER, VK_DB_PASSWORD, VK_DB_NAME + Encoding: VK_MODE (cpu/gpu/auto), VK_GPU_DEVICE, VK_MAX_JOBS, VK_DEFAULT_PRESET + Server: VK_PORT, VK_HOST, VK_EXTERNAL_URL + Library: VK_TVDB_API_KEY, VK_TVDB_LANGUAGE, VK_LIBRARY_ENABLED (true/false) + Dateien: VK_TARGET_CONTAINER (webm/mkv/mp4) + Logging: VK_LOG_LEVEL (DEBUG/INFO/WARNING/ERROR) +""" import os import logging import yaml @@ -6,9 +18,107 @@ from pathlib import Path from typing import Optional from logging.handlers import TimedRotatingFileHandler, RotatingFileHandler +# Mapping: ENV-Variable -> (settings-pfad, typ) +# Pfad als Tuple: ("section", "key") +_ENV_MAP: dict[str, tuple[tuple[str, str], type]] = { + "VK_DB_HOST": (("database", "host"), str), + "VK_DB_PORT": (("database", "port"), int), + "VK_DB_USER": (("database", "user"), str), + "VK_DB_PASSWORD": (("database", "password"), str), + "VK_DB_NAME": (("database", "database"), str), + "VK_MODE": (("encoding", "mode"), str), + 
"VK_GPU_DEVICE": (("encoding", "gpu_device"), str), + "VK_MAX_JOBS": (("encoding", "max_parallel_jobs"), int), + "VK_DEFAULT_PRESET": (("encoding", "default_preset"), str), + "VK_PORT": (("server", "port"), int), + "VK_HOST": (("server", "host"), str), + "VK_EXTERNAL_URL": (("server", "external_url"), str), + "VK_TVDB_API_KEY": (("library", "tvdb_api_key"), str), + "VK_TVDB_LANGUAGE": (("library", "tvdb_language"), str), + "VK_LIBRARY_ENABLED": (("library", "enabled"), bool), + "VK_TARGET_CONTAINER": (("files", "target_container"), str), + "VK_LOG_LEVEL": (("logging", "level"), str), +} + +# Rueckwaertskompatibilitaet +_ENV_ALIASES: dict[str, str] = { + "VIDEO_KONVERTER_MODE": "VK_MODE", +} + +# Default-Settings wenn keine settings.yaml existiert +_DEFAULT_SETTINGS: dict = { + "database": { + "host": "localhost", + "port": 3306, + "user": "video", + "password": "", + "database": "video_converter", + }, + "encoding": { + "mode": "cpu", + "default_preset": "cpu_av1", + "gpu_device": "/dev/dri/renderD128", + "gpu_driver": "iHD", + "max_parallel_jobs": 1, + }, + "server": { + "host": "0.0.0.0", + "port": 8080, + "external_url": "", + "use_https": False, + "websocket_path": "/ws", + }, + "files": { + "delete_source": False, + "recursive_scan": True, + "scan_extensions": [".mkv", ".mp4", ".avi", ".wmv", ".vob", ".ts", ".m4v", ".flv", ".mov"], + "target_container": "webm", + "target_folder": "same", + }, + "audio": { + "bitrate_map": {2: "128k", 6: "320k", 8: "450k"}, + "default_bitrate": "192k", + "default_codec": "libopus", + "keep_channels": True, + "languages": ["ger", "eng", "und"], + }, + "subtitle": { + "codec_blacklist": ["hdmv_pgs_subtitle", "dvd_subtitle", "dvb_subtitle"], + "languages": ["ger", "eng"], + }, + "library": { + "enabled": True, + "import_default_mode": "copy", + "import_naming_pattern": "{series} - S{season:02d}E{episode:02d} - {title}.{ext}", + "import_season_pattern": "Season {season:02d}", + "scan_interval_hours": 0, + "tvdb_api_key": "", + 
"tvdb_language": "deu", + "tvdb_pin": "", + }, + "cleanup": { + "enabled": False, + "delete_extensions": [".avi", ".wmv", ".vob", ".nfo", ".txt", ".jpg", ".png", ".srt", ".sub", ".idx"], + "keep_extensions": [".srt"], + "exclude_patterns": ["readme*", "*.md"], + }, + "logging": { + "level": "INFO", + "file": "server.log", + "rotation": "time", + "backup_count": 7, + "max_size_mb": 10, + }, + "statistics": { + "cleanup_days": 365, + "max_entries": 5000, + }, +} + class Config: - """Laedt und verwaltet settings.yaml und presets.yaml""" + """Laedt und verwaltet settings.yaml und presets.yaml. + ENV-Variablen (VK_*) ueberschreiben YAML-Werte.""" _instance: Optional['Config'] = None def __new__(cls) -> 'Config': @@ -28,6 +138,7 @@ class Config: self._data_path = self._base_path.parent / "data" # Verzeichnisse sicherstellen + self._cfg_path.mkdir(parents=True, exist_ok=True) self._log_path.mkdir(parents=True, exist_ok=True) self._data_path.mkdir(parents=True, exist_ok=True) @@ -38,55 +149,89 @@ class Config: self._apply_env_overrides() def _load_settings(self) -> None: - """Laedt settings.yaml""" + """Laedt settings.yaml oder erzeugt Defaults""" + import copy settings_file = self._cfg_path / "settings.yaml" - try: - with open(settings_file, "r", encoding="utf-8") as f: - self.settings = yaml.safe_load(f) or {} - logging.info(f"Settings geladen: {settings_file}") - except FileNotFoundError: - logging.error(f"Settings nicht gefunden: {settings_file}") - self.settings = {} + if settings_file.exists(): + try: + with open(settings_file, "r", encoding="utf-8") as f: + self.settings = yaml.safe_load(f) or {} + logging.info(f"Settings geladen: {settings_file}") + except Exception as e: + logging.error(f"Settings lesen fehlgeschlagen: {e}") + self.settings = copy.deepcopy(_DEFAULT_SETTINGS) + else: + # Keine settings.yaml -> Defaults verwenden und speichern + logging.info("Keine settings.yaml gefunden - erzeuge Defaults") + self.settings = copy.deepcopy(_DEFAULT_SETTINGS) + 
self._save_yaml(settings_file, self.settings) def _load_presets(self) -> None: """Laedt presets.yaml""" presets_file = self._cfg_path / "presets.yaml" - try: - with open(presets_file, "r", encoding="utf-8") as f: - self.presets = yaml.safe_load(f) or {} - logging.info(f"Presets geladen: {presets_file}") - except FileNotFoundError: - logging.error(f"Presets nicht gefunden: {presets_file}") + if presets_file.exists(): + try: + with open(presets_file, "r", encoding="utf-8") as f: + self.presets = yaml.safe_load(f) or {} + logging.info(f"Presets geladen: {presets_file}") + except Exception as e: + logging.error(f"Presets lesen fehlgeschlagen: {e}") + self.presets = {} + else: + logging.warning("Keine presets.yaml gefunden - verwende leere Presets") self.presets = {} def _apply_env_overrides(self) -> None: - """Umgebungsvariablen ueberschreiben Settings""" - env_mode = os.environ.get("VIDEO_KONVERTER_MODE") - if env_mode and env_mode in ("cpu", "gpu", "auto"): - self.settings.setdefault("encoding", {})["mode"] = env_mode - logging.info(f"Encoding-Modus per Umgebungsvariable: {env_mode}") + """Umgebungsvariablen (VK_*) ueberschreiben Settings. + Unterstuetzt auch alte Variablennamen per Alias-Mapping.""" + applied = [] + + # Aliase aufloesen (z.B. 
VIDEO_KONVERTER_MODE -> VK_MODE)
+        for old_name, new_name in _ENV_ALIASES.items():
+            if old_name in os.environ and new_name not in os.environ:
+                os.environ[new_name] = os.environ[old_name]
+
+        for env_key, ((section, key), val_type) in _ENV_MAP.items():
+            raw = os.environ.get(env_key)
+            if raw is None:
+                continue
+
+            # Typ-Konvertierung
+            try:
+                if val_type is bool:
+                    value = raw.lower() in ("true", "1", "yes", "on")
+                elif val_type is int:
+                    value = int(raw)
+                else:
+                    value = raw
+            except (ValueError, TypeError):
+                logging.warning(f"ENV {env_key}={raw!r} - ungueltiger Wert, uebersprungen")
+                continue
+
+            self.settings.setdefault(section, {})[key] = value
+            applied.append(f"{env_key}={value}")
+
+        if applied:
+            logging.info(f"ENV-Overrides angewendet: {', '.join(applied)}")
+
+    @staticmethod
+    def _save_yaml(path: Path, data: dict) -> None:
+        """Schreibt dict als YAML in Datei"""
+        try:
+            with open(path, "w", encoding="utf-8") as f:
+                yaml.dump(data, f, default_flow_style=False,
+                          indent=2, allow_unicode=True)
+            logging.info(f"YAML gespeichert: {path}")
+        except Exception as e:
+            logging.error(f"YAML speichern fehlgeschlagen ({path}): {e}")
 
     def save_settings(self) -> None:
         """Schreibt aktuelle Settings zurueck in settings.yaml"""
-        settings_file = self._cfg_path / "settings.yaml"
-        try:
-            with open(settings_file, "w", encoding="utf-8") as f:
-                yaml.dump(self.settings, f, default_flow_style=False,
-                          indent=2, allow_unicode=True)
-            logging.info("Settings gespeichert")
-        except Exception as e:
-            logging.error(f"Settings speichern fehlgeschlagen: {e}")
+        self._save_yaml(self._cfg_path / "settings.yaml", self.settings)
 
     def save_presets(self) -> None:
         """Schreibt Presets zurueck in presets.yaml"""
-        presets_file = self._cfg_path / "presets.yaml"
-        try:
-            with open(presets_file, "w", encoding="utf-8") as f:
-                yaml.dump(self.presets, f, default_flow_style=False,
-                          indent=2, allow_unicode=True)
-            logging.info("Presets gespeichert")
-        except Exception as e:
-            
logging.error(f"Presets speichern fehlgeschlagen: {e}") + self._save_yaml(self._cfg_path / "presets.yaml", self.presets) def setup_logging(self) -> None: """Konfiguriert Logging mit Rotation""" diff --git a/app/routes/api.py b/app/routes/api.py index d198fbe..2e3e468 100644 --- a/app/routes/api.py +++ b/app/routes/api.py @@ -1,4 +1,5 @@ """REST API Endpoints""" +import asyncio import logging import os from pathlib import Path @@ -7,11 +8,13 @@ from app.config import Config from app.services.queue import QueueService from app.services.scanner import ScannerService from app.services.encoder import EncoderService +from app.routes.ws import WebSocketManager def setup_api_routes(app: web.Application, config: Config, queue_service: QueueService, - scanner: ScannerService) -> None: + scanner: ScannerService, + ws_manager: WebSocketManager = None) -> None: """Registriert alle API-Routes""" # --- Job-Management --- @@ -335,42 +338,33 @@ def setup_api_routes(app: web.Application, config: Config, "jobs": [{"id": j.id, "file": j.media.source_filename} for j in jobs], }) - # --- Logs --- + # --- Logs via WebSocket --- - # In-Memory Log-Buffer - _log_buffer = [] - _log_id = 0 - _MAX_LOGS = 200 + class WebSocketLogHandler(logging.Handler): + """Pusht Logs direkt per WebSocket an alle Clients""" + def __init__(self, ws_mgr: WebSocketManager): + super().__init__() + self._ws_manager = ws_mgr - class WebLogHandler(logging.Handler): - """Handler der Logs an den Buffer sendet""" def emit(self, record): - nonlocal _log_id - _log_id += 1 - entry = { - "id": _log_id, - "level": record.levelname, - "message": record.getMessage(), - "time": record.created, - } - _log_buffer.append(entry) - # Buffer begrenzen - while len(_log_buffer) > _MAX_LOGS: - _log_buffer.pop(0) + if not self._ws_manager or not self._ws_manager.clients: + return + try: + loop = asyncio.get_running_loop() + loop.create_task( + self._ws_manager.broadcast_log( + record.levelname, record.getMessage() + ) + ) + except 
RuntimeError: + pass - # Handler registrieren - web_handler = WebLogHandler() - web_handler.setLevel(logging.INFO) - logging.getLogger().addHandler(web_handler) - - async def get_logs(request: web.Request) -> web.Response: - """GET /api/logs?since=123 - Logs seit ID""" - since = int(request.query.get("since", 0)) - logs = [l for l in _log_buffer if l["id"] > since] - return web.json_response({"logs": logs}) + if ws_manager: + ws_log_handler = WebSocketLogHandler(ws_manager) + ws_log_handler.setLevel(logging.INFO) + logging.getLogger().addHandler(ws_log_handler) # --- Routes registrieren --- - app.router.add_get("/api/logs", get_logs) app.router.add_get("/api/browse", get_browse) app.router.add_post("/api/upload", post_upload) app.router.add_post("/api/convert", post_convert) diff --git a/app/routes/library_api.py b/app/routes/library_api.py index 5798976..b2064a3 100644 --- a/app/routes/library_api.py +++ b/app/routes/library_api.py @@ -597,6 +597,19 @@ def setup_library_routes(app: web.Application, config: Config, dupes = await library_service.find_duplicates() return web.json_response({"duplicates": dupes}) + # === Video loeschen === + + async def delete_video(request: web.Request) -> web.Response: + """DELETE /api/library/videos/{video_id}?delete_file=1""" + video_id = int(request.match_info["video_id"]) + delete_file = request.query.get("delete_file") == "1" + result = await library_service.delete_video( + video_id, delete_file=delete_file + ) + if result.get("error"): + return web.json_response(result, status=404) + return web.json_response(result) + # === Konvertierung aus Bibliothek === async def post_convert_video(request: web.Request) -> web.Response: @@ -829,6 +842,7 @@ def setup_library_routes(app: web.Application, config: Config, Body: {folder_path: "/mnt/.../Season 01"} ACHTUNG: Unwiderruflich! 
""" + import os import shutil try: data = await request.json() @@ -901,10 +915,26 @@ def setup_library_routes(app: web.Application, config: Config, except Exception as e: errors.append(f"DB-Fehler: {e}") - # Ordner loeschen + # Ordner loeschen (onerror fuer SMB/CIFS Permission-Probleme) + def _rm_error(func, path, exc_info): + """Bei Permission-Fehler: Schreibrechte setzen und nochmal versuchen""" + import stat + try: + os.chmod(path, stat.S_IRWXU) + func(path) + except Exception as e2: + errors.append(f"{path}: {e2}") + try: - shutil.rmtree(folder_path) - logging.info(f"Ordner geloescht: {folder_path}") + shutil.rmtree(folder_path, onerror=_rm_error) + if os.path.exists(folder_path): + # Ordner existiert noch -> nicht alles geloescht + logging.warning( + f"Ordner teilweise geloescht: {folder_path} " + f"({len(errors)} Fehler)" + ) + else: + logging.info(f"Ordner geloescht: {folder_path}") except Exception as e: logging.error(f"Ordner loeschen fehlgeschlagen: {e}") return web.json_response( @@ -1217,6 +1247,158 @@ def setup_library_routes(app: web.Application, config: Config, {"error": "Ungueltige Aktion"}, status=400 ) + # === Video-Streaming === + + async def get_stream_video(request: web.Request) -> web.StreamResponse: + """GET /api/library/videos/{video_id}/stream?t=0 + Streamt Video per ffmpeg-Transcoding (Video copy, Audio->AAC). + Browser-kompatibel fuer alle Codecs (EAC3, DTS, AC3 etc.). 
+ Optional: ?t=120 fuer Seeking auf Sekunde 120.""" + import os + import asyncio as _asyncio + import shlex + + video_id = int(request.match_info["video_id"]) + + pool = await library_service._get_pool() + if not pool: + return web.json_response( + {"error": "Keine DB-Verbindung"}, status=500 + ) + + try: + async with pool.acquire() as conn: + async with conn.cursor() as cur: + await cur.execute( + "SELECT file_path FROM library_videos WHERE id = %s", + (video_id,) + ) + row = await cur.fetchone() + if not row: + return web.json_response( + {"error": "Video nicht gefunden"}, status=404 + ) + except Exception as e: + return web.json_response({"error": str(e)}, status=500) + + file_path = row[0] + if not os.path.isfile(file_path): + return web.json_response( + {"error": "Datei nicht gefunden"}, status=404 + ) + + # Seek-Position (Sekunden) aus Query-Parameter + seek_sec = float(request.query.get("t", "0")) + + # ffmpeg-Kommando: Video copy, Audio -> AAC Stereo, MP4-Container + cmd = [ + "ffmpeg", "-hide_banner", "-loglevel", "error", + ] + if seek_sec > 0: + cmd += ["-ss", str(seek_sec)] + cmd += [ + "-i", file_path, + "-c:v", "copy", + "-c:a", "aac", "-ac", "2", "-b:a", "192k", + "-movflags", "frag_keyframe+empty_moov+faststart", + "-f", "mp4", + "pipe:1", + ] + + resp = web.StreamResponse( + status=200, + headers={ + "Content-Type": "video/mp4", + "Cache-Control": "no-cache", + "Transfer-Encoding": "chunked", + }, + ) + await resp.prepare(request) + + proc = None + try: + proc = await _asyncio.create_subprocess_exec( + *cmd, + stdout=_asyncio.subprocess.PIPE, + stderr=_asyncio.subprocess.PIPE, + ) + + chunk_size = 256 * 1024 # 256 KB + while True: + chunk = await proc.stdout.read(chunk_size) + if not chunk: + break + try: + await resp.write(chunk) + except (ConnectionResetError, ConnectionAbortedError): + # Client hat Verbindung geschlossen + break + + except Exception as e: + logging.error(f"Stream-Fehler: {e}") + finally: + if proc and proc.returncode is None: + 
proc.kill() + await proc.wait() + + await resp.write_eof() + return resp + + # === Import: Item zuordnen / ueberspringen === + + async def post_reassign_import_item( + request: web.Request, + ) -> web.Response: + """POST /api/library/import/items/{item_id}/reassign + Weist einem nicht-erkannten Item eine Serie zu.""" + if not importer_service: + return web.json_response( + {"error": "Import-Service nicht verfuegbar"}, status=500 + ) + item_id = int(request.match_info["item_id"]) + try: + data = await request.json() + except Exception: + return web.json_response( + {"error": "Ungueltiges JSON"}, status=400 + ) + + series_name = data.get("series_name", "").strip() + season = data.get("season") + episode = data.get("episode") + tvdb_id = data.get("tvdb_id") + + if not series_name or season is None or episode is None: + return web.json_response( + {"error": "series_name, season und episode erforderlich"}, + status=400, + ) + + result = await importer_service.reassign_item( + item_id, series_name, + int(season), int(episode), + int(tvdb_id) if tvdb_id else None + ) + if result.get("error"): + return web.json_response(result, status=400) + return web.json_response(result) + + async def post_skip_import_item( + request: web.Request, + ) -> web.Response: + """POST /api/library/import/items/{item_id}/skip""" + if not importer_service: + return web.json_response( + {"error": "Import-Service nicht verfuegbar"}, status=500 + ) + item_id = int(request.match_info["item_id"]) + success = await importer_service.skip_item(item_id) + if success: + return web.json_response({"message": "Item uebersprungen"}) + return web.json_response( + {"error": "Fehlgeschlagen"}, status=400 + ) + # === Routes registrieren === # Pfade app.router.add_get("/api/library/paths", get_paths) @@ -1230,6 +1412,9 @@ def setup_library_routes(app: web.Application, config: Config, # Videos / Filme app.router.add_get("/api/library/videos", get_videos) app.router.add_get("/api/library/movies", get_movies) + 
app.router.add_delete( + "/api/library/videos/{video_id}", delete_video + ) # Serien app.router.add_get("/api/library/series", get_series) app.router.add_get("/api/library/series/{series_id}", get_series_detail) @@ -1325,6 +1510,18 @@ def setup_library_routes(app: web.Application, config: Config, app.router.add_put( "/api/library/import/items/{item_id}/resolve", put_resolve_conflict ) + app.router.add_post( + "/api/library/import/items/{item_id}/reassign", + post_reassign_import_item, + ) + app.router.add_post( + "/api/library/import/items/{item_id}/skip", + post_skip_import_item, + ) + # Video-Streaming + app.router.add_get( + "/api/library/videos/{video_id}/stream", get_stream_video + ) # TVDB Auto-Match (Review-Modus) app.router.add_post( "/api/library/tvdb-auto-match", post_tvdb_auto_match diff --git a/app/routes/ws.py b/app/routes/ws.py index 767ac3b..9852003 100644 --- a/app/routes/ws.py +++ b/app/routes/ws.py @@ -83,6 +83,12 @@ class WebSocketManager: """Sendet Fortschritts-Update fuer einen Job""" await self.broadcast({"data_flow": job.to_dict_progress()}) + async def broadcast_log(self, level: str, message: str) -> None: + """Sendet Log-Nachricht an alle Clients""" + await self.broadcast({ + "data_log": {"level": level, "message": message} + }) + async def _handle_message(self, data: dict) -> None: """Verarbeitet eingehende WebSocket-Nachrichten""" if not self.queue_service: diff --git a/app/server.py b/app/server.py index 967b40e..d1a3486 100644 --- a/app/server.py +++ b/app/server.py @@ -60,7 +60,8 @@ class VideoKonverterServer: # API Routes setup_api_routes( - self.app, self.config, self.queue_service, self.scanner + self.app, self.config, self.queue_service, self.scanner, + self.ws_manager ) # Bibliothek API Routes diff --git a/app/services/encoder.py b/app/services/encoder.py index cf230e6..a203773 100644 --- a/app/services/encoder.py +++ b/app/services/encoder.py @@ -133,6 +133,14 @@ class EncoderService: if keep_channels: 
cmd.extend([f"-ac:{audio_idx}", str(channels)]) + # Channel-Layout normalisieren fuer libopus + # EAC3/AC3 mit 5.1(side) Layout fuehrt zu Encoder-Fehler + if codec == "libopus" and channels == 6: + cmd.extend([ + f"-filter:a:{audio_idx}", + "channelmap=channel_layout=5.1", + ]) + audio_idx += 1 return cmd diff --git a/app/services/importer.py b/app/services/importer.py index 1254b9f..6765bf6 100644 --- a/app/services/importer.py +++ b/app/services/importer.py @@ -302,7 +302,7 @@ class ImporterService: pattern = job.get("naming_pattern") or self._naming_pattern season_pattern = job.get("season_pattern") or self._season_pattern target_dir, target_file = self._build_target( - tvdb_name or series_name or "Unbekannt", + tvdb_name or series_name or "Unbekannte Serie", season, episode, tvdb_ep_title or "", ext, @@ -456,14 +456,21 @@ class ImporterService: # Season-Ordner season_dir = season_pattern.format(season=s) - # Dateiname + # Dateiname - kein Titel: ohne Titel-Teil, sonst mit try: - filename = pattern.format( - series=series, season=s, episode=e, - title=title or "Unbekannt", ext=ext - ) + if title: + filename = pattern.format( + series=series, season=s, episode=e, + title=title, ext=ext + ) + else: + # Ohne Titel: "Serie - S01E03.ext" + filename = f"{series} - S{s:02d}E{e:02d}.{ext}" except (KeyError, ValueError): - filename = f"{series} - S{s:02d}E{e:02d} - {title or 'Unbekannt'}.{ext}" + if title: + filename = f"{series} - S{s:02d}E{e:02d} - {title}.{ext}" + else: + filename = f"{series} - S{s:02d}E{e:02d}.{ext}" # Ungueltige Zeichen entfernen for ch in ['<', '>', ':', '"', '|', '?', '*']: @@ -638,6 +645,34 @@ class ImporterService: # Zielordner erstellen os.makedirs(target_dir, exist_ok=True) + # Alte Dateien fuer dieselbe Episode aufraeumen + # (z.B. 
"S01E03 - Unbekannt.mkv" wenn jetzt "S01E03 - Willkür.mkv" kommt) + season = item.get("detected_season") + episode = item.get("detected_episode") + if season is not None and episode is not None and os.path.isdir(target_dir): + ep_pattern = f"S{season:02d}E{episode:02d}" + for existing in os.listdir(target_dir): + existing_path = os.path.join(target_dir, existing) + if (existing != target_file + and ep_pattern in existing + and os.path.isfile(existing_path)): + logging.info( + f"Import: Alte Episode-Datei entfernt: {existing}" + ) + os.remove(existing_path) + # Auch aus library_videos loeschen + if self._db_pool: + try: + async with self._db_pool.acquire() as conn: + async with conn.cursor() as cur: + await cur.execute( + "DELETE FROM library_videos " + "WHERE file_path = %s", + (existing_path,) + ) + except Exception: + pass + # Fortschritt-Tracking in DB setzen if job_id and self._db_pool: await self._update_file_progress( @@ -905,6 +940,118 @@ class ImporterService: logging.error(f"Import-Item aktualisieren fehlgeschlagen: {e}") return False + async def reassign_item(self, item_id: int, + series_name: str, + season: int, episode: int, + tvdb_id: int = None) -> dict: + """Weist einem pending-Item eine Serie/Staffel/Episode zu. + + Berechnet automatisch den Zielpfad und holt ggf. TVDB-Episodentitel. 
+ """ + if not self._db_pool: + return {"error": "Keine DB-Verbindung"} + + try: + async with self._db_pool.acquire() as conn: + async with conn.cursor(aiomysql.DictCursor) as cur: + # Item laden + await cur.execute( + "SELECT i.*, j.target_library_id, j.naming_pattern, " + "j.season_pattern FROM import_items i " + "JOIN import_jobs j ON j.id = i.import_job_id " + "WHERE i.id = %s", (item_id,) + ) + item = await cur.fetchone() + if not item: + return {"error": "Item nicht gefunden"} + + # Library-Pfad laden + await cur.execute( + "SELECT * FROM library_paths WHERE id = %s", + (item["target_library_id"],) + ) + lib_path = await cur.fetchone() + if not lib_path: + return {"error": "Ziel-Library nicht gefunden"} + + # TVDB-Name und Episodentitel holen + tvdb_name = series_name + tvdb_ep_title = "" + if tvdb_id and self.tvdb.is_configured: + # Serien-Info von TVDB holen + try: + info = await self.tvdb.get_series_info(tvdb_id) + if info and info.get("name"): + tvdb_name = info["name"] + except Exception: + pass + # Episodentitel holen + tvdb_ep_title = await self._get_episode_title( + tvdb_id, season, episode + ) + + # Zielpfad berechnen + ext = os.path.splitext(item["source_file"])[1].lstrip(".") + pattern = item.get("naming_pattern") or self._naming_pattern + season_pattern = item.get("season_pattern") or self._season_pattern + target_dir, target_file = self._build_target( + tvdb_name or series_name, + season, episode, + tvdb_ep_title or "", + ext, + lib_path["path"], + pattern, season_pattern + ) + + # In DB aktualisieren + async with self._db_pool.acquire() as conn: + async with conn.cursor() as cur: + await cur.execute(""" + UPDATE import_items SET + detected_series = %s, + detected_season = %s, + detected_episode = %s, + tvdb_series_id = %s, + tvdb_series_name = %s, + tvdb_episode_title = %s, + target_path = %s, + target_filename = %s, + status = 'matched' + WHERE id = %s + """, ( + series_name, season, episode, + tvdb_id, tvdb_name, tvdb_ep_title, + target_dir, 
target_file, item_id, + )) + + return { + "ok": True, + "target_dir": target_dir, + "target_file": target_file, + "tvdb_name": tvdb_name, + "tvdb_ep_title": tvdb_ep_title, + } + + except Exception as e: + logging.error(f"Import-Item zuordnen fehlgeschlagen: {e}") + return {"error": str(e)} + + async def skip_item(self, item_id: int) -> bool: + """Markiert ein Item als uebersprungen""" + if not self._db_pool: + return False + try: + async with self._db_pool.acquire() as conn: + async with conn.cursor() as cur: + await cur.execute( + "UPDATE import_items SET status = 'skipped', " + "conflict_reason = 'Manuell uebersprungen' " + "WHERE id = %s", (item_id,) + ) + return True + except Exception: + return False + async def get_all_jobs(self) -> list: """Liste aller Import-Jobs (neueste zuerst)""" if not self._db_pool: diff --git a/app/services/library.py b/app/services/library.py index f4173cf..e0f8cee 100644 --- a/app/services/library.py +++ b/app/services/library.py @@ -385,8 +385,15 @@ class LibraryService: # Dateisystem loeschen wenn gewuenscht if delete_files and folder_path and os.path.isdir(folder_path): import shutil + import stat + def _rm_error(func, path, exc_info): + try: + os.chmod(path, stat.S_IRWXU) + func(path) + except Exception: + pass try: - shutil.rmtree(folder_path) + shutil.rmtree(folder_path, onerror=_rm_error) result["deleted_folder"] = folder_path logging.info( f"Serie {series_id} komplett geloescht " @@ -409,6 +416,56 @@ class LibraryService: logging.error(f"Serie loeschen fehlgeschlagen: {e}") return {"error": str(e)} + async def delete_video(self, video_id: int, + delete_file: bool = False) -> dict: + """Einzelnes Video loeschen (DB + optional Datei)""" + pool = await self._get_pool() + if not pool: + return {"error": "Keine DB-Verbindung"} + + try: + async with pool.acquire() as conn: + async with conn.cursor() as cur: + await cur.execute( + "SELECT file_path FROM library_videos WHERE id = %s", + (video_id,) + ) + row = await cur.fetchone() + 
if not row: + return {"error": "Video nicht gefunden"} + + file_path = row[0] + + # Aus DB loeschen + await cur.execute( + "DELETE FROM library_videos WHERE id = %s", + (video_id,) + ) + + result = {"success": True, "file_path": file_path} + + # Datei loeschen wenn gewuenscht + if delete_file and file_path and os.path.isfile(file_path): + try: + os.remove(file_path) + result["file_deleted"] = True + logging.info(f"Video geloescht: {file_path}") + except Exception as e: + result["file_error"] = str(e) + logging.error( + f"Video-Datei loeschen fehlgeschlagen: " + f"{file_path}: {e}" + ) + elif delete_file: + result["file_deleted"] = False + result["file_error"] = "Datei nicht gefunden" + + return result + + except Exception as e: + logging.error(f"Video loeschen fehlgeschlagen: {e}") + return {"error": str(e)} + async def get_movies(self, filters: dict = None, page: int = 1, limit: int = 50) -> dict: """Nur Filme (keine Serien) abfragen""" @@ -1600,8 +1657,15 @@ class LibraryService: if delete_files and folder_path and os.path.isdir(folder_path): import shutil + import stat + def _rm_error(func, path, exc_info): + try: + os.chmod(path, stat.S_IRWXU) + func(path) + except Exception: + pass try: - shutil.rmtree(folder_path) + shutil.rmtree(folder_path, onerror=_rm_error) result["deleted_folder"] = folder_path except Exception as e: result["folder_error"] = str(e) diff --git a/app/static/css/style.css b/app/static/css/style.css index 4e734ee..a546464 100644 --- a/app/static/css/style.css +++ b/app/static/css/style.css @@ -1441,6 +1441,54 @@ legend { } .row-conflict { background: #2a1a10 !important; } .row-conflict:hover { background: #332010 !important; } +.row-pending { background: #2a1020 !important; } +.row-pending:hover { background: #331030 !important; } + +/* === Play-Button === */ +.btn-play { + background: #2a7a2a; + color: #fff; + border: none; + border-radius: 4px; + padding: 0.2rem 0.5rem; + cursor: pointer; + font-size: 0.8rem; +} +.btn-play:hover { 
background: #3a9a3a; } + +/* === Video-Player Modal === */ +.player-overlay { + z-index: 10000; + background: rgba(0, 0, 0, 0.95); +} +.player-container { + width: 95vw; + max-width: 1400px; + display: flex; + flex-direction: column; +} +.player-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: 0.5rem 0.8rem; + color: #fff; + font-size: 0.9rem; +} +.player-header .btn-close { + font-size: 1.5rem; + color: #aaa; + background: none; + border: none; + cursor: pointer; +} +.player-header .btn-close:hover { color: #fff; } +#player-video { + width: 100%; + max-height: 85vh; + background: #000; + border-radius: 4px; +} /* === TVDB Review-Modal === */ .tvdb-review-list { diff --git a/app/static/js/library.js b/app/static/js/library.js index ae14cd0..0f37c32 100644 --- a/app/static/js/library.js +++ b/app/static/js/library.js @@ -262,7 +262,13 @@ function loadSectionSeries(pathId) { // === Ordner pro Bereich === +let _browserLoading = false; + function loadSectionBrowser(pathId, subPath) { + // Doppelklick-Schutz: Zweiten Aufruf ignorieren solange geladen wird + if (_browserLoading) return; + _browserLoading = true; + const content = document.getElementById("content-" + pathId); content.innerHTML = '
Lade Ordner...
'; @@ -281,7 +287,8 @@ function loadSectionBrowser(pathId, subPath) { html += renderBrowser(data.folders || [], data.videos || [], pathId); content.innerHTML = html; }) - .catch(() => { content.innerHTML = '
Fehler
'; }); + .catch(() => { content.innerHTML = '
Fehler
'; }) + .finally(() => { _browserLoading = false; }); } // === Video-Tabelle (gemeinsam genutzt) === @@ -306,6 +313,7 @@ function renderVideoTable(items) { const res = v.width && v.height ? resolutionLabel(v.width, v.height) : "-"; const is10bit = v.is_10bit ? ' 10bit' : ""; + const vidTitle = v.file_name || "Video"; html += ` ${escapeHtml(v.file_name || "-")} ${res}${is10bit} @@ -315,7 +323,11 @@ function renderVideoTable(items) { ${formatSize(v.file_size || 0)} ${formatDuration(v.duration_sec || 0)} ${(v.container || "-").toUpperCase()} - + + + + + `; } html += ''; @@ -557,13 +569,18 @@ function renderEpisodesTab(series) { return `${lang} ${channelLayout(a.channels)}`; }).join(" "); const res = ep.width && ep.height ? resolutionLabel(ep.width, ep.height) : "-"; + const epTitle = ep.episode_title || ep.file_name || "Episode"; html += ` ${ep.episode_number || "-"} - ${escapeHtml(ep.episode_title || ep.file_name || "-")} + ${escapeHtml(epTitle)} ${res} ${ep.video_codec || "-"} ${audioInfo || "-"} - + + + + + `; } } @@ -831,6 +848,7 @@ function openMovieDetail(movieId) { return `${lang} ${channelLayout(a.channels)}`; }).join(" "); const res = v.width && v.height ? resolutionLabel(v.width, v.height) : "-"; + const movieTitle = v.file_name || "Video"; html += ` ${escapeHtml(v.file_name || "-")} ${res}${v.is_10bit ? ' 10bit' : ''} @@ -838,7 +856,11 @@ function openMovieDetail(movieId) { ${audioInfo || "-"} ${formatSize(v.file_size || 0)} ${formatDuration(v.duration_sec || 0)} - + + + + + `; } html += ''; @@ -1892,10 +1914,10 @@ function importBrowse(path) { `; } - // Unterordner + // Unterordner: Einfachklick = auswaehlen, Doppelklick = navigieren for (const f of (data.folders || [])) { const meta = f.video_count > 0 ? `${f.video_count} Videos` : ""; - html += `
+ html += `
📁 ${escapeHtml(f.name)} ${meta} @@ -1916,6 +1938,23 @@ function importBrowse(path) { }); } +// Klick-Handler: Einfachklick = auswaehlen, Doppelklick = navigieren +let _importClickTimer = null; +function importFolderClick(path, el) { + if (_importClickTimer) { + // Zweiter Klick innerhalb 300ms -> Doppelklick -> navigieren + clearTimeout(_importClickTimer); + _importClickTimer = null; + importBrowse(path); + } else { + // Erster Klick -> kurz warten ob Doppelklick kommt + _importClickTimer = setTimeout(() => { + _importClickTimer = null; + importSelectFolder(path, el); + }, 250); + } +} + function importSelectFolder(path, el) { // Vorherige Auswahl entfernen document.querySelectorAll(".import-browser-folder.selected").forEach( @@ -1995,8 +2034,10 @@ function renderImportItems(data) { document.getElementById("import-info").textContent = `${items.length} Dateien: ${matched} erkannt, ${conflicts} Konflikte, ${pending} offen`; - // Start-Button nur wenn keine ungeloesten Konflikte - const hasUnresolved = items.some(i => i.status === "conflict" && !i.user_action); + // Start-Button nur wenn keine ungeloesten Konflikte UND keine pending Items + const hasUnresolved = items.some(i => + (i.status === "conflict" && !i.user_action) || i.status === "pending" + ); document.getElementById("btn-start-import").disabled = hasUnresolved; if (!items.length) { @@ -2012,11 +2053,13 @@ function renderImportItems(data) { const statusClass = item.status === "conflict" ? "status-badge warn" : item.status === "matched" ? "status-badge ok" : item.status === "done" ? "status-badge ok" + : item.status === "pending" ? "status-badge error" : "status-badge"; const statusText = item.status === "conflict" ? "Konflikt" : item.status === "matched" ? "OK" : item.status === "done" ? "Fertig" : item.status === "skipped" ? "Uebersprungen" + : item.status === "pending" ? "Nicht erkannt" : item.status; const sourceName = item.source_file ? 
item.source_file.split("/").pop() : "-"; @@ -2032,13 +2075,17 @@ function renderImportItems(data) { `; } else if (item.status === "pending") { - // TVDB-Suchfeld fuer manuelles Matching - actionHtml = ``; + actionHtml = ` + + + `; } else if (item.user_action) { actionHtml = `${item.user_action}`; } - html += ` + const rowClass = item.status === "conflict" ? "row-conflict" + : item.status === "pending" ? "row-pending" : ""; + html += ` ${escapeHtml(sourceName)} ${escapeHtml(item.tvdb_series_name || item.detected_series || "-")} ${se} @@ -2065,37 +2112,114 @@ function resolveImportConflict(itemId, action) { .catch(e => alert("Fehler: " + e)); } -function openImportTvdbSearch(itemId) { - // Einfaches Prompt fuer TVDB-Suche - const query = prompt("TVDB-Serienname eingeben:"); +// === Import-Zuordnungs-Modal === + +let _assignItemId = null; +let _assignTvdbId = null; +let _assignSeriesName = ""; +let _assignSearchTimer = null; + +function openImportAssignModal(itemId, filename) { + _assignItemId = itemId; + _assignTvdbId = null; + _assignSeriesName = ""; + + const modal = document.getElementById("import-assign-modal"); + modal.style.display = "flex"; + document.getElementById("import-assign-filename").textContent = filename; + document.getElementById("import-assign-search").value = ""; + document.getElementById("import-assign-results").innerHTML = ""; + document.getElementById("import-assign-selected").style.display = "none"; + document.getElementById("import-assign-season").value = ""; + document.getElementById("import-assign-episode").value = ""; + document.getElementById("import-assign-search").focus(); +} + +function closeImportAssignModal() { + document.getElementById("import-assign-modal").style.display = "none"; + _assignItemId = null; +} + +function debounceAssignSearch() { + if (_assignSearchTimer) clearTimeout(_assignSearchTimer); + _assignSearchTimer = setTimeout(searchAssignTvdb, 500); +} + +function searchAssignTvdb() { + const query = 
document.getElementById("import-assign-search").value.trim(); if (!query) return; + const results = document.getElementById("import-assign-results"); + results.innerHTML = '
Suche...
'; + fetch(`/api/tvdb/search?q=${encodeURIComponent(query)}`) .then(r => r.json()) .then(data => { - if (!data.results || !data.results.length) { alert("Keine Ergebnisse"); return; } - // Erste 5 anzeigen - const choices = data.results.slice(0, 5).map((r, i) => - `${i + 1}. ${r.name} (${r.year || "?"})` - ).join("\n"); - const choice = prompt(`Ergebnisse:\n${choices}\n\nNummer eingeben:`); - if (!choice) return; - const idx = parseInt(choice) - 1; - if (idx < 0 || idx >= data.results.length) return; - - const selected = data.results[idx]; - fetch(`/api/library/import/items/${itemId}`, { - method: "PUT", - headers: {"Content-Type": "application/json"}, - body: JSON.stringify({ - tvdb_series_id: selected.tvdb_id, - tvdb_series_name: selected.name, - status: "matched", - }), - }) - .then(() => refreshImportPreview()) - .catch(e => alert("Fehler: " + e)); + if (data.error) { results.innerHTML = `
${escapeHtml(data.error)}
`; return; } + if (!data.results || !data.results.length) { results.innerHTML = '
Keine Ergebnisse
'; return; } + results.innerHTML = data.results.slice(0, 8).map(r => ` +
+ ${r.poster ? `` : ""} +
+ ${escapeHtml(r.name)} + ${r.year || ""} +

${escapeHtml((r.overview || "").substring(0, 120))}

+
+
+ `).join(""); }) + .catch(e => { results.innerHTML = `
Fehler: ${e}
`; }); +} + +function selectAssignSeries(tvdbId, name) { + _assignTvdbId = tvdbId; + _assignSeriesName = name; + document.getElementById("import-assign-results").innerHTML = ""; + document.getElementById("import-assign-selected").style.display = ""; + document.getElementById("import-assign-selected-name").textContent = name; + document.getElementById("import-assign-search").value = ""; +} + +function submitImportAssign() { + if (!_assignItemId) return; + + const season = parseInt(document.getElementById("import-assign-season").value); + const episode = parseInt(document.getElementById("import-assign-episode").value); + const manualName = document.getElementById("import-assign-search").value.trim(); + const seriesName = _assignSeriesName || manualName; + + if (!seriesName) { alert("Serie auswaehlen oder Namen eingeben"); return; } + if (isNaN(season) || isNaN(episode)) { alert("Staffel und Episode eingeben"); return; } + + const btn = document.querySelector("#import-assign-modal .btn-primary"); + btn.disabled = true; + btn.textContent = "Zuordne..."; + + fetch(`/api/library/import/items/${_assignItemId}/reassign`, { + method: "POST", + headers: {"Content-Type": "application/json"}, + body: JSON.stringify({ + series_name: seriesName, + season: season, + episode: episode, + tvdb_id: _assignTvdbId || null, + }), + }) + .then(r => r.json()) + .then(data => { + btn.disabled = false; + btn.textContent = "Zuordnen"; + if (data.error) { alert("Fehler: " + data.error); return; } + closeImportAssignModal(); + refreshImportPreview(); + }) + .catch(e => { btn.disabled = false; btn.textContent = "Zuordnen"; alert("Fehler: " + e); }); +} + +function skipImportItem(itemId) { + fetch(`/api/library/import/items/${itemId}/skip`, {method: "POST"}) + .then(r => r.json()) + .then(() => refreshImportPreview()) .catch(e => alert("Fehler: " + e)); } @@ -2274,3 +2398,71 @@ function cleanSearchTitle(title) { 
.replace(/\s*(720p|1080p|2160p|4k|bluray|bdrip|webrip|web-dl|hdtv|x264|x265|hevc|aac|dts|remux)\s*/gi, ' ') .trim(); } + +// === Video-Player === + +let _playerVideoId = null; + +function playVideo(videoId, title) { + const modal = document.getElementById("player-modal"); + const video = document.getElementById("player-video"); + document.getElementById("player-title").textContent = title || "Video"; + _playerVideoId = videoId; + + // Alte Quelle stoppen + video.pause(); + video.removeAttribute("src"); + video.load(); + + // Neue Quelle setzen (ffmpeg-Transcoding-Stream) + video.src = `/api/library/videos/${videoId}/stream`; + modal.style.display = "flex"; + + video.play().catch(() => { + // Autoplay blockiert - User muss manuell starten + }); +} + +function closePlayer() { + const video = document.getElementById("player-video"); + video.pause(); + video.removeAttribute("src"); + video.load(); + _playerVideoId = null; + document.getElementById("player-modal").style.display = "none"; +} + +// ESC schliesst den Player +document.addEventListener("keydown", function(e) { + if (e.key === "Escape") { + const player = document.getElementById("player-modal"); + if (player && player.style.display === "flex") { + closePlayer(); + e.stopPropagation(); + } + } +}); + +// === Video loeschen === + +function deleteVideo(videoId, title, context) { + if (!confirm(`"${title}" wirklich loeschen?\n\nDatei wird unwiderruflich entfernt!`)) return; + + fetch(`/api/library/videos/${videoId}?delete_file=1`, {method: "DELETE"}) + .then(r => r.json()) + .then(data => { + if (data.error) { showToast("Fehler: " + data.error, "error"); return; } + showToast("Video geloescht", "success"); + + // Ansicht aktualisieren + if (context === "series" && currentSeriesId) { + openSeriesDetail(currentSeriesId); + } else if (context === "movie" && currentMovieId) { + openMovieDetail(currentMovieId); + } else { + reloadAllSections(); + } + loadStats(); + }) + .catch(e => showToast("Fehler: " + e, "error")); 
+} diff --git a/app/static/js/websocket.js b/app/static/js/websocket.js index b276969..cb9c662 100644 --- a/app/static/js/websocket.js +++ b/app/static/js/websocket.js @@ -32,6 +32,11 @@ function connectWebSocket() { updateActiveConversions(packet.data_convert); } else if (packet.data_queue !== undefined) { updateQueue(packet.data_queue); + } else if (packet.data_log !== undefined) { + // Log-Nachrichten ans Benachrichtigungs-System weiterleiten + if (typeof addNotification === "function") { + addNotification(packet.data_log.message, packet.data_log.level); + } } } catch (e) { console.error("WebSocket Nachricht parsen fehlgeschlagen:", e); diff --git a/app/templates/base.html b/app/templates/base.html index 583e77c..fc8ba8d 100644 --- a/app/templates/base.html +++ b/app/templates/base.html @@ -54,7 +54,6 @@ // === Benachrichtigungs-System === const notifications = []; let unreadErrors = 0; - let lastLogId = 0; function toggleNotificationPanel() { const panel = document.getElementById("notification-panel"); @@ -128,25 +127,36 @@ .replace(/>/g, ">"); } - // Log-Polling vom Server - async function pollLogs() { - try { - const r = await fetch(`/api/logs?since=${lastLogId}`); - const data = await r.json(); + // Log-Empfang per WebSocket (kein Polling mehr) + // WebSocket sendet {data_log: {level, message}} - wird in websocket.js + // oder hier abgefangen, je nachdem welche Seite geladen ist. + let _logWs = null; - if (data.logs && data.logs.length) { - for (const log of data.logs) { - addNotification(log.message, log.level); - if (log.id > lastLogId) lastLogId = log.id; + function connectLogWebSocket() { + const proto = location.protocol === "https:" ? 
"wss:" : "ws:"; + const url = `${proto}//${location.host}/ws`; + _logWs = new WebSocket(url); + + _logWs.onmessage = function(event) { + try { + const packet = JSON.parse(event.data); + if (packet.data_log) { + addNotification(packet.data_log.message, packet.data_log.level); } + } catch (e) { + // JSON-Parse-Fehler ignorieren } - } catch (e) { - // Ignorieren falls Endpoint nicht existiert - } + }; + + _logWs.onclose = function() { + setTimeout(connectLogWebSocket, 5000); + }; } - // Polling starten - setInterval(pollLogs, 2000); + // Nur Log-WebSocket starten wenn kein globaler WS existiert (Dashboard hat eigenen) + if (!window.WS_URL) { + connectLogWebSocket(); + } {% block scripts %}{% endblock %} diff --git a/app/templates/library.html b/app/templates/library.html index cefae3e..5b6dfaf 100644 --- a/app/templates/library.html +++ b/app/templates/library.html @@ -457,6 +457,64 @@
+ + + + + {% endblock %} {% block scripts %} diff --git a/docker-compose.yml b/docker-compose.yml index 38d4a58..d3e023f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,7 +8,7 @@ services: restart: unless-stopped user: "${PUID:-99}:${PGID:-100}" ports: - - "8080:8080" + - "${VK_PORT:-8080}:8080" volumes: # Konfiguration (persistent) - ./app/cfg:/opt/video-konverter/app/cfg @@ -25,8 +25,27 @@ services: group_add: - "video" environment: + # GPU-Treiber - LIBVA_DRIVER_NAME=iHD - LIBVA_DRIVERS_PATH=/usr/lib/x86_64-linux-gnu/dri + # === VideoKonverter Konfiguration (VK_*) === + # Alle Werte ueberschreiben die settings.yaml + # Datenbank + - VK_DB_HOST=${VK_DB_HOST:-192.168.155.11} + - VK_DB_PORT=${VK_DB_PORT:-3306} + - VK_DB_USER=${VK_DB_USER:-video} + - VK_DB_PASSWORD=${VK_DB_PASSWORD:-8715} + - VK_DB_NAME=${VK_DB_NAME:-video_converter} + # Encoding + - VK_MODE=gpu + - VK_GPU_DEVICE=${VK_GPU_DEVICE:-/dev/dri/renderD128} + - VK_MAX_JOBS=${VK_MAX_JOBS:-1} + - VK_DEFAULT_PRESET=${VK_DEFAULT_PRESET:-gpu_av1} + # Library / TVDB + - VK_TVDB_API_KEY=${VK_TVDB_API_KEY:-} + - VK_TVDB_LANGUAGE=${VK_TVDB_LANGUAGE:-deu} + # Logging + - VK_LOG_LEVEL=${VK_LOG_LEVEL:-INFO} profiles: - gpu @@ -39,7 +58,7 @@ services: container_name: video-konverter-cpu user: "${PUID:-99}:${PGID:-100}" ports: - - "8080:8080" + - "${VK_PORT:-8080}:8080" volumes: - ./app/cfg:/opt/video-konverter/app/cfg - ./data:/opt/video-konverter/data @@ -47,6 +66,21 @@ services: # /mnt 1:1 durchreichen - Pfade identisch zum Host - /mnt:/mnt:rw environment: - - VIDEO_KONVERTER_MODE=cpu + # === VideoKonverter Konfiguration (VK_*) === + # Datenbank + - VK_DB_HOST=${VK_DB_HOST:-192.168.155.11} + - VK_DB_PORT=${VK_DB_PORT:-3306} + - VK_DB_USER=${VK_DB_USER:-video} + - VK_DB_PASSWORD=${VK_DB_PASSWORD:-8715} + - VK_DB_NAME=${VK_DB_NAME:-video_converter} + # Encoding + - VK_MODE=cpu + - VK_MAX_JOBS=${VK_MAX_JOBS:-1} + - VK_DEFAULT_PRESET=${VK_DEFAULT_PRESET:-cpu_av1} + # Library / TVDB + - 
VK_TVDB_API_KEY=${VK_TVDB_API_KEY:-} + - VK_TVDB_LANGUAGE=${VK_TVDB_LANGUAGE:-deu} + # Logging + - VK_LOG_LEVEL=${VK_LOG_LEVEL:-INFO} profiles: - cpu