diff --git a/.gitignore b/.gitignore index d210053..d1122ff 100644 --- a/.gitignore +++ b/.gitignore @@ -7,8 +7,7 @@ # Build outputs /dist/ -# Local tests (not committed) -/tests/ +# Lokale Test-Artefakte /TESTING/ /.pytest_cache/ /pytest.ini diff --git a/CHANGELOG-DEV.md b/CHANGELOG-DEV.md index 3ad68d3..e5cea1c 100644 --- a/CHANGELOG-DEV.md +++ b/CHANGELOG-DEV.md @@ -1,5 +1,129 @@ # Changelog (Dev) +## 0.1.69-dev - 2026-03-01 + +### Trakt: Neue Features + +- **Weiterschauen:** Neuer Hauptmenüpunkt – zeigt die nächste ungesehene Folge je Serie + basierend auf der Trakt-History. Auto-Matching sucht den Titel automatisch in allen + aktiven Plugins und verlinkt direkt zur richtigen Staffel. +- **Trakt Upcoming:** Neuer Hauptmenüpunkt – zeigt anstehende Episoden der Watchlist-Serien + (Trakt-Kalender, 14 Tage voraus) mit Ausstrahlungsdatum und TMDB-Poster. + Auto-Matching wie bei „Weiterschauen". +- **Watched-Status in Episodenlisten:** Bereits bei Trakt als gesehen markierte Episoden + werden in Staffelansichten mit Häkchen (Kodi `playcount=1`) markiert. + Ergebnis wird 5 Minuten gecacht um API-Aufrufe zu minimieren. +- **`core/trakt.py`:** Neuer `TraktCalendarItem`-Dataclass und `get_calendar()`-Methode + (Trakt Calendar API: `/calendars/my/shows/{start}/{days}`). + +### Python 3.8 Kompatibilität (Windows/Kodi) + +- **`from __future__ import annotations`** in allen Modulen ergänzt, die noch kein + deferred-evaluation hatten (`core/router.py`, `core/metadata.py`, `core/playstate.py`, + `core/gui.py`, `regex_patterns.py`). Behebt `TypeError: 'type' object is not subscriptable` + auf Kodi-Installationen mit Python 3.8 (Windows). + +### Bugfixes + +- **`dokustreams_plugin.py`:** Regex `r"(\\d+)"` → `r"(\d+)"` – Seitennavigation hat + nie Seitenzahlen gefunden (falsch-escaped in Raw-String). +- **`serienstream_plugin.py`:** Dedup-Key verwendete `\\t` (Backslash+t) statt echtem + Tab-Zeichen – inkonsistent mit `aniworld_plugin.py`. 
+- **Menübereinigung:** Doppelter Menüpunkt „Neue Titel" (`new_plugin_titles`) entfernt – + identisch mit „Neuste Titel" (`latest_titles`). + +--- + +## 0.1.67-dev - 2026-02-27 + +### Stabilitäts- und Sicherheits-Fixes + +- **Sicherheit (updater.py):** ZIP-Extraktion prüft jetzt jeden Eintrag auf Path-Traversal. + Bösartige Pfade (z. B. `../../`) werden abgelehnt und der Update-Vorgang abgebrochen + (war: still ignoriert mit `continue`). +- **Silent-Failure-Fix (metadata.py):** Neues `_initialized`-Flag und `_require_init()`-Guard – + wenn `init()` nicht aufgerufen wurde, erscheint jetzt eine Warnung im Log statt lautlosem No-Op. +- **Thread-Safety (default.py):** Neue Locks `_PLUGIN_CACHE_LOCK` und `_GENRE_TITLES_CACHE_LOCK` + schützen alle Cache-Zugriffe auf Plugin-Instanzen und Genre-Titel-Caches. +- **Memory-Leak-Fix (default.py):** Alle internen Caches haben jetzt ein Größenlimit + (`_CACHE_MAXSIZE = 500`) mit LRU-artigem Eviction-Mechanismus. +- **Code-Qualität (default.py):** ~300 Zeilen Duplicate-Code durch `_show_paged_title_list()` + Hilfsfunktion ersetzt – alle paginierten Titellisten (Genres, Kategorien, A-Z) nutzen jetzt + dieselbe Logik. +- **Syntax-Fix (default.py):** Fremd-Text in Zeile 3517 entfernt, der einen latenten Syntax- + Fehler verursachte. 
+ +### Neues Plugin-Interface + +Neue optionale Methoden in `BasisPlugin` (plugin_interface.py): + +| Methode | Beschreibung | +|---------|-------------| +| `latest_titles(page)` | Neuerscheinungen / neu hinzugefügte Titel | +| `years_available()` | Verfügbare Erscheinungsjahre für Filter | +| `titles_for_year(year, page)` | Titel nach Jahr gefiltert | +| `countries_available()` | Verfügbare Länder für Filter | +| `titles_for_country(country, page)` | Titel nach Herkunftsland gefiltert | +| `collections()` | Filmreihen / Sammlungen | +| `titles_for_collection(collection, page)` | Titel einer Sammlung | +| `tags()` | Schlagworte / Tags | +| `titles_for_tag(tag, page)` | Titel nach Schlagwort | +| `random_title()` | Zufälliger Titel | + +Neue Capability-Strings: `latest_titles`, `year_filter`, `country_filter`, `collections`, `tags`, `random` + +### Neue Menüeinträge in default.py + +Plugins die die neuen Capabilities melden, erhalten automatisch folgende Menüpunkte: +- **"Neue Titel"** – für Plugins mit `latest_titles` +- **"Nach Jahr"** – für Plugins mit `year_filter` +- **"Nach Land"** – für Plugins mit `country_filter` +- **"Sammlungen"** – für Plugins mit `collections` +- **"Schlagworte"** – für Plugins mit `tags` +- **"Zufälliger Titel"** – für Plugins mit `random` + +### Plugin-Erweiterungen + +**Filmpalast** (`filmpalast_plugin.py`): +- `popular_series()` – scrapt Top-Filme von `/movies/top` +- `latest_titles(page)` – scrapt Neuerscheinungen von `/movies/new` mit Pagination +- IMDb-Rating in `metadata_for()` integriert (`info_labels["rating"]`) +- Neue Capabilities: `popular_series`, `latest_titles` + +**Doku-Streams** (`dokustreams_plugin.py`): +- `tags()` – scrapt alle Schlagworte von der Startseite +- `titles_for_tag(tag, page)` – Titelliste pro Schlagwort mit Pagination +- `random_title()` – folgt dem Redirect von `/zufaellige-doku/` +- `resolve_stream_link(link)` – ResolveURL + HTTP-Redirect-Fallback +- Neue Capabilities: `tags`, `random` + +**AniWorld** 
(`aniworld_plugin.py`): +- `titles_for_genre_page(genre, page)` – paginierte Genre-Titellisten via `/genre/[slug]?page=[n]` +- `genre_page_count(genre)` – letzte Seitennummer aus Pagination extrahiert +- `latest_titles(page)` – neue Anime-Releases via `/animekalender` +- Neue Capability: `latest_titles` + +**SerienStream** (`serienstream_plugin.py`): +- `latest_titles(page)` – neue Serien via JSON-Kalender-API (`/api/calendar`) mit wochenweiser Rückwärts-Paginierung +- `genre_page_count(genre)` – Gesamtanzahl der Genre-Seiten aus Pagination-Links extrahiert +- `alpha_index()` – gibt A-Z-Buchstaben + `0-9` zurück +- `titles_for_alpha_page(letter, page)` – Serien alphabetisch abrufen via `/serien/alle?buchstabe={letter}` +- Neue Capabilities: `latest_titles`, `alpha` + +**TopStreamFilm** (`topstreamfilm_plugin.py`): +- `years_available()` – statische Liste vom aktuellen Jahr bis 1980 +- `titles_for_year(year, page)` – Titel nach Erscheinungsjahr via `/xfsearch/{year}/page/{n}/` +- `latest_titles(page)` – neue Filme via `/neueste-filme/page/{n}/` +- Neue Capabilities: `year_filter`, `latest_titles` + +**Einschalten** (`einschalten_plugin.py`): +- `popular_series()` – Top-50 Filme sortiert nach `voteAverage` (absteigend) +- `latest_titles(page)` – neue Filme (Alias zu `new_titles_page(page)`) +- Neue Capabilities: `popular_series`, `latest_titles` +- Hinweis: Ratings (`voteAverage`, `voteCount`) waren bereits in `metadata_for()` enthalten + +--- + ## 0.1.66-dev - 2026-02-25 - Serienstream HTTP-Fetches robuster gemacht: Retry bei kurzzeitigen Verbindungsabbruechen inkl. Session-Reset. diff --git a/CHANGELOG.md b/CHANGELOG.md index ce044ae..54957b4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,12 +1,15 @@ # Changelog (Stable) +> Stabile Releases werden aus dem `dev`-Branch nach `main` übertragen. +> Den vollständigen Entwicklungs-Changelog findet man in [CHANGELOG-DEV.md](CHANGELOG-DEV.md). 
+ ## 0.1.58 - 2026-02-23 - Menuebezeichnungen vereinheitlicht (`Haeufig gesehen`, `Neuste Titel`). - `Neue Titel` und `Neueste Folgen` im Menue zu `Neuste Titel` zusammengelegt. - Hoster-Header-Anpassung zentral nach `resolve_stream_link` eingebaut. -- Hinweis bei Cloudflare-Block durch ResolveURL statt stiller Fehlversuche. +- Hinweis bei Cloudflare-Block. - Update-Einstellungen erweitert (Kanal, manueller Check, optionaler Auto-Check). -- Metadaten-Parsing in AniWorld und Filmpalast nachgezogen (Cover/Plot robuster). +- Metadaten in AniWorld und Filmpalast - Topstreamfilm-Suche: fehlender `urlencode`-Import behoben. - Einige ungenutzte Funktionen entfernt. diff --git a/addon/addon.xml b/addon/addon.xml index ec1f667..dadcf6e 100644 --- a/addon/addon.xml +++ b/addon/addon.xml @@ -1,10 +1,11 @@ - + + video diff --git a/addon/core/__init__.py b/addon/core/__init__.py new file mode 100644 index 0000000..abb5217 --- /dev/null +++ b/addon/core/__init__.py @@ -0,0 +1,2 @@ +from __future__ import annotations +# ViewIT core package diff --git a/addon/core/gui.py b/addon/core/gui.py new file mode 100644 index 0000000..0ba0019 --- /dev/null +++ b/addon/core/gui.py @@ -0,0 +1,341 @@ +from __future__ import annotations +import sys +import re +import contextlib +from urllib.parse import urlencode +from typing import Any, Generator, Optional, Callable +from contextlib import contextmanager + +try: + import xbmc + import xbmcaddon + import xbmcgui + import xbmcplugin +except ImportError: + xbmc = None + xbmcaddon = None + xbmcgui = None + xbmcplugin = None + +_ADDON_INSTANCE = None + +def get_addon(): + global _ADDON_INSTANCE + if xbmcaddon is None: + return None + if _ADDON_INSTANCE is None: + _ADDON_INSTANCE = xbmcaddon.Addon() + return _ADDON_INSTANCE + +def get_handle() -> int: + return int(sys.argv[1]) if len(sys.argv) > 1 else -1 + +def get_setting_string(setting_id: str) -> str: + addon = get_addon() + if addon is None: + return "" + getter = getattr(addon, 
def get_setting_bool(setting_id: str, *, default: bool = False) -> bool:
    """Read a boolean add-on setting, returning *default* when unset or unreadable.

    Resolution order: an empty raw value means "not configured" and yields the
    default; ``getSettingBool`` is preferred for correct typing; finally the raw
    string is compared against ``"true"``.
    """
    addon = get_addon()
    if addon is None:
        return default
    # Step 1: an empty raw value means the setting was never configured.
    raw_reader = getattr(addon, "getSetting", None)
    if callable(raw_reader):
        try:
            if not str(raw_reader(setting_id) or "").strip():
                return default
        except Exception:
            return default
    # Step 2: getSettingBool already returns the proper type.
    typed_reader = getattr(addon, "getSettingBool", None)
    if callable(typed_reader):
        try:
            return bool(typed_reader(setting_id))
        except Exception:
            pass
    # Step 3: fall back to interpreting the raw string ourselves.
    if callable(raw_reader):
        try:
            return str(raw_reader(setting_id) or "").strip().lower() == "true"
        except Exception:
            pass
    return default

def get_setting_int(setting_id: str, *, default: int = 0) -> int:
    """Read an integer add-on setting, returning *default* when unset or unreadable."""
    addon = get_addon()
    if addon is None:
        return default
    typed_reader = getattr(addon, "getSettingInt", None)
    if callable(typed_reader):
        try:
            raw_reader = getattr(addon, "getSetting", None)
            if callable(raw_reader):
                # Empty raw value = setting not configured -> honour the default.
                if not str(raw_reader(setting_id) or "").strip():
                    return default
            return int(typed_reader(setting_id))
        except Exception:
            pass
    raw_reader = getattr(addon, "getSetting", None)
    if callable(raw_reader):
        try:
            text = str(raw_reader(setting_id) or "").strip()
            return int(text) if text else default
        except Exception:
            pass
    return default
@contextmanager
def progress_dialog(heading: str, message: str = ""):
    """Show a Kodi progress dialog and yield an update callable.

    The yielded function takes ``(percent, msg)`` and returns ``True`` when the
    user cancelled the dialog. Outside Kodi (no ``xbmcgui``) it is a no-op that
    always returns ``False``.
    """
    box = None
    try:
        if xbmcgui is not None and hasattr(xbmcgui, "DialogProgress"):
            box = xbmcgui.DialogProgress()
            box.create(heading, message)
    except Exception:
        box = None

    def push(percent: int, msg: str = "") -> bool:
        if not box:
            return False
        try:
            box.update(percent, msg or message)
            return box.iscanceled()
        except Exception:
            return False

    try:
        yield push
    finally:
        if box:
            try:
                box.close()
            except Exception:
                pass

@contextmanager
def busy_dialog(message: str = "Bitte warten...", *, heading: str = "Bitte warten"):
    """Progress dialog used instead of a spinner, with a short status text.

    Yields ``advance(step_message, percent=None)``; a missing percent maps to
    50, explicit values are clamped into 5..95.
    """
    with progress_dialog(heading, message) as report:
        report(10, message)

        def advance(step_message: str, percent: int | None = None) -> bool:
            if percent is None:
                pct = 50
            else:
                pct = max(5, min(95, int(percent)))
            return report(pct, step_message or message)

        try:
            yield advance
        finally:
            report(100, "Fertig")

def run_with_progress(heading: str, message: str, loader: Callable[[], Any]) -> Any:
    """Run *loader* while a visible progress dialog is shown; return its result."""
    with progress_dialog(heading, message) as report:
        report(10, message)
        outcome = loader()
        report(100, "Fertig")
    return outcome

def set_content(handle: int, content: str) -> None:
    """Hint Kodi about the content type so skins can show watched/resume overlays."""
    hint = (content or "").strip()
    if not hint:
        return
    try:
        set_fn = getattr(xbmcplugin, "setContent", None)
        if callable(set_fn):
            set_fn(handle, hint)
    except Exception:
        pass
params: dict[str, str] | None = None, + *, + is_folder: bool = True, + info_labels: dict[str, Any] | None = None, + art: dict[str, str] | None = None, + cast: Any = None, + base_url: str = "", +) -> None: + """Fuegt einen Eintrag in die Kodi-Liste ein.""" + query: dict[str, str] = {"action": action} + if params: + query.update(params) + url = f"{base_url}?{urlencode(query)}" + item = xbmcgui.ListItem(label=label) + if not is_folder: + try: + item.setProperty("IsPlayable", "true") + except Exception: + pass + apply_video_info(item, info_labels, cast) + if art: + setter = getattr(item, "setArt", None) + if callable(setter): + try: + setter(art) + except Exception: + pass + xbmcplugin.addDirectoryItem(handle=handle, url=url, listitem=item, isFolder=is_folder) + +def apply_video_info(item, info_labels: dict[str, Any] | None, cast: Any = None) -> None: + """Setzt Metadaten via InfoTagVideo (Kodi v20+), mit Fallback.""" + if not info_labels and not cast: + return + info_labels = dict(info_labels or {}) + get_tag = getattr(item, "getVideoInfoTag", None) + tag = None + if callable(get_tag): + try: + tag = get_tag() + except Exception: + tag = None + + if tag is not None: + try: + _apply_tag_info(tag, info_labels) + if cast: + _apply_tag_cast(tag, cast) + except Exception: + pass + else: + # Fallback für ältere Kodi-Versionen + setter = getattr(item, "setInfo", None) + if callable(setter): + try: + setter("video", info_labels) + except Exception: + pass + if cast: + setter = getattr(item, "setCast", None) + if callable(setter): + try: + setter(cast) + except Exception: + pass + +def _apply_tag_info(tag, info: dict[str, Any]) -> None: + for key, method in [ + ("title", "setTitle"), + ("plot", "setPlot"), + ("mediatype", "setMediaType"), + ("tvshowtitle", "setTvShowTitle"), + ]: + val = info.get(key) + if val: + setter = getattr(tag, method, None) + if callable(setter): setter(str(val)) + + for key, method in [("season", "setSeason"), ("episode", "setEpisode")]: + val = 
info.get(key) + if val not in (None, "", 0, "0"): + setter = getattr(tag, method, None) + if callable(setter): setter(int(val)) + + rating = info.get("rating") + if rating not in (None, "", 0, "0"): + set_rating = getattr(tag, "setRating", None) + if callable(set_rating): + try: set_rating(float(rating)) + except Exception: pass + +def _apply_tag_cast(tag, cast) -> None: + setter = getattr(tag, "setCast", None) + if not callable(setter): + return + try: + formatted_cast = [] + for c in cast: + # Erwarte TmdbCastMember oder ähnliches Objekt/Dict + name = getattr(c, "name", "") or c.get("name", "") if hasattr(c, "get") else "" + role = getattr(c, "role", "") or c.get("role", "") if hasattr(c, "get") else "" + thumb = getattr(c, "thumbnail", "") or c.get("thumbnail", "") if hasattr(c, "get") else "" + if name: + formatted_cast.append(xbmcgui.Actor(name=name, role=role, thumbnail=thumb)) + if formatted_cast: + setter(formatted_cast) + except Exception: + pass + +def label_with_duration(label: str, info_labels: dict[str, Any]) -> str: + duration = info_labels.get("duration") + if not duration: + return label + try: + minutes = int(duration) // 60 + if minutes > 0: + return f"{label} ({minutes} Min.)" + except Exception: + pass + return label + + +def extract_first_int(value: str | int | None) -> Optional[int]: + if value is None: + return None + if isinstance(value, int): + return value + match = re.search(r"\d+", str(value)) + return int(match.group()) if match else None + + +def looks_like_unresolved_hoster_link(url: str) -> bool: + url = (url or "").strip() + return any(p in url.casefold() for p in ["hoster", "link", "resolve"]) + + +def is_resolveurl_missing_error(err: str | None) -> bool: + err = str(err or "").strip().lower() + return "resolveurl" in err and ("missing" in err or "not found" in err) + + +def is_cloudflare_challenge_error(err: str | None) -> bool: + err = str(err or "").strip().lower() + return "cloudflare" in err or "challenge" in err + + +def 
resolveurl_last_error() -> str: + try: + from resolveurl_backend import get_last_error # type: ignore + except Exception: + return "" + try: + return str(get_last_error() or "") + except Exception: + return "" diff --git a/addon/core/metadata.py b/addon/core/metadata.py new file mode 100644 index 0000000..f246df5 --- /dev/null +++ b/addon/core/metadata.py @@ -0,0 +1,448 @@ +from __future__ import annotations +import asyncio +import os +import re +import threading +from datetime import datetime +from typing import Any, Callable, Dict, List, Optional, Tuple + +from tmdb import ( + TmdbCastMember, + fetch_tv_episode_credits, + lookup_movie, + lookup_tv_season, + lookup_tv_season_summary, + lookup_tv_show, +) + +try: + import xbmc + import xbmcaddon + import xbmcvfs +except ImportError: + xbmc = None + xbmcaddon = None + xbmcvfs = None + +# Caches +_TMDB_CACHE: dict[str, tuple[dict[str, str], dict[str, str]]] = {} +_TMDB_CAST_CACHE: dict[str, list[TmdbCastMember]] = {} +_TMDB_SEASON_CACHE: dict[tuple[int, int, str, str], dict[int, tuple[dict[str, str], dict[str, str]]]] = {} +_TMDB_SEASON_SUMMARY_CACHE: dict[tuple[int, int, str, str], tuple[dict[str, str], dict[str, str]]] = {} +_TMDB_EPISODE_CAST_CACHE: dict[tuple[int, int, int, str], list[TmdbCastMember]] = {} +_TMDB_ID_CACHE: dict[str, int] = {} +_TMDB_LOG_PATH: str | None = None +_TMDB_LOCK = threading.RLock() + + +# Dependency Injection variables +_initialized: bool = False +_get_setting_string: Callable[[str], str] = lambda k: "" +_get_setting_bool: Callable[[str, bool], bool] = lambda k, default=False: default +_get_setting_int: Callable[[str, int], int] = lambda k, default=0: default +_log: Callable[[str, int], None] = lambda msg, level=0: None +_run_async: Callable[[Any], Any] = lambda coro: None +_extract_first_int: Callable[[str], Optional[int]] = lambda val: None + + +def _require_init() -> None: + """Gibt eine Warnung aus, wenn metadata.init() noch nicht aufgerufen wurde.""" + if not _initialized: + import 
sys + print("[ViewIT/metadata] WARNUNG: metadata.init() wurde nicht aufgerufen – Metadaten-Funktionen arbeiten mit Standardwerten!", file=sys.stderr) + + +def init( + *, + get_setting_string: Callable[[str], str], + get_setting_bool: Callable[..., bool], + get_setting_int: Callable[..., int], + log_fn: Callable[[str, int], None], + run_async_fn: Callable[[Any], Any], + extract_first_int_fn: Callable[[str], Optional[int]], +) -> None: + global _initialized, _get_setting_string, _get_setting_bool, _get_setting_int, _log, _run_async, _extract_first_int + _get_setting_string = get_setting_string + _get_setting_bool = get_setting_bool + _get_setting_int = get_setting_int + _log = log_fn + _run_async = run_async_fn + _extract_first_int = extract_first_int_fn + _initialized = True + + +def _get_log_path(filename: str) -> str: + if xbmcaddon and xbmcvfs: + addon = xbmcaddon.Addon() + profile = xbmcvfs.translatePath(addon.getAddonInfo("profile")) + log_dir = os.path.join(profile, "logs") + if not xbmcvfs.exists(log_dir): + xbmcvfs.mkdirs(log_dir) + return os.path.join(log_dir, filename) + return os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), filename) + + +def tmdb_file_log(message: str) -> None: + global _TMDB_LOG_PATH + if _TMDB_LOG_PATH is None: + _TMDB_LOG_PATH = _get_log_path("tmdb.log") + timestamp = datetime.utcnow().isoformat(timespec="seconds") + "Z" + line = f"{timestamp}\t{message}\n" + try: + with open(_TMDB_LOG_PATH, "a", encoding="utf-8") as handle: + handle.write(line) + except Exception: + if xbmcvfs is None: + return + try: + handle = xbmcvfs.File(_TMDB_LOG_PATH, "a") # type: ignore + handle.write(line) # type: ignore + handle.close() # type: ignore + except Exception: + return + + +def tmdb_cache_get(cache: dict, key, default=None): + with _TMDB_LOCK: + return cache.get(key, default) + + +def tmdb_cache_set(cache: dict, key, value) -> None: + with _TMDB_LOCK: + cache[key] = value + + +def tmdb_prefetch_concurrency() -> int: + 
try: + raw = _get_setting_string("tmdb_prefetch_concurrency").strip() + value = int(raw) if raw else 6 + except Exception: + value = 6 + return max(1, min(20, value)) + + +def tmdb_enabled() -> bool: + _require_init() + return _get_setting_bool("tmdb_enabled", default=True) + + +def tmdb_list_enabled() -> bool: + return tmdb_enabled() and _get_setting_bool("tmdb_genre_metadata", default=False) + + +def tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]: + if not tmdb_enabled(): + return {}, {}, [] + title_key = (title or "").strip().casefold() + language = _get_setting_string("tmdb_language").strip() or "de-DE" + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + show_fanart = _get_setting_bool("tmdb_show_fanart", default=True) + show_rating = _get_setting_bool("tmdb_show_rating", default=True) + show_votes = _get_setting_bool("tmdb_show_votes", default=False) + show_cast = _get_setting_bool("tmdb_show_cast", default=False) + flags = f"p{int(show_plot)}a{int(show_art)}f{int(show_fanart)}r{int(show_rating)}v{int(show_votes)}c{int(show_cast)}" + cache_key = f"{language}|{flags}|{title_key}" + cached = tmdb_cache_get(_TMDB_CACHE, cache_key) + if cached is not None: + info, art = cached + cast_cached = tmdb_cache_get(_TMDB_CAST_CACHE, cache_key, []) + return info, art, list(cast_cached) + + info_labels: dict[str, str] = {"title": title} + art: dict[str, str] = {} + cast: list[TmdbCastMember] = [] + query = (title or "").strip() + api_key = _get_setting_string("tmdb_api_key").strip() + log_requests = _get_setting_bool("tmdb_log_requests", default=False) + log_responses = _get_setting_bool("tmdb_log_responses", default=False) + if api_key: + try: + log_fn = tmdb_file_log if (log_requests or log_responses) else None + candidates: list[str] = [] + if query: + candidates.append(query) + simplified = re.sub(r"\s*[-–]\s*der\s+film\s*$", "", query, 
flags=re.IGNORECASE).strip() + if simplified and simplified not in candidates: + candidates.append(simplified) + + meta = None + is_tv = False + for candidate in candidates: + meta = lookup_tv_show( + title=candidate, + api_key=api_key, + language=language, + log=log_fn, + log_responses=log_responses, + include_cast=show_cast, + ) + if meta: + is_tv = True + break + if not meta: + for candidate in candidates: + movie = lookup_movie( + title=candidate, + api_key=api_key, + language=language, + log=log_fn, + log_responses=log_responses, + include_cast=show_cast, + ) + if movie: + meta = movie + break + except Exception as exc: + try: + tmdb_file_log(f"TMDB ERROR lookup_failed title={title!r} error={exc!r}") + except Exception: + pass + _log(f"TMDB Meta fehlgeschlagen: {exc}", 1) # LOGWARNING/LOGDEBUG fallback + meta = None + if meta: + if is_tv: + tmdb_cache_set(_TMDB_ID_CACHE, title_key, int(getattr(meta, "tmdb_id", 0) or 0)) + info_labels.setdefault("mediatype", "tvshow") + else: + info_labels.setdefault("mediatype", "movie") + if show_plot and getattr(meta, "plot", ""): + info_labels["plot"] = getattr(meta, "plot", "") + runtime_minutes = int(getattr(meta, "runtime_minutes", 0) or 0) + if runtime_minutes > 0 and not is_tv: + info_labels["duration"] = str(runtime_minutes * 60) + rating = getattr(meta, "rating", 0.0) or 0.0 + votes = getattr(meta, "votes", 0) or 0 + if show_rating and rating: + info_labels["rating"] = str(rating) + if show_votes and votes: + info_labels["votes"] = str(votes) + if show_art and getattr(meta, "poster", ""): + poster = getattr(meta, "poster", "") + art.update({"thumb": poster, "poster": poster, "icon": poster}) + if show_fanart and getattr(meta, "fanart", ""): + fanart = getattr(meta, "fanart", "") + if fanart: + art.update({"fanart": fanart, "landscape": fanart}) + if show_cast: + cast = list(getattr(meta, "cast", []) or []) + elif log_requests or log_responses: + tmdb_file_log(f"TMDB MISS title={title!r}") + + 
tmdb_cache_set(_TMDB_CACHE, cache_key, (info_labels, art)) + tmdb_cache_set(_TMDB_CAST_CACHE, cache_key, list(cast)) + return info_labels, art, list(cast) + + +async def _tmdb_labels_and_art_bulk_async( + titles: list[str], +) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]]: + titles = [str(t).strip() for t in (titles or []) if t and str(t).strip()] + if not titles: + return {} + + unique_titles: list[str] = list(dict.fromkeys(titles)) + limit = tmdb_prefetch_concurrency() + semaphore = asyncio.Semaphore(limit) + + async def fetch_one(title: str): + async with semaphore: + return title, await asyncio.to_thread(tmdb_labels_and_art, title) + + tasks = [fetch_one(title) for title in unique_titles] + results = await asyncio.gather(*tasks, return_exceptions=True) + mapped: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + for entry in results: + if isinstance(entry, Exception): + continue + try: + title, payload = entry + except Exception: + continue + if isinstance(title, str) and isinstance(payload, tuple) and len(payload) == 3: + mapped[title] = payload # type: ignore[assignment] + return mapped + + +def tmdb_labels_and_art_bulk( + titles: list[str], +) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]]: + if not tmdb_enabled(): + return {} + return _run_async(_tmdb_labels_and_art_bulk_async(titles)) + + +def tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label: str) -> tuple[dict[str, str], dict[str, str]]: + if not tmdb_enabled(): + return {"title": episode_label}, {} + title_key = (title or "").strip().casefold() + tmdb_id = tmdb_cache_get(_TMDB_ID_CACHE, title_key) + if not tmdb_id: + tmdb_labels_and_art(title) + tmdb_id = tmdb_cache_get(_TMDB_ID_CACHE, title_key) + if not tmdb_id: + return {"title": episode_label}, {} + + season_number = _extract_first_int(season_label) + episode_number = _extract_first_int(episode_label) + if season_number is None or 
episode_number is None: + return {"title": episode_label}, {} + + language = _get_setting_string("tmdb_language").strip() or "de-DE" + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + flags = f"p{int(show_plot)}a{int(show_art)}" + season_key = (tmdb_id, season_number, language, flags) + cached_season = tmdb_cache_get(_TMDB_SEASON_CACHE, season_key) + if cached_season is None: + api_key = _get_setting_string("tmdb_api_key").strip() + if not api_key: + return {"title": episode_label}, {} + log_requests = _get_setting_bool("tmdb_log_requests", default=False) + log_responses = _get_setting_bool("tmdb_log_responses", default=False) + log_fn = tmdb_file_log if (log_requests or log_responses) else None + try: + season_meta = lookup_tv_season( + tmdb_id=tmdb_id, + season_number=season_number, + api_key=api_key, + language=language, + log=log_fn, + log_responses=log_responses, + ) + except Exception as exc: + if log_fn: + log_fn(f"TMDB ERROR season_lookup_failed tmdb_id={tmdb_id} season={season_number} error={exc!r}") + season_meta = None + mapped: dict[int, tuple[dict[str, str], dict[str, str]]] = {} + if season_meta: + for ep_no, ep in season_meta.items(): + info: dict[str, str] = {"title": f"Episode {ep_no}"} + if show_plot and ep.plot: + info["plot"] = ep.plot + if getattr(ep, "runtime_minutes", 0): + info["duration"] = str(int(getattr(ep, "runtime_minutes", 0)) * 60) + art: dict[str, str] = {} + if show_art and ep.thumb: + art = {"thumb": ep.thumb} + mapped[ep_no] = (info, art) + tmdb_cache_set(_TMDB_SEASON_CACHE, season_key, mapped) + cached_season = mapped + + return cached_season.get(episode_number, ({"title": episode_label}, {})) + + +def tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> list[TmdbCastMember]: + if not tmdb_enabled(): + return [] + show_episode_cast = _get_setting_bool("tmdb_show_episode_cast", default=False) + if not show_episode_cast: + return [] 
+ + title_key = (title or "").strip().casefold() + tmdb_id = tmdb_cache_get(_TMDB_ID_CACHE, title_key) + if not tmdb_id: + tmdb_labels_and_art(title) + tmdb_id = tmdb_cache_get(_TMDB_ID_CACHE, title_key) + if not tmdb_id: + return [] + + season_number = _extract_first_int(season_label) + episode_number = _extract_first_int(episode_label) + if season_number is None or episode_number is None: + return [] + + language = _get_setting_string("tmdb_language").strip() or "de-DE" + cache_key = (tmdb_id, season_number, episode_number, language) + cached = tmdb_cache_get(_TMDB_EPISODE_CAST_CACHE, cache_key) + if cached is not None: + return list(cached) + + api_key = _get_setting_string("tmdb_api_key").strip() + if not api_key: + tmdb_cache_set(_TMDB_EPISODE_CAST_CACHE, cache_key, []) + return [] + + log_requests = _get_setting_bool("tmdb_log_requests", default=False) + log_responses = _get_setting_bool("tmdb_log_responses", default=False) + log_fn = tmdb_file_log if (log_requests or log_responses) else None + try: + cast = fetch_tv_episode_credits( + tmdb_id=tmdb_id, + season_number=season_number, + episode_number=episode_number, + api_key=api_key, + language=language, + log=log_fn, + log_responses=log_responses, + ) + except Exception as exc: + if log_fn: + log_fn( + f"TMDB ERROR episode_credits_failed tmdb_id={tmdb_id} season={season_number} episode={episode_number} error={exc!r}" + ) + cast = [] + tmdb_cache_set(_TMDB_EPISODE_CAST_CACHE, cache_key, list(cast)) + return list(cast) + + +def tmdb_season_labels_and_art( + *, + title: str, + season: str, + title_info_labels: dict[str, str] | None = None, +) -> tuple[dict[str, str], dict[str, str]]: + if not tmdb_enabled(): + return {"title": season}, {} + language = _get_setting_string("tmdb_language").strip() or "de-DE" + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + flags = f"p{int(show_plot)}a{int(show_art)}" + api_key = 
_get_setting_string("tmdb_api_key").strip() + log_requests = _get_setting_bool("tmdb_log_requests", default=False) + log_responses = _get_setting_bool("tmdb_log_responses", default=False) + log_fn = tmdb_file_log if (log_requests or log_responses) else None + + info_labels: dict[str, str] | None = None + art: dict[str, str] | None = None + season_number = _extract_first_int(season) + + if api_key and season_number is not None: + title_key = (title or "").strip().casefold() + tmdb_id = tmdb_cache_get(_TMDB_ID_CACHE, title_key) or 0 + cache_key = (tmdb_id, season_number, language, flags) + cached = tmdb_cache_get(_TMDB_SEASON_SUMMARY_CACHE, cache_key) + + if cached is None and tmdb_id: + try: + meta = lookup_tv_season_summary( + tmdb_id=tmdb_id, + season_number=season_number, + api_key=api_key, + language=language, + log=log_fn, + log_responses=log_responses, + ) + except Exception as exc: + if log_fn: + log_fn(f"TMDB ERROR season_summary_failed tmdb_id={tmdb_id} season={season_number} error={exc!r}") + meta = None + labels = {"title": season} + art_map: dict[str, str] = {} + if meta: + if show_plot and meta.plot: + labels["plot"] = meta.plot + if show_art and meta.poster: + art_map = {"thumb": meta.poster, "poster": meta.poster} + cached = (labels, art_map) + tmdb_cache_set(_TMDB_SEASON_SUMMARY_CACHE, cache_key, cached) + + if cached is not None: + info_labels, art = cached + + merged_labels = dict(info_labels or {}) + if title_info_labels: + merged_labels = dict(title_info_labels) + merged_labels.update(dict(info_labels or {})) + + return merged_labels, art or {} diff --git a/addon/core/playstate.py b/addon/core/playstate.py new file mode 100644 index 0000000..996c789 --- /dev/null +++ b/addon/core/playstate.py @@ -0,0 +1,54 @@ +from __future__ import annotations +import threading +from typing import Any + +# Playstate-Verwaltung für den ViewIT Kodi Addon. +# Aktuell sind die meisten Funktionen Stubs, da Kodi die Wiedergabe-Stände selbst verwaltet. 
_PLAYSTATE_CACHE: dict[str, dict[str, Any]] | None = None
_PLAYSTATE_LOCK = threading.RLock()


def playstate_key(*, plugin_name: str, title: str, season: str, episode: str) -> str:
    """Return a tab-separated lookup key built from the stripped parts."""
    parts = (plugin_name, title, season, episode)
    return "\t".join((part or "").strip() for part in parts)


def load_playstate() -> dict[str, dict[str, Any]]:
    """Stub: Kodi tracks resume/watched state itself, so nothing is loaded."""
    return {}


def save_playstate(state: dict[str, dict[str, Any]]) -> None:
    """Stub: persisting play state is intentionally a no-op."""
    return


def get_playstate(key: str) -> dict[str, Any]:
    """Stub: always reports an empty play state."""
    return {}


def set_playstate(key: str, value: dict[str, Any]) -> None:
    """Stub: storing a play state is intentionally a no-op."""
    return


def apply_playstate_to_info(info_labels: dict[str, Any], playstate: dict[str, Any]) -> dict[str, Any]:
    """Return a shallow copy of *info_labels*; the play state is ignored."""
    return dict(info_labels or {})


def label_with_playstate(label: str, playstate: dict[str, Any]) -> str:
    """Return *label* unchanged; no progress markers are added."""
    return label


def title_playstate(plugin_name: str, title: str) -> dict[str, Any]:
    """Play state for a whole title (season/episode fields left empty)."""
    return get_playstate(playstate_key(plugin_name=plugin_name, title=title, season="", episode=""))


def season_playstate(plugin_name: str, title: str, season: str) -> dict[str, Any]:
    """Play state for one season of a title (episode field left empty)."""
    return get_playstate(playstate_key(plugin_name=plugin_name, title=title, season=season, episode=""))


def track_playback_and_update_state_async(key: str) -> None:
    """No-op: custom resume/watched tracking is disabled; Kodi manages it."""
    return


# --- diff boundary: new file addon/core/plugin_manager.py begins here ---
+""" + +from __future__ import annotations + +import importlib.util +import inspect +import sys +from pathlib import Path +from types import ModuleType + +try: # pragma: no cover - Kodi runtime + import xbmc # type: ignore[import-not-found] +except ImportError: # pragma: no cover + xbmc = None + +from plugin_interface import BasisPlugin + +PLUGIN_DIR = Path(__file__).resolve().parent.parent / "plugins" +_PLUGIN_CACHE: dict[str, BasisPlugin] | None = None + + +def _log(message: str, level: int = 1) -> None: + if xbmc is not None: + xbmc.log(f"[ViewIt] {message}", level) + + +def import_plugin_module(path: Path) -> ModuleType: + """Importiert eine einzelne Plugin-Datei als Python-Modul.""" + spec = importlib.util.spec_from_file_location(path.stem, path) + if spec is None or spec.loader is None: + raise ImportError(f"Modul-Spezifikation fuer {path.name} fehlt.") + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + try: + spec.loader.exec_module(module) + except Exception: + sys.modules.pop(spec.name, None) + raise + return module + + +def discover_plugins() -> dict[str, BasisPlugin]: + """Laedt alle Plugins aus `plugins/*.py` und cached Instanzen im RAM.""" + global _PLUGIN_CACHE + if _PLUGIN_CACHE is not None: + return _PLUGIN_CACHE + + plugins: dict[str, BasisPlugin] = {} + if not PLUGIN_DIR.exists(): + _PLUGIN_CACHE = plugins + return plugins + + for file_path in sorted(PLUGIN_DIR.glob("*.py")): + if file_path.name.startswith("_"): + continue + try: + module = import_plugin_module(file_path) + except Exception as exc: + _log(f"Plugin-Datei {file_path.name} konnte nicht geladen werden: {exc}", 2) + continue + + preferred = getattr(module, "Plugin", None) + if inspect.isclass(preferred) and issubclass(preferred, BasisPlugin) and preferred is not BasisPlugin: + plugin_classes = [preferred] + else: + plugin_classes = [ + obj + for obj in module.__dict__.values() + if inspect.isclass(obj) and issubclass(obj, BasisPlugin) and obj is not 
BasisPlugin + ] + plugin_classes.sort(key=lambda cls: cls.__name__.casefold()) + + for cls in plugin_classes: + try: + instance = cls() + except Exception as exc: + _log(f"Plugin {cls.__name__} konnte nicht geladen werden: {exc}", 2) + continue + if getattr(instance, "is_available", True) is False: + reason = getattr(instance, "unavailable_reason", "Nicht verfuegbar.") + _log(f"Plugin {cls.__name__} deaktiviert: {reason}", 2) + continue + plugin_name = str(getattr(instance, "name", "") or "").strip() + if not plugin_name: + _log( + f"Plugin {cls.__name__} wurde ohne Name registriert und wird uebersprungen.", + 2, + ) + continue + if plugin_name in plugins: + _log( + f"Plugin-Name doppelt ({plugin_name}), {cls.__name__} wird uebersprungen.", + 2, + ) + continue + plugins[plugin_name] = instance + + plugins = dict(sorted(plugins.items(), key=lambda item: item[0].casefold())) + _PLUGIN_CACHE = plugins + return plugins + + +def plugin_has_capability(plugin: BasisPlugin, capability: str) -> bool: + """Prueft ob ein Plugin eine bestimmte Faehigkeit hat.""" + getter = getattr(plugin, "capabilities", None) + if callable(getter): + try: + capabilities = getter() + except Exception: + capabilities = set() + try: + return capability in set(capabilities or []) + except Exception: + return False + # Backwards compatibility: Popular via POPULAR_GENRE_LABEL constant. 
+ if capability == "popular_series": + return _popular_genre_label(plugin) is not None + return False + + +def _popular_genre_label(plugin: BasisPlugin) -> str | None: + label = getattr(plugin, "POPULAR_GENRE_LABEL", None) + if isinstance(label, str) and label.strip(): + return label.strip() + return None + + +def popular_genre_label(plugin: BasisPlugin) -> str | None: + """Gibt das POPULAR_GENRE_LABEL des Plugins zurueck, falls vorhanden.""" + return _popular_genre_label(plugin) + + +def plugins_with_popular() -> list[tuple[str, BasisPlugin, str]]: + """Liefert alle Plugins die 'popular_series' unterstuetzen.""" + results: list[tuple[str, BasisPlugin, str]] = [] + for plugin_name, plugin in discover_plugins().items(): + if not plugin_has_capability(plugin, "popular_series"): + continue + label = _popular_genre_label(plugin) or "" + results.append((plugin_name, plugin, label)) + return results + + +def series_url_params(plugin: BasisPlugin, title: str) -> dict[str, str]: + """Liefert series_url Parameter fuer Kodi-Navigation, falls vom Plugin bereitgestellt.""" + getter = getattr(plugin, "series_url_for_title", None) + if not callable(getter): + return {} + try: + series_url = str(getter(title) or "").strip() + except Exception: + return {} + return {"series_url": series_url} if series_url else {} diff --git a/addon/core/router.py b/addon/core/router.py new file mode 100644 index 0000000..f625634 --- /dev/null +++ b/addon/core/router.py @@ -0,0 +1,58 @@ +from __future__ import annotations +import sys +from typing import Any, Callable, Dict, Optional +from urllib.parse import parse_qs + + +class Router: + """A simple router for Kodi add-ons.""" + + def __init__(self) -> None: + self._routes: Dict[str, Callable[[Dict[str, str]], Any]] = {} + self._fallback: Optional[Callable[[Dict[str, str]], Any]] = None + + def route(self, action: str) -> Callable[[Callable[[Dict[str, str]], Any]], Callable[[Dict[str, str]], Any]]: + """Decorator to register a function for a 
specific action.""" + def decorator(handler: Callable[[Dict[str, str]], Any]) -> Callable[[Dict[str, str]], Any]: + self._routes[action] = handler + return handler + return decorator + + def fallback(self) -> Callable[[Callable[[Dict[str, str]], Any]], Callable[[Dict[str, str]], Any]]: + """Decorator to register the fallback (default) handler.""" + def decorator(handler: Callable[[Dict[str, str]], Any]) -> Callable[[Dict[str, str]], Any]: + self._fallback = handler + return handler + return decorator + + def dispatch(self, action: Optional[str] = None, params: Optional[Dict[str, str]] = None) -> Any: + """Dispatch the request to the registered handler.""" + if params is None: + params = {} + + handler = self._routes.get(action) if action else self._fallback + if not handler: + handler = self._fallback + + if handler: + return handler(params) + + raise KeyError(f"No route or fallback defined for action: {action}") + + +def parse_params(argv: Optional[list[str]] = None) -> dict[str, str]: + """Parst Kodi-Plugin-Parameter aus `sys.argv[2]` oder der übergebenen Liste.""" + if argv is None: + argv = sys.argv + if len(argv) <= 2 or not argv[2]: + return {} + raw_params = parse_qs(argv[2].lstrip("?"), keep_blank_values=True) + return {key: values[0] for key, values in raw_params.items()} + + +def parse_positive_int(value: str, *, default: int = 1) -> int: + try: + parsed = int(value) + return parsed if parsed > 0 else default + except (ValueError, TypeError): + return default diff --git a/addon/core/trakt.py b/addon/core/trakt.py new file mode 100644 index 0000000..aeccf39 --- /dev/null +++ b/addon/core/trakt.py @@ -0,0 +1,439 @@ +"""Trakt.tv API-Integration fuer ViewIT. + +Bietet OAuth-Device-Auth, Scrobbling, Watchlist, History und Calendar. 
+""" + +from __future__ import annotations + +import json +import time +from dataclasses import dataclass, field +from typing import Any, Callable, Dict, List, Optional +from urllib.parse import urlencode + +try: + import requests +except ImportError: + requests = None + +TRAKT_API_BASE = "https://api.trakt.tv" +TRAKT_API_VERSION = "2" + + +# --------------------------------------------------------------------------- +# Dataclasses +# --------------------------------------------------------------------------- + +@dataclass +class TraktToken: + access_token: str + refresh_token: str + expires_at: int # Unix-Timestamp + created_at: int + + +@dataclass(frozen=True) +class TraktDeviceCode: + device_code: str + user_code: str + verification_url: str + expires_in: int + interval: int + + +@dataclass(frozen=True) +class TraktMediaIds: + trakt: int = 0 + tmdb: int = 0 + imdb: str = "" + slug: str = "" + tvdb: int = 0 + + +@dataclass(frozen=True) +class TraktItem: + title: str + year: int + media_type: str # "movie" oder "show" + ids: TraktMediaIds = field(default_factory=TraktMediaIds) + season: int = 0 + episode: int = 0 + watched_at: str = "" + poster: str = "" + + +@dataclass(frozen=True) +class TraktCalendarItem: + """Ein Eintrag aus dem Trakt-Kalender (anstehende Episode).""" + show_title: str + show_year: int + show_ids: TraktMediaIds + season: int + episode: int + episode_title: str + first_aired: str # ISO-8601, z.B. 
"2026-03-02T02:00:00.000Z" + + +# --------------------------------------------------------------------------- +# Client +# --------------------------------------------------------------------------- + +class TraktClient: + """Trakt API Client.""" + + def __init__( + self, + client_id: str, + client_secret: str, + *, + log: Callable[[str], None] | None = None, + ) -> None: + self._client_id = client_id + self._client_secret = client_secret + self._log = log + + def _headers(self, token: str = "") -> dict[str, str]: + h = { + "Content-Type": "application/json", + "trakt-api-version": TRAKT_API_VERSION, + "trakt-api-key": self._client_id, + } + if token: + h["Authorization"] = f"Bearer {token}" + return h + + def _do_log(self, msg: str) -> None: + if callable(self._log): + self._log(f"[Trakt] {msg}") + + def _post(self, path: str, body: dict, *, token: str = "", timeout: int = 15) -> tuple[int, dict | None]: + if requests is None: + return 0, None + url = f"{TRAKT_API_BASE}{path}" + self._do_log(f"POST {path}") + try: + resp = requests.post(url, json=body, headers=self._headers(token), timeout=timeout) + status = resp.status_code + try: + payload = resp.json() + except Exception: + payload = None + self._do_log(f"POST {path} -> {status}") + return status, payload + except Exception as exc: + self._do_log(f"POST {path} FEHLER: {exc}") + return 0, None + + def _get(self, path: str, *, token: str = "", timeout: int = 15) -> tuple[int, Any]: + if requests is None: + return 0, None + url = f"{TRAKT_API_BASE}{path}" + self._do_log(f"GET {path}") + try: + resp = requests.get(url, headers=self._headers(token), timeout=timeout) + status = resp.status_code + try: + payload = resp.json() + except Exception: + payload = None + self._do_log(f"GET {path} -> {status}") + return status, payload + except Exception as exc: + self._do_log(f"GET {path} FEHLER: {exc}") + return 0, None + + # ------------------------------------------------------------------- + # OAuth Device Flow + # 
------------------------------------------------------------------- + + def device_code_request(self) -> TraktDeviceCode | None: + """POST /oauth/device/code – generiert User-Code + Verification-URL.""" + status, payload = self._post("/oauth/device/code", {"client_id": self._client_id}) + if status != 200 or not isinstance(payload, dict): + return None + return TraktDeviceCode( + device_code=payload.get("device_code", ""), + user_code=payload.get("user_code", ""), + verification_url=payload.get("verification_url", "https://trakt.tv/activate"), + expires_in=int(payload.get("expires_in", 600)), + interval=int(payload.get("interval", 5)), + ) + + def poll_device_token(self, device_code: str, *, interval: int = 5, expires_in: int = 600) -> TraktToken | None: + """Pollt POST /oauth/device/token bis autorisiert oder Timeout.""" + body = { + "code": device_code, + "client_id": self._client_id, + "client_secret": self._client_secret, + } + start = time.time() + while time.time() - start < expires_in: + status, payload = self._post("/oauth/device/token", body) + if status == 200 and isinstance(payload, dict): + return TraktToken( + access_token=payload.get("access_token", ""), + refresh_token=payload.get("refresh_token", ""), + expires_at=int(payload.get("created_at", 0)) + int(payload.get("expires_in", 0)), + created_at=int(payload.get("created_at", 0)), + ) + if status == 400: + # Pending – weiter warten + time.sleep(interval) + continue + if status in (404, 410, 418): + # Ungueltig, abgelaufen oder abgelehnt + self._do_log(f"Device-Auth abgebrochen: status={status}") + return None + if status == 429: + time.sleep(interval + 1) + continue + time.sleep(interval) + return None + + def refresh_token(self, refresh_tok: str) -> TraktToken | None: + """POST /oauth/token – Token erneuern.""" + body = { + "refresh_token": refresh_tok, + "client_id": self._client_id, + "client_secret": self._client_secret, + "redirect_uri": "urn:ietf:wg:oauth:2.0:oob", + "grant_type": 
"refresh_token", + } + status, payload = self._post("/oauth/token", body) + if status != 200 or not isinstance(payload, dict): + return None + return TraktToken( + access_token=payload.get("access_token", ""), + refresh_token=payload.get("refresh_token", ""), + expires_at=int(payload.get("created_at", 0)) + int(payload.get("expires_in", 0)), + created_at=int(payload.get("created_at", 0)), + ) + + # ------------------------------------------------------------------- + # Scrobble + # ------------------------------------------------------------------- + + def _build_scrobble_body( + self, + *, + media_type: str, + title: str, + tmdb_id: int, + imdb_id: str = "", + season: int = 0, + episode: int = 0, + progress: float = 0.0, + ) -> dict: + ids: dict[str, object] = {} + if tmdb_id: + ids["tmdb"] = tmdb_id + if imdb_id: + ids["imdb"] = imdb_id + + body: dict[str, object] = {"progress": round(progress, 1)} + + if media_type == "tv" and season > 0 and episode > 0: + body["show"] = {"title": title, "ids": ids} + body["episode"] = {"season": season, "number": episode} + else: + body["movie"] = {"title": title, "ids": ids} + + return body + + def scrobble_start( + self, token: str, *, media_type: str, title: str, + tmdb_id: int, imdb_id: str = "", + season: int = 0, episode: int = 0, progress: float = 0.0, + ) -> bool: + """POST /scrobble/start""" + body = self._build_scrobble_body( + media_type=media_type, title=title, tmdb_id=tmdb_id, imdb_id=imdb_id, + season=season, episode=episode, progress=progress, + ) + status, _ = self._post("/scrobble/start", body, token=token) + return status in (200, 201) + + def scrobble_pause( + self, token: str, *, media_type: str, title: str, + tmdb_id: int, imdb_id: str = "", + season: int = 0, episode: int = 0, progress: float = 50.0, + ) -> bool: + """POST /scrobble/pause""" + body = self._build_scrobble_body( + media_type=media_type, title=title, tmdb_id=tmdb_id, imdb_id=imdb_id, + season=season, episode=episode, progress=progress, + ) + 
status, _ = self._post("/scrobble/pause", body, token=token) + return status in (200, 201) + + def scrobble_stop( + self, token: str, *, media_type: str, title: str, + tmdb_id: int, imdb_id: str = "", + season: int = 0, episode: int = 0, progress: float = 100.0, + ) -> bool: + """POST /scrobble/stop""" + body = self._build_scrobble_body( + media_type=media_type, title=title, tmdb_id=tmdb_id, imdb_id=imdb_id, + season=season, episode=episode, progress=progress, + ) + status, _ = self._post("/scrobble/stop", body, token=token) + return status in (200, 201) + + # ------------------------------------------------------------------- + # Watchlist + # ------------------------------------------------------------------- + + def get_watchlist(self, token: str, *, media_type: str = "") -> list[TraktItem]: + """GET /users/me/watchlist[/movies|/shows]""" + path = "/users/me/watchlist" + if media_type in ("movies", "shows"): + path = f"{path}/{media_type}" + status, payload = self._get(path, token=token) + if status != 200 or not isinstance(payload, list): + return [] + return self._parse_list_items(payload) + + def add_to_watchlist( + self, token: str, *, media_type: str, tmdb_id: int, imdb_id: str = "", + ) -> bool: + """POST /sync/watchlist""" + ids: dict[str, object] = {} + if tmdb_id: + ids["tmdb"] = tmdb_id + if imdb_id: + ids["imdb"] = imdb_id + key = "movies" if media_type == "movie" else "shows" + body = {key: [{"ids": ids}]} + status, _ = self._post("/sync/watchlist", body, token=token) + return status in (200, 201) + + def remove_from_watchlist( + self, token: str, *, media_type: str, tmdb_id: int, imdb_id: str = "", + ) -> bool: + """POST /sync/watchlist/remove""" + ids: dict[str, object] = {} + if tmdb_id: + ids["tmdb"] = tmdb_id + if imdb_id: + ids["imdb"] = imdb_id + key = "movies" if media_type == "movie" else "shows" + body = {key: [{"ids": ids}]} + status, _ = self._post("/sync/watchlist/remove", body, token=token) + return status == 200 + + # 
------------------------------------------------------------------- + # History + # ------------------------------------------------------------------- + + def get_history( + self, token: str, *, media_type: str = "", page: int = 1, limit: int = 20, + ) -> list[TraktItem]: + """GET /users/me/history[/movies|/shows|/episodes]""" + path = "/users/me/history" + if media_type in ("movies", "shows", "episodes"): + path = f"{path}/{media_type}" + path = f"{path}?page={page}&limit={limit}" + status, payload = self._get(path, token=token) + if status != 200 or not isinstance(payload, list): + return [] + return self._parse_history_items(payload) + + # ------------------------------------------------------------------- + # Calendar + # ------------------------------------------------------------------- + + def get_calendar(self, token: str, start_date: str = "", days: int = 7) -> list[TraktCalendarItem]: + """GET /calendars/my/shows/{start_date}/{days} + + start_date: YYYY-MM-DD (leer = heute). + Liefert anstehende Episoden der eigenen Watchlist-Serien. 
+ """ + if not start_date: + from datetime import date + start_date = date.today().strftime("%Y-%m-%d") + path = f"/calendars/my/shows/{start_date}/{days}" + status, payload = self._get(path, token=token) + if status != 200 or not isinstance(payload, list): + return [] + items: list[TraktCalendarItem] = [] + for entry in payload: + if not isinstance(entry, dict): + continue + show = entry.get("show") or {} + ep = entry.get("episode") or {} + show_ids = self._parse_ids(show.get("ids") or {}) + items.append(TraktCalendarItem( + show_title=str(show.get("title", "") or ""), + show_year=int(show.get("year", 0) or 0), + show_ids=show_ids, + season=int(ep.get("season", 0) or 0), + episode=int(ep.get("number", 0) or 0), + episode_title=str(ep.get("title", "") or ""), + first_aired=str(entry.get("first_aired", "") or ""), + )) + return items + + # ------------------------------------------------------------------- + # Parser + # ------------------------------------------------------------------- + + @staticmethod + def _parse_ids(ids_dict: dict) -> TraktMediaIds: + return TraktMediaIds( + trakt=int(ids_dict.get("trakt", 0) or 0), + tmdb=int(ids_dict.get("tmdb", 0) or 0), + imdb=str(ids_dict.get("imdb", "") or ""), + slug=str(ids_dict.get("slug", "") or ""), + tvdb=int(ids_dict.get("tvdb", 0) or 0), + ) + + def _parse_list_items(self, items: list) -> list[TraktItem]: + result: list[TraktItem] = [] + for entry in items: + if not isinstance(entry, dict): + continue + item_type = entry.get("type", "") + media = entry.get(item_type) or entry.get("movie") or entry.get("show") or {} + if not isinstance(media, dict): + continue + ids = self._parse_ids(media.get("ids") or {}) + result.append(TraktItem( + title=str(media.get("title", "") or ""), + year=int(media.get("year", 0) or 0), + media_type=item_type, + ids=ids, + )) + return result + + def _parse_history_items(self, items: list) -> list[TraktItem]: + result: list[TraktItem] = [] + for entry in items: + if not isinstance(entry, 
dict): + continue + item_type = entry.get("type", "") + watched_at = str(entry.get("watched_at", "") or "") + + if item_type == "episode": + show = entry.get("show") or {} + ep = entry.get("episode") or {} + ids = self._parse_ids((show.get("ids") or {})) + result.append(TraktItem( + title=str(show.get("title", "") or ""), + year=int(show.get("year", 0) or 0), + media_type="episode", + ids=ids, + season=int(ep.get("season", 0) or 0), + episode=int(ep.get("number", 0) or 0), + watched_at=watched_at, + )) + else: + media = entry.get("movie") or entry.get("show") or {} + ids = self._parse_ids(media.get("ids") or {}) + result.append(TraktItem( + title=str(media.get("title", "") or ""), + year=int(media.get("year", 0) or 0), + media_type=item_type, + ids=ids, + watched_at=watched_at, + )) + return result diff --git a/addon/core/updater.py b/addon/core/updater.py new file mode 100644 index 0000000..e3e4af6 --- /dev/null +++ b/addon/core/updater.py @@ -0,0 +1,738 @@ +#!/usr/bin/env python3 +"""Update- und Versionsverwaltung fuer ViewIT. 
+ +Dieses Modul kuemmert sich um: +- Update-Kanaele (Main, Nightly, Dev, Custom) +- Versions-Abfrage und -Installation aus Repositories +- Changelog-Abruf +- Repository-Quellen-Verwaltung +- ResolveURL Auto-Installation +""" + +from __future__ import annotations + +import io +import json +import os +import re +import time +import xml.etree.ElementTree as ET +import zipfile +from urllib.error import URLError +from urllib.request import Request, urlopen + +try: # pragma: no cover - Kodi runtime + import xbmc # type: ignore[import-not-found] + import xbmcaddon # type: ignore[import-not-found] + import xbmcgui # type: ignore[import-not-found] + import xbmcvfs # type: ignore[import-not-found] +except ImportError: # pragma: no cover - allow importing outside Kodi + xbmc = None + xbmcaddon = None + xbmcgui = None + xbmcvfs = None + +from plugin_helpers import show_error, show_notification + +# --------------------------------------------------------------------------- +# Konstanten +# --------------------------------------------------------------------------- + +UPDATE_CHANNEL_MAIN = 0 +UPDATE_CHANNEL_NIGHTLY = 1 +UPDATE_CHANNEL_CUSTOM = 2 +UPDATE_CHANNEL_DEV = 3 +AUTO_UPDATE_INTERVAL_SEC = 6 * 60 * 60 +UPDATE_HTTP_TIMEOUT_SEC = 8 +UPDATE_ADDON_ID = "plugin.video.viewit" +RESOLVEURL_ADDON_ID = "script.module.resolveurl" +RESOLVEURL_AUTO_INSTALL_INTERVAL_SEC = 6 * 60 * 60 + + +# --------------------------------------------------------------------------- +# Hilfsfunktionen (Settings-Zugriff) +# --------------------------------------------------------------------------- + +# Diese Callbacks werden von default.py einmal gesetzt, damit updater.py +# keine zirkulaeren Abhaengigkeiten hat. 
+_get_setting_string = None +_get_setting_bool = None +_get_setting_int = None +_set_setting_string = None +_get_addon = None +_log_fn = None + + +def init( + *, + get_setting_string, + get_setting_bool, + get_setting_int, + set_setting_string, + get_addon, + log_fn, +) -> None: + """Initialisiert Callbacks fuer Settings-Zugriff.""" + global _get_setting_string, _get_setting_bool, _get_setting_int + global _set_setting_string, _get_addon, _log_fn + _get_setting_string = get_setting_string + _get_setting_bool = get_setting_bool + _get_setting_int = get_setting_int + _set_setting_string = set_setting_string + _get_addon = get_addon + _log_fn = log_fn + + +def _log(message: str, level: int = 1) -> None: + if _log_fn is not None: + _log_fn(message, level) + + +# --------------------------------------------------------------------------- +# URL-Normalisierung +# --------------------------------------------------------------------------- + + +def normalize_update_info_url(raw: str) -> str: + value = str(raw or "").strip() + default = "http://127.0.0.1:8080/repo/addons.xml" + if not value: + return default + if value.endswith("/addons.xml"): + return value + return value.rstrip("/") + "/addons.xml" + + +# --------------------------------------------------------------------------- +# Update-Kanaele +# --------------------------------------------------------------------------- + + +def selected_update_channel() -> int: + channel = _get_setting_int("update_channel", default=UPDATE_CHANNEL_MAIN) + if channel not in {UPDATE_CHANNEL_MAIN, UPDATE_CHANNEL_NIGHTLY, UPDATE_CHANNEL_CUSTOM, UPDATE_CHANNEL_DEV}: + return UPDATE_CHANNEL_MAIN + return channel + + +def channel_label(channel: int) -> str: + if channel == UPDATE_CHANNEL_NIGHTLY: + return "Nightly" + if channel == UPDATE_CHANNEL_DEV: + return "Dev" + if channel == UPDATE_CHANNEL_CUSTOM: + return "Custom" + return "Main" + + +# --------------------------------------------------------------------------- +# Versionierung +# 
--------------------------------------------------------------------------- + + +def version_sort_key(version: str) -> tuple[int, ...]: + base = str(version or "").split("-", 1)[0] + parts = [] + for chunk in base.split("."): + try: + parts.append(int(chunk)) + except Exception: + parts.append(0) + while len(parts) < 4: + parts.append(0) + return tuple(parts[:4]) + + +def is_stable_version(version: str) -> bool: + return bool(re.match(r"^\d+\.\d+\.\d+$", str(version or "").strip())) + + +def is_nightly_version(version: str) -> bool: + return bool(re.match(r"^\d+\.\d+\.\d+-nightly$", str(version or "").strip())) + + +def is_dev_version(version: str) -> bool: + return bool(re.match(r"^\d+\.\d+\.\d+-dev$", str(version or "").strip())) + + +def filter_versions_for_channel(channel: int, versions: list[str]) -> list[str]: + if channel == UPDATE_CHANNEL_MAIN: + return [v for v in versions if is_stable_version(v)] + if channel == UPDATE_CHANNEL_NIGHTLY: + return [v for v in versions if is_nightly_version(v)] + if channel == UPDATE_CHANNEL_DEV: + return [v for v in versions if is_dev_version(v)] + return list(versions) + + +# --------------------------------------------------------------------------- +# HTTP-Helfer +# --------------------------------------------------------------------------- + + +def read_text_url(url: str, *, timeout: int = UPDATE_HTTP_TIMEOUT_SEC) -> str: + request = Request(url, headers={"User-Agent": "ViewIT/1.0"}) + response = None + try: + response = urlopen(request, timeout=timeout) + data = response.read() + finally: + if response is not None: + try: + response.close() + except Exception: + pass + return data.decode("utf-8", errors="replace") + + +def read_binary_url(url: str, *, timeout: int = UPDATE_HTTP_TIMEOUT_SEC) -> bytes: + request = Request(url, headers={"User-Agent": "ViewIT/1.0"}) + response = None + try: + response = urlopen(request, timeout=timeout) + return response.read() + finally: + if response is not None: + try: + response.close() 
+ except Exception: + pass + + +# --------------------------------------------------------------------------- +# Repo-Abfragen +# --------------------------------------------------------------------------- + + +def extract_repo_addon_version(xml_text: str, addon_id: str = UPDATE_ADDON_ID) -> str: + try: + root = ET.fromstring(xml_text) + except Exception: + return "-" + if root.tag == "addon": + return str(root.attrib.get("version") or "-") + for node in root.findall("addon"): + if str(node.attrib.get("id") or "").strip() == addon_id: + version = str(node.attrib.get("version") or "").strip() + return version or "-" + return "-" + + +def fetch_repo_addon_version(info_url: str) -> str: + url = normalize_update_info_url(info_url) + try: + xml_text = read_text_url(url) + except URLError: + return "-" + except Exception: + return "-" + return extract_repo_addon_version(xml_text) + + +def _extract_repo_identity(info_url: str) -> tuple[str, str, str, str] | None: + from urllib.parse import urlparse + + parsed = urlparse(str(info_url or "").strip()) + parts = [part for part in parsed.path.split("/") if part] + try: + raw_idx = parts.index("raw") + except ValueError: + return None + if raw_idx < 2 or (raw_idx + 2) >= len(parts): + return None + if parts[raw_idx + 1] != "branch": + return None + owner = parts[raw_idx - 2] + repo = parts[raw_idx - 1] + branch = parts[raw_idx + 2] + scheme = parsed.scheme or "https" + host = parsed.netloc + if not owner or not repo or not branch or not host: + return None + return scheme, host, owner, repo + "|" + branch + + +def fetch_repo_versions(info_url: str) -> list[str]: + identity = _extract_repo_identity(info_url) + if identity is None: + one = fetch_repo_addon_version(info_url) + return [one] if one != "-" else [] + + scheme, host, owner, repo_branch = identity + repo, branch = repo_branch.split("|", 1) + api_url = f"{scheme}://{host}/api/v1/repos/{owner}/{repo}/contents/{UPDATE_ADDON_ID}?ref={branch}" + + try: + payload = 
read_text_url(api_url) + data = json.loads(payload) + except Exception: + one = fetch_repo_addon_version(info_url) + return [one] if one != "-" else [] + + versions: list[str] = [] + if isinstance(data, list): + for entry in data: + if not isinstance(entry, dict): + continue + name = str(entry.get("name") or "") + match = re.match(rf"^{re.escape(UPDATE_ADDON_ID)}-(.+)\.zip$", name) + if not match: + continue + version = match.group(1).strip() + if version: + versions.append(version) + unique = sorted(set(versions), key=version_sort_key, reverse=True) + return unique + + +# --------------------------------------------------------------------------- +# Changelog +# --------------------------------------------------------------------------- + + +def extract_changelog_section(changelog_text: str, version: str) -> str: + lines = changelog_text.splitlines() + wanted = (version or "").strip() + if not wanted: + return "\n".join(lines[:120]).strip() + + start = -1 + for idx, line in enumerate(lines): + if line.startswith("## ") and wanted in line: + start = idx + break + if start < 0: + return f"Kein Changelog-Abschnitt fuer Version {wanted} gefunden." 
+ + end = len(lines) + for idx in range(start + 1, len(lines)): + if lines[idx].startswith("## "): + end = idx + break + return "\n".join(lines[start:end]).strip() + + +def fetch_changelog_for_channel(channel: int, version: str) -> str: + version_text = str(version or "").strip().casefold() + if version_text.endswith("-dev"): + url = "https://gitea.it-drui.de/viewit/ViewIT/raw/branch/dev/CHANGELOG-DEV.md" + elif version_text.endswith("-nightly"): + url = "https://gitea.it-drui.de/viewit/ViewIT/raw/branch/nightly/CHANGELOG-NIGHTLY.md" + elif channel == UPDATE_CHANNEL_DEV: + url = "https://gitea.it-drui.de/viewit/ViewIT/raw/branch/dev/CHANGELOG-DEV.md" + elif channel == UPDATE_CHANNEL_MAIN: + url = "https://gitea.it-drui.de/viewit/ViewIT/raw/branch/main/CHANGELOG.md" + else: + url = "https://gitea.it-drui.de/viewit/ViewIT/raw/branch/nightly/CHANGELOG-NIGHTLY.md" + try: + text = read_text_url(url) + except Exception: + return "Changelog konnte nicht geladen werden." + return extract_changelog_section(text, version) + + +# --------------------------------------------------------------------------- +# Installation +# --------------------------------------------------------------------------- + + +def install_addon_version_manual(info_url: str, version: str) -> bool: + base = info_url[: -len("/addons.xml")] if info_url.endswith("/addons.xml") else info_url.rstrip("/") + zip_url = f"{base}/{UPDATE_ADDON_ID}/{UPDATE_ADDON_ID}-{version}.zip" + try: + zip_bytes = read_binary_url(zip_url) + except Exception as exc: + _log(f"Download fehlgeschlagen ({zip_url}): {exc}", 2) + return False + + if xbmcvfs is None: + return False + + addons_root = xbmcvfs.translatePath("special://home/addons") + addons_root_real = os.path.realpath(addons_root) + try: + with zipfile.ZipFile(io.BytesIO(zip_bytes)) as archive: + for member in archive.infolist(): + name = str(member.filename or "") + if not name or name.endswith("/"): + continue + target = os.path.realpath(os.path.join(addons_root, 
def install_addon_version_manual(info_url: str, version: str) -> bool:
    """Download the addon ZIP and unpack it into Kodi's addons directory.

    Every archive entry is resolved against the real addons root; entries
    that would escape it (path traversal) abort the whole update.
    Returns ``True`` on success.
    """
    if info_url.endswith("/addons.xml"):
        base = info_url[: -len("/addons.xml")]
    else:
        base = info_url.rstrip("/")
    zip_url = f"{base}/{UPDATE_ADDON_ID}/{UPDATE_ADDON_ID}-{version}.zip"
    try:
        zip_bytes = read_binary_url(zip_url)
    except Exception as exc:
        _log(f"Download fehlgeschlagen ({zip_url}): {exc}", 2)
        return False

    if xbmcvfs is None:
        return False

    addons_root = xbmcvfs.translatePath("special://home/addons")
    addons_root_real = os.path.realpath(addons_root)
    try:
        with zipfile.ZipFile(io.BytesIO(zip_bytes)) as archive:
            for member in archive.infolist():
                name = str(member.filename or "")
                if not name or name.endswith("/"):
                    continue  # skip directory entries
                target = os.path.realpath(os.path.join(addons_root, name))
                # Security: reject anything resolving outside the addons dir.
                if not target.startswith(addons_root_real + os.sep):
                    _log(f"Sicherheitswarnung: Verdaechtiger ZIP-Eintrag abgelehnt: {name!r}", 2)
                    return False
                os.makedirs(os.path.dirname(target), exist_ok=True)
                with archive.open(member, "r") as src, open(target, "wb") as dst:
                    dst.write(src.read())
    except Exception as exc:
        _log(f"Entpacken fehlgeschlagen: {exc}", 2)
        return False

    builtin = getattr(xbmc, "executebuiltin", None) if xbmc else None
    if callable(builtin):
        builtin("UpdateLocalAddons")
    return True


def install_addon_version(info_url: str, version: str) -> bool:
    """Install *version* via Kodi's ``InstallAddon`` builtin.

    Polls up to 20 s for the on-disk version to change; falls back to
    :func:`install_addon_version_manual` when the builtin route fails.
    """
    if info_url.endswith("/addons.xml"):
        base = info_url[: -len("/addons.xml")]
    else:
        base = info_url.rstrip("/")
    zip_url = f"{base}/{UPDATE_ADDON_ID}/{UPDATE_ADDON_ID}-{version}.zip"

    builtin = getattr(xbmc, "executebuiltin", None) if xbmc else None
    if callable(builtin):
        try:
            before = installed_addon_version_from_disk()
            builtin(f"InstallAddon({zip_url})")
            for _ in range(20):
                time.sleep(1)
                if installed_addon_version_from_disk() == version:
                    return True
                if before == version:
                    # Requested version was already installed before we started.
                    return True
        except Exception as exc:
            _log(f"InstallAddon fehlgeschlagen, fallback aktiv: {exc}", 2)

    return install_addon_version_manual(info_url, version)


def installed_addon_version_from_disk() -> str:
    """Read the installed addon version from its addon.xml; "0.0.0" on any failure."""
    if xbmcvfs is None:
        return "0.0.0"
    try:
        addon_xml = xbmcvfs.translatePath(f"special://home/addons/{UPDATE_ADDON_ID}/addon.xml")
    except Exception:
        return "0.0.0"
    if not addon_xml or not os.path.exists(addon_xml):
        return "0.0.0"
    try:
        root = ET.parse(addon_xml).getroot()
        found = str(root.attrib.get("version") or "").strip()
    except Exception:
        return "0.0.0"
    return found or "0.0.0"
def is_addon_installed(addon_id: str) -> bool:
    """Check whether a Kodi addon is installed (builtin check, then disk fallback)."""
    addon_id = str(addon_id or "").strip()
    if not addon_id:
        return False
    cond = getattr(xbmc, "getCondVisibility", None) if xbmc else None
    if callable(cond):
        try:
            return bool(cond(f"System.HasAddon({addon_id})"))
        except Exception:
            pass  # fall through to the on-disk check below
    if xbmcvfs is None:
        return False
    try:
        addon_xml = xbmcvfs.translatePath(f"special://home/addons/{addon_id}/addon.xml")
    except Exception:
        return False
    return bool(addon_xml and os.path.exists(addon_xml))


# ---------------------------------------------------------------------------
# Repository-Quellen-Verwaltung
# ---------------------------------------------------------------------------


def repo_addon_xml_path() -> str:
    """Path to repository.viewit's addon.xml, or "" outside Kodi."""
    if xbmcvfs is None:
        return ""
    try:
        return xbmcvfs.translatePath("special://home/addons/repository.viewit/addon.xml")
    except Exception:
        return ""


def update_repository_source(info_url: str) -> bool:
    """Retarget the installed repository addon at *info_url*.

    Rewrites the ``info``/``checksum``/``datadir`` nodes of the first ``dir``
    entry in the repository's addon.xml. Returns ``True`` on success.
    """
    path = repo_addon_xml_path()
    if not path or not os.path.exists(path):
        return False
    try:
        tree = ET.parse(path)
        dir_node = tree.getroot().find(".//dir")
        if dir_node is None:
            return False
        info_node = dir_node.find("info")
        checksum_node = dir_node.find("checksum")
        datadir_node = dir_node.find("datadir")
        if info_node is None or checksum_node is None or datadir_node is None:
            return False
        if info_url.endswith("/addons.xml"):
            base = info_url[: -len("/addons.xml")]
        else:
            base = info_url.rstrip("/")
        info_node.text = info_url
        checksum_node.text = f"{base}/addons.xml.md5"
        datadir_node.text = f"{base}/"
        tree.write(path, encoding="utf-8", xml_declaration=True)
        return True
    except Exception as exc:
        _log(f"Repository-URL konnte nicht gesetzt werden: {exc}", 2)
        return False
def sync_resolveurl_status_setting() -> None:
    """Mirror the ResolveURL install state into the settings UI."""
    installed = is_addon_installed(RESOLVEURL_ADDON_ID)
    _set_setting_string("resolveurl_status", "Installiert" if installed else "Fehlt")


def install_kodi_addon(addon_id: str, *, wait_seconds: int) -> bool:
    """Trigger ``InstallAddon`` for *addon_id*, optionally waiting for completion.

    With ``wait_seconds <= 0`` only a single post-trigger check is done.
    """
    if is_addon_installed(addon_id):
        return True
    builtin = getattr(xbmc, "executebuiltin", None) if xbmc else None
    if not callable(builtin):
        return False
    try:
        builtin(f"InstallAddon({addon_id})")
        builtin("UpdateLocalAddons")
    except Exception as exc:
        _log(f"InstallAddon fehlgeschlagen ({addon_id}): {exc}", 2)
        return False

    if wait_seconds <= 0:
        return is_addon_installed(addon_id)
    # Poll once per second until the deadline (at least one second).
    deadline = time.time() + max(1, int(wait_seconds))
    while time.time() < deadline:
        if is_addon_installed(addon_id):
            return True
        time.sleep(1)
    return is_addon_installed(addon_id)


def ensure_resolveurl_installed(*, force: bool, silent: bool) -> bool:
    """Ensure script.module.resolveurl is present; optionally notify the user.

    Non-forced runs honour the auto-install setting and are rate-limited via
    the ``resolveurl_last_ts`` setting.
    """
    if is_addon_installed(RESOLVEURL_ADDON_ID):
        sync_resolveurl_status_setting()
        return True
    if not force and not _get_setting_bool("resolveurl_auto_install", default=True):
        sync_resolveurl_status_setting()
        return False

    now = int(time.time())
    if not force:
        last_try = _get_setting_int("resolveurl_last_ts", default=0)
        if last_try > 0 and (now - last_try) < RESOLVEURL_AUTO_INSTALL_INTERVAL_SEC:
            return False
    _set_setting_string("resolveurl_last_ts", str(now))

    ok = install_kodi_addon(RESOLVEURL_ADDON_ID, wait_seconds=20 if force else 0)
    sync_resolveurl_status_setting()

    if not silent and xbmcgui is not None:
        dialog = xbmcgui.Dialog()
        if ok:
            dialog.notification(
                "ResolveURL",
                "script.module.resolveurl ist installiert.",
                xbmcgui.NOTIFICATION_INFO,
                4000,
            )
        else:
            dialog.notification(
                "ResolveURL",
                "Installation fehlgeschlagen. Bitte Repository/Netzwerk pruefen.",
                xbmcgui.NOTIFICATION_ERROR,
                5000,
            )
    return ok
Bitte Repository/Netzwerk pruefen.", + xbmcgui.NOTIFICATION_ERROR, + 5000, + ) + return ok + + +def maybe_auto_install_resolveurl(action: str | None) -> None: + if (action or "").strip(): + return + ensure_resolveurl_installed(force=False, silent=True) + + +# --------------------------------------------------------------------------- +# Update-Kanal anwenden / Sync +# --------------------------------------------------------------------------- + + +def resolve_update_info_url() -> str: + channel = selected_update_channel() + if channel == UPDATE_CHANNEL_NIGHTLY: + raw = _get_setting_string("update_repo_url_nightly") + elif channel == UPDATE_CHANNEL_DEV: + raw = _get_setting_string("update_repo_url_dev") + elif channel == UPDATE_CHANNEL_CUSTOM: + raw = _get_setting_string("update_repo_url") + else: + raw = _get_setting_string("update_repo_url_main") + return normalize_update_info_url(raw) + + +def sync_update_channel_status_settings() -> None: + channel = selected_update_channel() + selected_info_url = resolve_update_info_url() + available_selected = fetch_repo_addon_version(selected_info_url) + _set_setting_string("update_active_channel", channel_label(channel)) + _set_setting_string("update_active_repo_url", selected_info_url) + _set_setting_string("update_available_selected", available_selected) + + +def sync_update_version_settings() -> None: + addon_version = installed_addon_version_from_disk() + if addon_version == "0.0.0": + addon = _get_addon() + if addon is not None: + try: + addon_version = str(addon.getAddonInfo("version") or "0.0.0") + except Exception: + addon_version = "0.0.0" + _set_setting_string("update_installed_version", addon_version) + sync_resolveurl_status_setting() + sync_update_channel_status_settings() + + +def apply_update_channel(*, silent: bool = False) -> bool: + if xbmc is None: # pragma: no cover - outside Kodi + return False + info_url = resolve_update_info_url() + channel = selected_update_channel() + sync_update_version_settings() + 
def apply_update_channel(*, silent: bool = False) -> bool:
    """Persist the selected channel, retarget the repo and install its newest version.

    Returns ``True`` only when the repository source was updated AND the
    target version is (or becomes) installed.
    """
    if xbmc is None:  # pragma: no cover - outside Kodi
        return False
    info_url = resolve_update_info_url()
    channel = selected_update_channel()
    sync_update_version_settings()

    applied = update_repository_source(info_url)
    installed_version = _get_setting_string("update_installed_version").strip() or "0.0.0"
    versions = filter_versions_for_channel(channel, fetch_repo_versions(info_url))
    target_version = versions[0] if versions else "-"

    if target_version == "-":
        install_result = False
    elif target_version == installed_version:
        install_result = True  # nothing to do, already current
    else:
        install_result = install_addon_version(info_url, target_version)

    builtin = getattr(xbmc, "executebuiltin", None)
    if callable(builtin):
        builtin("UpdateAddonRepos")
        builtin("UpdateLocalAddons")
    if not silent:
        if not applied:
            warning_icon = getattr(xbmcgui, "NOTIFICATION_WARNING", xbmcgui.NOTIFICATION_INFO)
            show_notification(
                "Updates",
                "Kanal gespeichert, aber repository.viewit nicht gefunden.",
                icon=warning_icon,
                milliseconds=5000,
            )
        elif target_version == "-":
            show_error("Updates", "Kanal angewendet, aber keine Version im Kanal gefunden.", milliseconds=5000)
        elif not install_result:
            show_error(
                "Updates",
                f"Kanal angewendet, Installation von {target_version} fehlgeschlagen.",
                milliseconds=5000,
            )
        elif target_version == installed_version:
            show_notification(
                "Updates",
                f"Kanal angewendet: {channel_label(selected_update_channel())} ({target_version} bereits installiert)",
                milliseconds=4500,
            )
        else:
            show_notification(
                "Updates",
                f"Kanal angewendet: {channel_label(selected_update_channel())} -> {target_version} installiert",
                milliseconds=5000,
            )
    sync_update_version_settings()
    return applied and install_result


def run_update_check(*, silent: bool = False) -> None:
    """Trigger Kodi repo and addon updates (applies the channel silently first)."""
    if xbmc is None:  # pragma: no cover - outside Kodi
        return
    try:
        apply_update_channel(silent=True)
        if not silent:
            builtin = getattr(xbmc, "executebuiltin", None)
            if callable(builtin):
                builtin("ActivateWindow(addonbrowser,addons://updates/)")
            show_notification("Updates", "Update-Check gestartet.", milliseconds=4000)
    except Exception as exc:
        _log(f"Update-Pruefung fehlgeschlagen: {exc}", 2)
        if not silent:
            show_error("Updates", "Update-Check fehlgeschlagen.", milliseconds=4000)


def show_version_selector() -> None:
    """Interactive version picker: select, preview changelog, confirm, install."""
    if xbmc is None:  # pragma: no cover - outside Kodi
        return

    info_url = resolve_update_info_url()
    channel = selected_update_channel()
    sync_update_version_settings()

    versions = filter_versions_for_channel(channel, fetch_repo_versions(info_url))
    if not versions:
        show_error("Updates", "Keine Versionen im Repo gefunden.", milliseconds=4000)
        return

    installed = _get_setting_string("update_installed_version").strip() or "-"
    options = [f"{v} (installiert)" if v == installed else v for v in versions]

    choice = xbmcgui.Dialog().select("Version waehlen", options)
    if not (0 <= choice < len(versions)):
        return

    version = versions[choice]
    changelog = fetch_changelog_for_channel(channel, version)
    viewer = getattr(xbmcgui.Dialog(), "textviewer", None)
    if callable(viewer):
        try:
            viewer(f"Changelog {version}", changelog)
        except Exception:
            pass

    dialog = xbmcgui.Dialog()
    try:
        # Older Kodi versions reject the yes/no label keywords.
        confirmed = dialog.yesno(
            "Version installieren",
            f"Installiert: {installed}",
            f"Ausgewaehlt: {version}",
            yeslabel="Installieren",
            nolabel="Abbrechen",
        )
    except TypeError:
        confirmed = dialog.yesno("Version installieren", f"Installiert: {installed}", f"Ausgewaehlt: {version}")
    if not confirmed:
        return

    show_notification("Updates", f"Installation gestartet: {version}", milliseconds=2500)
    if install_addon_version(info_url, version):
        sync_update_version_settings()
        show_notification("Updates", f"Version {version} installiert.", milliseconds=4000)
    else:
        show_error("Updates", f"Installation von {version} fehlgeschlagen.", milliseconds=4500)
maybe_run_auto_update_check(action: str | None) -> None: + action = (action or "").strip() + if action: + return + if not _get_setting_bool("auto_update_enabled", default=False): + return + now = int(time.time()) + last = _get_setting_int("auto_update_last_ts", default=0) + if last > 0 and (now - last) < AUTO_UPDATE_INTERVAL_SEC: + return + _set_setting_string("auto_update_last_ts", str(now)) + run_update_check(silent=True) diff --git a/addon/default.py b/addon/default.py index b38fc64..e5f0100 100644 --- a/addon/default.py +++ b/addon/default.py @@ -109,20 +109,25 @@ except ImportError: # pragma: no cover - allow importing outside Kodi (e.g. lin from plugin_interface import BasisPlugin from http_session_pool import close_all_sessions -from plugin_helpers import normalize_resolved_stream_url, show_error, show_notification +from plugin_helpers import normalize_resolved_stream_url from metadata_utils import ( collect_plugin_metadata as _collect_plugin_metadata, merge_metadata as _merge_metadata, metadata_policy as _metadata_policy_impl, needs_tmdb as _needs_tmdb, ) -from tmdb import TmdbCastMember, fetch_tv_episode_credits, lookup_movie, lookup_tv_season, lookup_tv_season_summary, lookup_tv_show +from tmdb import TmdbCastMember, TmdbExternalIds, fetch_external_ids, fetch_tv_episode_credits, lookup_movie, lookup_tv_season, lookup_tv_season_summary, lookup_tv_show +from core.router import Router + +_router = Router() PLUGIN_DIR = Path(__file__).with_name("plugins") _PLUGIN_CACHE: dict[str, BasisPlugin] | None = None _TMDB_CACHE: dict[str, tuple[dict[str, str], dict[str, str]]] = {} _TMDB_CAST_CACHE: dict[str, list[TmdbCastMember]] = {} _TMDB_ID_CACHE: dict[str, int] = {} +_IMDB_ID_CACHE: dict[str, str] = {} +_MEDIA_TYPE_CACHE: dict[str, str] = {} _TMDB_SEASON_CACHE: dict[tuple[int, int, str, str], dict[int, tuple[dict[str, str], dict[str, str]]]] = {} _TMDB_SEASON_SUMMARY_CACHE: dict[tuple[int, int, str, str], tuple[dict[str, str], dict[str, str]]] = {} 
def _tmdb_cache_set(cache: dict, key, value) -> None:
    """Store *value* under *key*; evict the oldest entries once the cache exceeds _CACHE_MAXSIZE."""
    with _TMDB_LOCK:
        cache[key] = value
        if len(cache) > _CACHE_MAXSIZE:
            # Dicts are insertion-ordered (3.7+), so the front of the key list
            # holds the oldest entries; shrink back down to _CACHE_MAXSIZE // 2.
            surplus = len(cache) - (_CACHE_MAXSIZE // 2)
            for old_key in list(cache.keys())[:surplus]:
                del cache[old_key]


def _fetch_and_cache_imdb_id(title_key: str, tmdb_id: int, kind: str) -> str:
    """Resolve the IMDb id for a TMDB entry via external_ids and cache it.

    Returns "" (uncached) when no API key or tmdb_id is available; misses
    from the API are cached as "" to avoid repeated lookups.
    """
    cached = _tmdb_cache_get(_IMDB_ID_CACHE, title_key)
    if cached is not None:
        return cached
    api_key = _get_setting_string("tmdb_api_key").strip()
    if not api_key or not tmdb_id:
        return ""
    ext = fetch_external_ids(kind=kind, tmdb_id=tmdb_id, api_key=api_key)
    imdb_id = ext.imdb_id if ext else ""
    _tmdb_cache_set(_IMDB_ID_CACHE, title_key, imdb_id)
    return imdb_id


def _set_trakt_ids_property(title: str, tmdb_id: int, imdb_id: str = "") -> None:
    """Expose TMDB/IMDb ids via the ``script.trakt.ids`` window property.

    Enables script.trakt compatibility; a missing tmdb_id is a silent no-op.
    """
    if not tmdb_id:
        return
    ids: dict[str, object] = {"tmdb": tmdb_id}
    if imdb_id:
        ids["imdb"] = imdb_id
    try:
        window = xbmcgui.Window(10000)
        window.setProperty("script.trakt.ids", json.dumps(ids))
        _log(f"script.trakt.ids gesetzt: {ids}", xbmc.LOGDEBUG)
    except Exception as exc:
        _log(f"script.trakt.ids setzen fehlgeschlagen: {exc}", xbmc.LOGDEBUG)
# ---------------------------------------------------------------------------
# Trakt-Helfer
# ---------------------------------------------------------------------------

def _trakt_load_token():
    """Load the stored Trakt token from the addon settings.

    Returns a ``TraktToken`` or ``None`` when no access token is stored.
    A corrupt ``trakt_token_expires`` value falls back to 0 instead of raising.
    """
    access = _get_setting_string("trakt_access_token").strip()
    refresh = _get_setting_string("trakt_refresh_token").strip()
    expires = _get_setting_string("trakt_token_expires").strip()
    if not access:
        return None
    # Robustness fix: a malformed setting must not break token loading.
    try:
        expires_at = int(expires or "0")
    except ValueError:
        expires_at = 0
    from core.trakt import TraktToken
    return TraktToken(
        access_token=access, refresh_token=refresh,
        expires_at=expires_at, created_at=0,
    )


def _trakt_save_token(token) -> None:
    """Persist a Trakt token into the addon settings."""
    addon = _get_addon()
    # Fix: _get_addon() may return None (same guard as sync_update_version_settings);
    # previously this raised AttributeError outside Kodi.
    if addon is None:
        return
    addon.setSetting("trakt_access_token", token.access_token)
    addon.setSetting("trakt_refresh_token", token.refresh_token)
    addon.setSetting("trakt_token_expires", str(token.expires_at))


def _trakt_get_client():
    """Create a ``TraktClient`` when client_id and client_secret are configured."""
    client_id = _get_setting_string("trakt_client_id").strip()
    client_secret = _get_setting_string("trakt_client_secret").strip()
    if not client_id or not client_secret:
        return None
    from core.trakt import TraktClient
    return TraktClient(client_id, client_secret, log=lambda m: _log(m, xbmc.LOGDEBUG))
automatisch.""" + token = _trakt_load_token() + if not token: + return "" + if token.expires_at > 0 and time.time() > token.expires_at - 86400: + client = _trakt_get_client() + if client: + new_token = client.refresh_token(token.refresh_token) + if new_token: + _trakt_save_token(new_token) + return new_token.access_token + return token.access_token + + +def _trakt_find_in_plugins(title: str) -> tuple[str, str] | None: + """Sucht einen Trakt-Titel in allen verfuegbaren Plugins (casefold-Vergleich). + + Gibt (plugin_name, matched_title) zurueck oder None bei keinem Treffer. + Ergebnisse werden 5 Minuten gecacht (inkl. None-Misses). + """ + if not title: + return None + title_cf = title.casefold() + now = time.time() + with _TRAKT_PLUGIN_MATCH_LOCK: + if now - _TRAKT_PLUGIN_MATCH_CACHE_TS < _TRAKT_PLUGIN_MATCH_CACHE_TTL: + if title_cf in _TRAKT_PLUGIN_MATCH_CACHE: + return _TRAKT_PLUGIN_MATCH_CACHE[title_cf] + result: tuple[str, str] | None = None + for plugin_name, plugin in _discover_plugins().items(): + try: + coro = _call_plugin_search(plugin, title) + results = _run_async(coro) if inspect.iscoroutine(coro) else (coro or []) + for r in (results or []): + if str(r).strip().casefold() == title_cf: + result = (plugin_name, str(r).strip()) + break + except Exception: + pass + if result: + break + with _TRAKT_PLUGIN_MATCH_LOCK: + global _TRAKT_PLUGIN_MATCH_CACHE_TS + _TRAKT_PLUGIN_MATCH_CACHE[title_cf] = result + _TRAKT_PLUGIN_MATCH_CACHE_TS = now + return result + + +def _trakt_watched_set(title: str) -> set[tuple[int, int]]: + """Liefert die Menge der gesehenen (season, episode)-Tupel fuer einen Titel. + + Ergebnis wird _TRAKT_WATCHED_CACHE_TTL Sekunden gecacht. + Gibt ein leeres Set zurueck wenn Trakt nicht aktiviert oder kein Token vorhanden. 
def _trakt_watched_set(title: str) -> set[tuple[int, int]]:
    """Return the set of watched ``(season, episode)`` tuples for *title* from Trakt.

    Results are cached for _TRAKT_WATCHED_CACHE_TTL seconds; an empty set is
    returned when Trakt is disabled or no valid token/client is available.
    """
    # BUGFIX: the `global` statement must precede any use of the name in this
    # scope. It previously appeared after the TTL check that reads the
    # timestamp, which is a SyntaxError ("used prior to global declaration")
    # and prevented the whole module from importing.
    global _TRAKT_WATCHED_CACHE_TS
    if not _get_setting_bool("trakt_enabled", default=False):
        return set()
    token = _trakt_get_valid_token()
    client = _trakt_get_client()
    if not token or not client:
        return set()
    title_cf = title.casefold()
    now = time.time()
    with _TRAKT_WATCHED_CACHE_LOCK:
        if now - _TRAKT_WATCHED_CACHE_TS < _TRAKT_WATCHED_CACHE_TTL:
            if title_cf in _TRAKT_WATCHED_CACHE:
                return set(_TRAKT_WATCHED_CACHE[title_cf])  # hand out a copy
        else:
            # TTL elapsed: drop stale entries so a later timestamp refresh
            # cannot revive outdated results.
            _TRAKT_WATCHED_CACHE.clear()
    try:
        history = client.get_history(token, media_type="episodes", limit=200)
    except Exception:
        return set()
    watched: set[tuple[int, int]] = set()
    for item in history:
        if item.title.casefold() == title_cf:
            watched.add((item.season, item.episode))
    with _TRAKT_WATCHED_CACHE_LOCK:
        _TRAKT_WATCHED_CACHE[title_cf] = watched
        _TRAKT_WATCHED_CACHE_TS = now
    return set(watched)
UPDATE_CHANNEL_MAIN = 0
UPDATE_CHANNEL_NIGHTLY = 1
UPDATE_CHANNEL_CUSTOM = 2


def _selected_update_channel() -> int:
    """Read the configured update channel; unknown values fall back to Main."""
    channel = _get_setting_int("update_channel", default=UPDATE_CHANNEL_MAIN)
    valid = {UPDATE_CHANNEL_MAIN, UPDATE_CHANNEL_NIGHTLY, UPDATE_CHANNEL_CUSTOM}
    return channel if channel in valid else UPDATE_CHANNEL_MAIN


def _channel_label(channel: int) -> str:
    """Human-readable label for an update channel (default: "Main")."""
    labels = {
        UPDATE_CHANNEL_NIGHTLY: "Nightly",
        UPDATE_CHANNEL_CUSTOM: "Custom",
    }
    return labels.get(channel, "Main")


def _is_nightly_version(version: str) -> bool:
    """True for versions shaped like ``X.Y.Z-nightly``."""
    return bool(re.match(r"^\d+\.\d+\.\d+-nightly$", str(version or "").strip()))


def _filter_versions_for_channel(channel: int, versions: list[str]) -> list[str]:
    """Restrict *versions* to those valid for *channel* (Custom passes everything)."""
    if channel == UPDATE_CHANNEL_MAIN:
        return [v for v in versions if _is_stable_version(v)]
    if channel == UPDATE_CHANNEL_NIGHTLY:
        return [v for v in versions if _is_nightly_version(v)]
    return list(versions)
+ safe_name = _name.replace('"', "") + xbmc.executebuiltin( + f'Notification("{safe_name}", "Suchindex geladen", 3000, "")' + ) + threading.Thread(target=_warm_and_notify, daemon=True, name=f"viewit-warmup-{_pname}").start() + + # Trakt-Menue (nur wenn aktiviert) + if _get_setting_bool("trakt_enabled", default=False): + if _trakt_load_token(): + _add_directory_item(handle, "Weiterschauen", "trakt_continue", is_folder=True) + _add_directory_item(handle, "Trakt Upcoming", "trakt_upcoming", is_folder=True) + _add_directory_item(handle, "Trakt Watchlist", "trakt_watchlist", is_folder=True) + _add_directory_item(handle, "Trakt History", "trakt_history", {"page": "1"}, is_folder=True) + else: + _add_directory_item(handle, "Trakt autorisieren", "trakt_auth", is_folder=True) + _add_directory_item(handle, "Einstellungen", "settings") xbmcplugin.endOfDirectory(handle) @@ -1459,6 +1645,21 @@ def _show_plugin_menu(plugin_name: str) -> None: if _plugin_has_capability(plugin, "popular_series"): _add_directory_item(handle, POPULAR_MENU_LABEL, "popular", {"plugin": plugin_name, "page": "1"}, is_folder=True) + if _plugin_has_capability(plugin, "year_filter"): + _add_directory_item(handle, "Nach Jahr", "year_menu", {"plugin": plugin_name}, is_folder=True) + + if _plugin_has_capability(plugin, "country_filter"): + _add_directory_item(handle, "Nach Land", "country_menu", {"plugin": plugin_name}, is_folder=True) + + if _plugin_has_capability(plugin, "collections"): + _add_directory_item(handle, "Sammlungen", "collections_menu", {"plugin": plugin_name}, is_folder=True) + + if _plugin_has_capability(plugin, "tags"): + _add_directory_item(handle, "Schlagworte", "tags_menu", {"plugin": plugin_name}, is_folder=True) + + if _plugin_has_capability(plugin, "random"): + _add_directory_item(handle, "Zufaelliger Titel", "random_title", {"plugin": plugin_name}, is_folder=False) + xbmcplugin.endOfDirectory(handle) @@ -1608,13 +1809,15 @@ def _import_plugin_module(path: Path) -> ModuleType: def 
_discover_plugins() -> dict[str, BasisPlugin]: """Laedt alle Plugins aus `plugins/*.py` und cached Instanzen im RAM.""" global _PLUGIN_CACHE - if _PLUGIN_CACHE is not None: - return _PLUGIN_CACHE + with _PLUGIN_CACHE_LOCK: + if _PLUGIN_CACHE is not None: + return _PLUGIN_CACHE # Plugins werden dynamisch aus `plugins/*.py` geladen, damit Integrationen getrennt # entwickelt und bei Fehlern isoliert deaktiviert werden koennen. plugins: dict[str, BasisPlugin] = {} if not PLUGIN_DIR.exists(): - _PLUGIN_CACHE = plugins + with _PLUGIN_CACHE_LOCK: + _PLUGIN_CACHE = plugins return plugins for file_path in sorted(PLUGIN_DIR.glob("*.py")): if file_path.name.startswith("_"): @@ -1659,7 +1862,8 @@ def _discover_plugins() -> dict[str, BasisPlugin]: continue plugins[plugin_name] = instance plugins = dict(sorted(plugins.items(), key=lambda item: item[0].casefold())) - _PLUGIN_CACHE = plugins + with _PLUGIN_CACHE_LOCK: + _PLUGIN_CACHE = plugins return plugins @@ -2119,6 +2323,7 @@ def _show_episodes(plugin_name: str, title: str, season: str, series_url: str = show_fanart = (show_art or {}).get("fanart") if isinstance(show_art, dict) else "" show_poster = (show_art or {}).get("poster") if isinstance(show_art, dict) else "" + trakt_watched = _trakt_watched_set(title) with _busy_dialog("Episoden werden aufbereitet..."): for episode in episodes: if show_tmdb: @@ -2146,6 +2351,12 @@ def _show_episodes(plugin_name: str, title: str, season: str, series_url: str = if episode_number: merged_info.setdefault("episode", str(episode_number)) + # Trakt Watched-Status: gesehene Episoden mit playcount markieren. + if trakt_watched and season_number and episode_number: + if (season_number, episode_number) in trakt_watched: + merged_info["playcount"] = 1 + merged_info["overlay"] = 7 # xbmcgui.ICON_OVERLAY_WATCHED + # Episode-Items ohne eigenes Artwork: Fanart/Poster vom Titel durchreichen. 
if show_fanart: merged_art.setdefault("fanart", show_fanart) @@ -2291,53 +2502,64 @@ def _show_categories(plugin_name: str) -> None: xbmcplugin.endOfDirectory(handle) -def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) -> None: +def _show_paged_title_list( + plugin_name: str, + filter_value: str, + page: int, + dialog_label: str, + page_action: str, + filter_param: str, + paging_method: str, + count_method: str, + has_more_method: str | None, +) -> None: + """Gemeinsame Implementierung fuer seitenweise Titellisten (Genre/Kategorie/A-Z).""" handle = _get_handle() plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Kategorien", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(dialog_label, "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page = max(1, int(page or 1)) - paging_getter = getattr(plugin, "titles_for_genre_page", None) + paging_getter = getattr(plugin, paging_method, None) if not callable(paging_getter): - xbmcgui.Dialog().notification("Kategorien", "Seitenwechsel nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(dialog_label, "Seitenwechsel nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return total_pages = None - count_getter = getattr(plugin, "genre_page_count", None) + count_getter = getattr(plugin, count_method, None) if callable(count_getter): try: - total_pages = int(count_getter(category) or 1) + total_pages = int(count_getter(filter_value) or 1) except Exception: total_pages = None if total_pages is not None: page = min(page, max(1, total_pages)) - xbmcplugin.setPluginCategory(handle, f"{category} ({page}/{total_pages})") + xbmcplugin.setPluginCategory(handle, f"{filter_value} ({page}/{total_pages})") else: - xbmcplugin.setPluginCategory(handle, f"{category} ({page})") + xbmcplugin.setPluginCategory(handle, 
f"{filter_value} ({page})") _set_content(handle, "movies" if (plugin_name or "").casefold() == "einschalten" else "tvshows") if page > 1: _add_directory_item( handle, "Vorherige Seite", - "category_titles_page", - {"plugin": plugin_name, "category": category, "page": str(page - 1)}, + page_action, + {"plugin": plugin_name, filter_param: filter_value, "page": str(page - 1)}, is_folder=True, ) try: titles = _run_with_progress( - "Kategorien", - f"{plugin_name}: {category} Seite {page} wird geladen...", - lambda: list(paging_getter(category, page) or []), + dialog_label, + f"{plugin_name}: {filter_value} Seite {page} wird geladen...", + lambda: list(paging_getter(filter_value, page) or []), ) except Exception as exc: - _log(f"Kategorie-Seite konnte nicht geladen werden ({plugin_name}/{category} p{page}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Kategorien", "Seite konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log(f"{dialog_label}-Seite konnte nicht geladen werden ({plugin_name}/{filter_value} p{page}): {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification(dialog_label, "Seite konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2362,12 +2584,13 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): tmdb_titles.append(title) if show_tmdb and tmdb_titles: - with _busy_dialog("Genre-Liste wird geladen..."): + with _busy_dialog(f"{dialog_label}-Liste wird geladen..."): tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) for title in titles: tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) meta = plugin_meta.get(title) info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) info_labels.setdefault("mediatype", "tvshow") if 
(info_labels.get("mediatype") or "").strip().casefold() == "tvshow": info_labels.setdefault("tvshowtitle", title) @@ -2393,11 +2616,11 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - show_next = False if total_pages is not None: show_next = page < total_pages - else: - has_more_getter = getattr(plugin, "genre_has_more", None) + elif has_more_method is not None: + has_more_getter = getattr(plugin, has_more_method, None) if callable(has_more_getter): try: - show_next = bool(has_more_getter(category, page)) + show_next = bool(has_more_getter(filter_value, page)) except Exception: show_next = False @@ -2405,131 +2628,35 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - _add_directory_item( handle, "Naechste Seite", - "category_titles_page", - {"plugin": plugin_name, "category": category, "page": str(page + 1)}, + page_action, + {"plugin": plugin_name, filter_param: filter_value, "page": str(page + 1)}, is_folder=True, ) xbmcplugin.endOfDirectory(handle) + +def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) -> None: + _show_paged_title_list( + plugin_name, category, page, + dialog_label="Kategorien", + page_action="category_titles_page", + filter_param="category", + paging_method="titles_for_genre_page", + count_method="genre_page_count", + has_more_method="genre_has_more", + ) + + def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None: - handle = _get_handle() - plugin = _discover_plugins().get(plugin_name) - if plugin is None: - xbmcgui.Dialog().notification("Genres", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) - xbmcplugin.endOfDirectory(handle) - return - - page = max(1, int(page or 1)) - paging_getter = getattr(plugin, "titles_for_genre_page", None) - if not callable(paging_getter): - xbmcgui.Dialog().notification("Genres", "Seitenwechsel nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) - xbmcplugin.endOfDirectory(handle) - 
return - - total_pages = None - count_getter = getattr(plugin, "genre_page_count", None) - if callable(count_getter): - try: - total_pages = int(count_getter(genre) or 1) - except Exception: - total_pages = None - if total_pages is not None: - page = min(page, max(1, total_pages)) - xbmcplugin.setPluginCategory(handle, f"{genre} ({page}/{total_pages})") - else: - xbmcplugin.setPluginCategory(handle, f"{genre} ({page})") - _set_content(handle, "movies" if (plugin_name or "").casefold() == "einschalten" else "tvshows") - - if page > 1: - _add_directory_item( - handle, - "Vorherige Seite", - "genre_titles_page", - {"plugin": plugin_name, "genre": genre, "page": str(page - 1)}, - is_folder=True, - ) - - try: - titles = _run_with_progress( - "Genres", - f"{plugin_name}: {genre} Seite {page} wird geladen...", - lambda: list(paging_getter(genre, page) or []), - ) - except Exception as exc: - _log(f"Genre-Seite konnte nicht geladen werden ({plugin_name}/{genre} p{page}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Genres", "Seite konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) - xbmcplugin.endOfDirectory(handle) - return - - titles = [str(t).strip() for t in titles if t and str(t).strip()] - titles.sort(key=lambda value: value.casefold()) - - if titles: - use_source, show_tmdb, prefer_source = _metadata_policy( - plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() - ) - plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {} - show_plot = _get_setting_bool("tmdb_show_plot", default=True) - show_art = _get_setting_bool("tmdb_show_art", default=True) - tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - tmdb_titles = list(titles) if show_tmdb else [] - if show_tmdb and prefer_source and use_source: - tmdb_titles = [] - for title in titles: - meta = plugin_meta.get(title) - meta_labels = meta[0] if meta else {} - meta_art = meta[1] if meta else {} - if _needs_tmdb(meta_labels, 
meta_art, want_plot=show_plot, want_art=show_art): - tmdb_titles.append(title) - if show_tmdb and tmdb_titles: - with _busy_dialog("Genre-Seite wird geladen..."): - tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) - for title in titles: - tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) - meta = plugin_meta.get(title) - info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - display_label, - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=info_labels, - art=art, - cast=cast, - ) - - show_next = False - if total_pages is not None: - show_next = page < total_pages - else: - has_more_getter = getattr(plugin, "genre_has_more", None) - if callable(has_more_getter): - try: - show_next = bool(has_more_getter(genre, page)) - except Exception: - show_next = False - - if show_next: - _add_directory_item( - handle, - "Naechste Seite", - "genre_titles_page", - {"plugin": plugin_name, "genre": genre, "page": str(page + 1)}, - is_folder=True, - ) - xbmcplugin.endOfDirectory(handle) + _show_paged_title_list( + plugin_name, genre, page, + dialog_label="Genres", + page_action="genre_titles_page", + filter_param="genre", + paging_method="titles_for_genre_page", + count_method="genre_page_count", 
+ has_more_method="genre_has_more", + ) def _show_alpha_index(plugin_name: str) -> None: @@ -2571,118 +2698,15 @@ def _show_alpha_index(plugin_name: str) -> None: def _show_alpha_titles_page(plugin_name: str, letter: str, page: int = 1) -> None: - handle = _get_handle() - plugin = _discover_plugins().get(plugin_name) - if plugin is None: - xbmcgui.Dialog().notification("A-Z", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) - xbmcplugin.endOfDirectory(handle) - return - - page = max(1, int(page or 1)) - paging_getter = getattr(plugin, "titles_for_alpha_page", None) - if not callable(paging_getter): - xbmcgui.Dialog().notification("A-Z", "Seitenwechsel nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) - xbmcplugin.endOfDirectory(handle) - return - - total_pages = None - count_getter = getattr(plugin, "alpha_page_count", None) - if callable(count_getter): - try: - total_pages = int(count_getter(letter) or 1) - except Exception: - total_pages = None - if total_pages is not None: - page = min(page, max(1, total_pages)) - xbmcplugin.setPluginCategory(handle, f"{letter} ({page}/{total_pages})") - else: - xbmcplugin.setPluginCategory(handle, f"{letter} ({page})") - _set_content(handle, "movies" if (plugin_name or "").casefold() == "einschalten" else "tvshows") - - if page > 1: - _add_directory_item( - handle, - "Vorherige Seite", - "alpha_titles_page", - {"plugin": plugin_name, "letter": letter, "page": str(page - 1)}, - is_folder=True, - ) - - try: - titles = _run_with_progress( - "A-Z", - f"{plugin_name}: {letter} Seite {page} wird geladen...", - lambda: list(paging_getter(letter, page) or []), - ) - except Exception as exc: - _log(f"A-Z Seite konnte nicht geladen werden ({plugin_name}/{letter} p{page}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("A-Z", "Seite konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) - xbmcplugin.endOfDirectory(handle) - return - - titles = [str(t).strip() for t in titles if t and str(t).strip()] - 
titles.sort(key=lambda value: value.casefold()) - - if titles: - use_source, show_tmdb, prefer_source = _metadata_policy( - plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() - ) - plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {} - show_plot = _get_setting_bool("tmdb_show_plot", default=True) - show_art = _get_setting_bool("tmdb_show_art", default=True) - tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - tmdb_titles = list(titles) if show_tmdb else [] - if show_tmdb and prefer_source and use_source: - tmdb_titles = [] - for title in titles: - meta = plugin_meta.get(title) - meta_labels = meta[0] if meta else {} - meta_art = meta[1] if meta else {} - if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): - tmdb_titles.append(title) - if show_tmdb and tmdb_titles: - with _busy_dialog("A-Z Liste wird geladen..."): - tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) - for title in titles: - tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) - meta = plugin_meta.get(title) - info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - display_label, - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, 
title)}, - is_folder=not direct_play, - info_labels=info_labels, - art=art, - cast=cast, - ) - - show_next = False - if total_pages is not None: - show_next = page < total_pages - - if show_next: - _add_directory_item( - handle, - "Naechste Seite", - "alpha_titles_page", - {"plugin": plugin_name, "letter": letter, "page": str(page + 1)}, - is_folder=True, - ) - xbmcplugin.endOfDirectory(handle) + _show_paged_title_list( + plugin_name, letter, page, + dialog_label="A-Z", + page_action="alpha_titles_page", + filter_param="letter", + paging_method="titles_for_alpha_page", + count_method="alpha_page_count", + has_more_method=None, + ) def _show_series_catalog(plugin_name: str, page: int = 1) -> None: @@ -2857,7 +2881,8 @@ def _group_matches(group_code: str, title: str) -> bool: def _get_genre_titles(plugin_name: str, genre: str) -> list[str]: cache_key = (plugin_name, genre) - cached = _GENRE_TITLES_CACHE.get(cache_key) + with _GENRE_TITLES_CACHE_LOCK: + cached = _GENRE_TITLES_CACHE.get(cache_key) if cached is not None: return list(cached) plugin = _discover_plugins().get(plugin_name) @@ -2866,7 +2891,12 @@ def _get_genre_titles(plugin_name: str, genre: str) -> list[str]: titles = plugin.titles_for_genre(genre) titles = [str(t).strip() for t in titles if t and str(t).strip()] titles.sort(key=lambda value: value.casefold()) - _GENRE_TITLES_CACHE[cache_key] = list(titles) + with _GENRE_TITLES_CACHE_LOCK: + _GENRE_TITLES_CACHE[cache_key] = list(titles) + if len(_GENRE_TITLES_CACHE) > _CACHE_MAXSIZE: + excess = len(_GENRE_TITLES_CACHE) - _CACHE_MAXSIZE // 2 + for k in list(_GENRE_TITLES_CACHE.keys())[:excess]: + del _GENRE_TITLES_CACHE[k] return list(titles) @@ -3218,9 +3248,13 @@ def _show_latest_episodes(plugin_name: str, page: int = 1) -> None: xbmcplugin.endOfDirectory(handle) return - xbmcplugin.setPluginCategory(handle, f"{plugin_name}: {LATEST_MENU_LABEL}") + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: {LATEST_MENU_LABEL} (Seite {page})") 
_set_content(handle, "episodes") + if page > 1: + _add_directory_item(handle, "Vorherige Seite", "latest_titles", + {"plugin": plugin_name, "page": str(page - 1)}, is_folder=True) + try: entries = _run_with_progress( LATEST_MENU_LABEL, @@ -3279,6 +3313,10 @@ def _show_latest_episodes(plugin_name: str, page: int = 1) -> None: info_labels=info_labels, ) + if entries: + _add_directory_item(handle, "Naechste Seite", "latest_titles", + {"plugin": plugin_name, "page": str(page + 1)}, is_folder=True) + xbmcplugin.endOfDirectory(handle) @@ -3510,31 +3548,39 @@ def _apply_update_channel(*, silent: bool = False) -> bool: if not silent: if not applied: warning_icon = getattr(xbmcgui, "NOTIFICATION_WARNING", xbmcgui.NOTIFICATION_INFO) - show_notification( + xbmcgui.Dialog().notification( "Updates", "Kanal gespeichert, aber repository.viewit nicht gefunden.", - icon=warning_icon, - milliseconds=5000, + warning_icon, + 5000, ) elif target_version == "-": - show_error("Updates", "Kanal angewendet, aber keine Version im Kanal gefunden.", milliseconds=5000) + xbmcgui.Dialog().notification( + "Updates", + "Kanal angewendet, aber keine Version im Kanal gefunden.", + xbmcgui.NOTIFICATION_ERROR, + 5000, + ) elif not install_result: - show_error( + xbmcgui.Dialog().notification( "Updates", f"Kanal angewendet, Installation von {target_version} fehlgeschlagen.", - milliseconds=5000, + xbmcgui.NOTIFICATION_ERROR, + 5000, ) elif target_version == installed_version: - show_notification( + xbmcgui.Dialog().notification( "Updates", f"Kanal angewendet: {_channel_label(_selected_update_channel())} ({target_version} bereits installiert)", - milliseconds=4500, + xbmcgui.NOTIFICATION_INFO, + 4500, ) else: - show_notification( + xbmcgui.Dialog().notification( "Updates", f"Kanal angewendet: {_channel_label(_selected_update_channel())} -> {target_version} installiert", - milliseconds=5000, + xbmcgui.NOTIFICATION_INFO, + 5000, ) _sync_update_version_settings() return applied and install_result @@ 
-3551,11 +3597,14 @@ def _run_update_check(*, silent: bool = False) -> None: if callable(builtin): builtin("ActivateWindow(addonbrowser,addons://updates/)") if not silent: - show_notification("Updates", "Update-Check gestartet.", milliseconds=4000) + xbmcgui.Dialog().notification("Updates", "Update-Check gestartet.", xbmcgui.NOTIFICATION_INFO, 4000) except Exception as exc: _log(f"Update-Pruefung fehlgeschlagen: {exc}", xbmc.LOGWARNING) if not silent: - show_error("Updates", "Update-Check fehlgeschlagen.", milliseconds=4000) + try: + xbmcgui.Dialog().notification("Updates", "Update-Check fehlgeschlagen.", xbmcgui.NOTIFICATION_ERROR, 4000) + except Exception: + pass def _show_version_selector() -> None: @@ -3568,7 +3617,7 @@ def _show_version_selector() -> None: versions = _filter_versions_for_channel(channel, _fetch_repo_versions(info_url)) if not versions: - show_error("Updates", "Keine Versionen im Repo gefunden.", milliseconds=4000) + xbmcgui.Dialog().notification("Updates", "Keine Versionen im Repo gefunden.", xbmcgui.NOTIFICATION_ERROR, 4000) return installed = _get_setting_string("update_installed_version").strip() or "-" @@ -3606,13 +3655,13 @@ def _show_version_selector() -> None: if not confirmed: return - show_notification("Updates", f"Installation gestartet: {version}", milliseconds=2500) + xbmcgui.Dialog().notification("Updates", f"Installation gestartet: {version}", xbmcgui.NOTIFICATION_INFO, 2500) ok = _install_addon_version(info_url, version) if ok: _sync_update_version_settings() - show_notification("Updates", f"Version {version} installiert.", milliseconds=4000) + xbmcgui.Dialog().notification("Updates", f"Version {version} installiert.", xbmcgui.NOTIFICATION_INFO, 4000) else: - show_error("Updates", f"Installation von {version} fehlgeschlagen.", milliseconds=4500) + xbmcgui.Dialog().notification("Updates", f"Installation von {version} fehlgeschlagen.", xbmcgui.NOTIFICATION_ERROR, 4500) def _maybe_run_auto_update_check(action: str | None) -> None: 
@@ -3666,58 +3715,6 @@ def _is_resolveurl_missing_error(message: str) -> bool: return str(message or "").strip().casefold() == "resolveurl missing" -def _looks_like_unresolved_hoster_link(url: str) -> bool: - raw = (url or "").strip() - if not raw: - return False - media_url = raw.split("|", 1)[0].strip() - try: - parsed = urlparse(media_url) - except Exception: - return False - host = (parsed.netloc or "").casefold() - path = (parsed.path or "").casefold() - if parsed.scheme not in {"http", "https"} or not host: - return False - known_hoster_domains = ( - "voe.sx", - "supervideo.", - "doodstream.", - "vidnest.", - "vidara.", - "filemoon.", - "streamtape.", - "vidmoly.", - "veev.", - "strmup.", - ) - if not any(domain in host for domain in known_hoster_domains): - return False - return path.startswith(("/e/", "/v/", "/d/", "/embed")) - - -def _resolve_unresolved_hoster_link(url: str, *, source_url: str) -> tuple[str, str]: - candidate = (url or "").strip() - if not _looks_like_unresolved_hoster_link(candidate): - return candidate, "" - _log(f"ResolveURL dispatch: {candidate}", xbmc.LOGDEBUG) - try: - from resolveurl_backend import resolve as resolve_with_resolveurl # type: ignore - except Exception: - resolve_with_resolveurl = None - if callable(resolve_with_resolveurl): - try: - resolved = resolve_with_resolveurl(candidate) - except Exception: - resolved = None - if resolved: - _log(f"ResolveURL output: {resolved}", xbmc.LOGDEBUG) - return normalize_resolved_stream_url(resolved, source_url=source_url or candidate), "" - err = _resolveurl_last_error() - _log(f"ResolveURL output: ({err})", xbmc.LOGDEBUG) - return candidate, err - - def _play_final_link( link: str, *, @@ -3726,6 +3723,7 @@ def _play_final_link( art: dict[str, str] | None = None, cast: list[TmdbCastMember] | None = None, resolve_handle: int | None = None, + trakt_media: dict[str, object] | None = None, ) -> None: list_item = xbmcgui.ListItem(label=display_title or "", path=link) try: @@ -3760,6 
+3758,35 @@ def _play_final_link( player = xbmc.Player() player.play(item=link, listitem=list_item) + # Trakt Scrobble Start (Hintergrund-Thread) + if trakt_media and _get_setting_bool("trakt_enabled", default=False): + _trakt_scrobble_start_async(trakt_media) + + +def _trakt_scrobble_start_async(media: dict[str, object]) -> None: + """Sendet scrobble/start an die Trakt-API in einem Hintergrund-Thread.""" + def _do() -> None: + try: + from core.trakt import TraktClient + except Exception: + return + client_id = _get_setting_string("trakt_client_id").strip() + client_secret = _get_setting_string("trakt_client_secret").strip() + access_token = _get_setting_string("trakt_access_token").strip() + if not client_id or not client_secret or not access_token: + return + client = TraktClient(client_id, client_secret, log=lambda m: _log(m, xbmc.LOGDEBUG)) + client.scrobble_start( + access_token, + media_type=str(media.get("kind", "movie")), + title=str(media.get("title", "")), + tmdb_id=int(media.get("tmdb_id", 0)), + imdb_id=str(media.get("imdb_id", "")), + season=int(media.get("season", 0)), + episode=int(media.get("episode", 0)), + ) + threading.Thread(target=_do, daemon=True).start() + def _track_playback_and_update_state_async(key: str) -> None: # Eigenes Resume/Watched ist deaktiviert; Kodi verwaltet das selbst. @@ -3859,30 +3886,23 @@ def _play_episode( err = _resolveurl_last_error() if _is_cloudflare_challenge_error(err): _log(f"ResolveURL Cloudflare-Challenge: {err}", xbmc.LOGWARNING) - show_notification( + xbmcgui.Dialog().notification( "Wiedergabe", "Hoster durch Cloudflare geschuetzt. 
Bitte spaeter erneut probieren.", - milliseconds=4500, + xbmcgui.NOTIFICATION_INFO, + 4500, ) return final_link = resolved_link or link final_link = normalize_resolved_stream_url(final_link, source_url=link) - final_link, resolve_err = _resolve_unresolved_hoster_link(final_link, source_url=link) - if _looks_like_unresolved_hoster_link(final_link): - err = (resolve_err or _resolveurl_last_error()).strip() - if _is_resolveurl_missing_error(err): - show_error("Wiedergabe", "ResolveURL fehlt oder ist nicht geladen.", milliseconds=4500) - else: - show_error("Wiedergabe", "Hoster-Link konnte nicht aufgeloest werden.", milliseconds=4500) - _log(f"Hoster-Link blieb unaufgeloest: {final_link} (error={err})", xbmc.LOGWARNING) - return err = _resolveurl_last_error() if _is_cloudflare_challenge_error(err) and final_link.strip() == link.strip(): _log(f"ResolveURL Cloudflare-Challenge (unresolved): {err}", xbmc.LOGWARNING) - show_notification( + xbmcgui.Dialog().notification( "Wiedergabe", "Hoster durch Cloudflare geschuetzt. 
Bitte spaeter erneut probieren.", - milliseconds=4500, + xbmcgui.NOTIFICATION_INFO, + 4500, ) return finally: @@ -3898,6 +3918,21 @@ def _play_episode( display_title = title info_labels, art, cast = _tmdb_labels_and_art(title) display_title = _label_with_duration(display_title, info_labels) + + # Trakt-IDs fuer script.trakt-Kompatibilitaet und eigenes Scrobbling + title_key = (title or "").strip().casefold() + _tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key, 0) + _imdb_id = "" + trakt_media: dict[str, object] | None = None + if _tmdb_id: + _kind = _tmdb_cache_get(_MEDIA_TYPE_CACHE, title_key, "tv") + _imdb_id = _fetch_and_cache_imdb_id(title_key, _tmdb_id, _kind) + _set_trakt_ids_property(title, _tmdb_id, _imdb_id) + trakt_media = { + "title": title, "tmdb_id": _tmdb_id, "imdb_id": _imdb_id, "kind": _kind, + "season": season_number or 0, "episode": episode_number or 0, + } + _play_final_link( final_link, display_title=display_title, @@ -3905,6 +3940,7 @@ def _play_episode( art=art, cast=cast, resolve_handle=resolve_handle, + trakt_media=trakt_media, ) _track_playback_and_update_state_async( _playstate_key(plugin_name=plugin_name, title=title, season=season, episode=episode) @@ -3981,30 +4017,23 @@ def _play_episode_url( err = _resolveurl_last_error() if _is_cloudflare_challenge_error(err): _log(f"ResolveURL Cloudflare-Challenge: {err}", xbmc.LOGWARNING) - show_notification( + xbmcgui.Dialog().notification( "Wiedergabe", "Hoster durch Cloudflare geschuetzt. 
Bitte spaeter erneut probieren.", - milliseconds=4500, + xbmcgui.NOTIFICATION_INFO, + 4500, ) return final_link = resolved_link or link final_link = normalize_resolved_stream_url(final_link, source_url=link) - final_link, resolve_err = _resolve_unresolved_hoster_link(final_link, source_url=link) - if _looks_like_unresolved_hoster_link(final_link): - err = (resolve_err or _resolveurl_last_error()).strip() - if _is_resolveurl_missing_error(err): - show_error("Wiedergabe", "ResolveURL fehlt oder ist nicht geladen.", milliseconds=4500) - else: - show_error("Wiedergabe", "Hoster-Link konnte nicht aufgeloest werden.", milliseconds=4500) - _log(f"Hoster-Link blieb unaufgeloest: {final_link} (error={err})", xbmc.LOGWARNING) - return err = _resolveurl_last_error() if _is_cloudflare_challenge_error(err) and final_link.strip() == link.strip(): _log(f"ResolveURL Cloudflare-Challenge (unresolved): {err}", xbmc.LOGWARNING) - show_notification( + xbmcgui.Dialog().notification( "Wiedergabe", "Hoster durch Cloudflare geschuetzt. 
Bitte spaeter erneut probieren.", - milliseconds=4500, + xbmcgui.NOTIFICATION_INFO, + 4500, ) return finally: @@ -4042,156 +4071,864 @@ def _parse_params() -> dict[str, str]: return {key: values[0] for key, values in raw_params.items()} +def _show_year_menu(plugin_name: str) -> None: + """Zeigt verfuegbare Erscheinungsjahre eines Plugins (Capability: year_filter).""" + handle = _get_handle() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + xbmcgui.Dialog().notification("Jahr", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + getter = getattr(plugin, "years_available", None) + if not callable(getter): + xbmcplugin.endOfDirectory(handle) + return + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: Nach Jahr") + try: + years = list(getter() or []) + except Exception as exc: + _log(f"Jahre konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) + xbmcplugin.endOfDirectory(handle) + return + for year in years: + _add_directory_item(handle, str(year), "year_titles_page", + {"plugin": plugin_name, "year": str(year), "page": "1"}, is_folder=True) + xbmcplugin.endOfDirectory(handle) + + +def _show_year_titles_page(plugin_name: str, year: str, page: int = 1) -> None: + """Zeigt Titel eines bestimmten Erscheinungsjahres.""" + handle = _get_handle() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + xbmcplugin.endOfDirectory(handle) + return + getter = getattr(plugin, "titles_for_year", None) + if not callable(getter): + xbmcplugin.endOfDirectory(handle) + return + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: {year} (Seite {page})") + _set_content(handle, "movies") + if page > 1: + _add_directory_item(handle, "Vorherige Seite", "year_titles_page", + {"plugin": plugin_name, "year": year, "page": str(page - 1)}, is_folder=True) + try: + titles = _run_with_progress("Jahr", f"{plugin_name}: {year} wird geladen...", + lambda: list(getter(year, page) or [])) 
+ except Exception as exc: + _log(f"Jahr-Titel konnten nicht geladen werden ({plugin_name}/{year} p{page}): {exc}", xbmc.LOGWARNING) + xbmcplugin.endOfDirectory(handle) + return + titles = [str(t).strip() for t in titles if t and str(t).strip()] + for title in titles: + _add_directory_item(handle, title, "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=True) + if titles: + _add_directory_item(handle, "Naechste Seite", "year_titles_page", + {"plugin": plugin_name, "year": year, "page": str(page + 1)}, is_folder=True) + xbmcplugin.endOfDirectory(handle) + + +def _show_country_menu(plugin_name: str) -> None: + """Zeigt verfuegbare Produktionslaender eines Plugins (Capability: country_filter).""" + handle = _get_handle() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + xbmcgui.Dialog().notification("Land", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + getter = getattr(plugin, "countries_available", None) + if not callable(getter): + xbmcplugin.endOfDirectory(handle) + return + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: Nach Land") + try: + countries = list(getter() or []) + except Exception as exc: + _log(f"Laender konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) + xbmcplugin.endOfDirectory(handle) + return + for country in countries: + _add_directory_item(handle, str(country), "country_titles_page", + {"plugin": plugin_name, "country": str(country), "page": "1"}, is_folder=True) + xbmcplugin.endOfDirectory(handle) + + +def _show_country_titles_page(plugin_name: str, country: str, page: int = 1) -> None: + """Zeigt Titel eines bestimmten Produktionslandes.""" + handle = _get_handle() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + xbmcplugin.endOfDirectory(handle) + return + getter = getattr(plugin, "titles_for_country", None) + if not callable(getter): + 
xbmcplugin.endOfDirectory(handle) + return + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: {country} (Seite {page})") + _set_content(handle, "movies") + if page > 1: + _add_directory_item(handle, "Vorherige Seite", "country_titles_page", + {"plugin": plugin_name, "country": country, "page": str(page - 1)}, is_folder=True) + try: + titles = _run_with_progress("Land", f"{plugin_name}: {country} wird geladen...", + lambda: list(getter(country, page) or [])) + except Exception as exc: + _log(f"Land-Titel konnten nicht geladen werden ({plugin_name}/{country} p{page}): {exc}", xbmc.LOGWARNING) + xbmcplugin.endOfDirectory(handle) + return + titles = [str(t).strip() for t in titles if t and str(t).strip()] + for title in titles: + _add_directory_item(handle, title, "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=True) + if titles: + _add_directory_item(handle, "Naechste Seite", "country_titles_page", + {"plugin": plugin_name, "country": country, "page": str(page + 1)}, is_folder=True) + xbmcplugin.endOfDirectory(handle) + + +def _show_collections_menu(plugin_name: str) -> None: + """Zeigt Sammlungen/Filmreihen eines Plugins (Capability: collections).""" + handle = _get_handle() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + xbmcgui.Dialog().notification("Sammlungen", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + getter = getattr(plugin, "collections", None) + if not callable(getter): + xbmcplugin.endOfDirectory(handle) + return + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: Sammlungen") + try: + cols = list(getter() or []) + except Exception as exc: + _log(f"Sammlungen konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) + xbmcplugin.endOfDirectory(handle) + return + for col in cols: + _add_directory_item(handle, str(col), "collection_titles_page", + {"plugin": plugin_name, "collection": str(col), 
"page": "1"}, is_folder=True) + xbmcplugin.endOfDirectory(handle) + + +def _show_collection_titles_page(plugin_name: str, collection: str, page: int = 1) -> None: + """Zeigt Titel einer Sammlung/Filmreihe.""" + handle = _get_handle() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + xbmcplugin.endOfDirectory(handle) + return + getter = getattr(plugin, "titles_for_collection", None) + if not callable(getter): + xbmcplugin.endOfDirectory(handle) + return + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: {collection}") + _set_content(handle, "movies") + if page > 1: + _add_directory_item(handle, "Vorherige Seite", "collection_titles_page", + {"plugin": plugin_name, "collection": collection, "page": str(page - 1)}, is_folder=True) + try: + titles = _run_with_progress("Sammlung", f"{plugin_name}: {collection} wird geladen...", + lambda: list(getter(collection, page) or [])) + except Exception as exc: + _log(f"Sammlungs-Titel konnten nicht geladen werden ({plugin_name}/{collection}): {exc}", xbmc.LOGWARNING) + xbmcplugin.endOfDirectory(handle) + return + titles = [str(t).strip() for t in titles if t and str(t).strip()] + direct_play = bool(plugin_name.casefold() == "einschalten" + and _get_setting_bool("einschalten_enable_playback", default=False)) + for title in titles: + _add_directory_item(handle, title, "play_movie" if direct_play else "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=not direct_play) + if titles: + _add_directory_item(handle, "Naechste Seite", "collection_titles_page", + {"plugin": plugin_name, "collection": collection, "page": str(page + 1)}, is_folder=True) + xbmcplugin.endOfDirectory(handle) + + +def _show_tags_menu(plugin_name: str) -> None: + """Zeigt Schlagworte/Tags eines Plugins (Capability: tags).""" + handle = _get_handle() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + xbmcgui.Dialog().notification("Schlagworte", "Quelle nicht 
gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + getter = getattr(plugin, "tags", None) + if not callable(getter): + xbmcplugin.endOfDirectory(handle) + return + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: Schlagworte") + try: + tag_list = list(getter() or []) + except Exception as exc: + _log(f"Tags konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) + xbmcplugin.endOfDirectory(handle) + return + for tag in sorted(tag_list, key=lambda t: str(t).casefold()): + _add_directory_item(handle, str(tag), "tag_titles_page", + {"plugin": plugin_name, "tag": str(tag), "page": "1"}, is_folder=True) + xbmcplugin.endOfDirectory(handle) + + +def _show_tag_titles_page(plugin_name: str, tag: str, page: int = 1) -> None: + """Zeigt Titel zu einem Schlagwort/Tag.""" + handle = _get_handle() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + xbmcplugin.endOfDirectory(handle) + return + getter = getattr(plugin, "titles_for_tag", None) + if not callable(getter): + xbmcplugin.endOfDirectory(handle) + return + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: {tag} (Seite {page})") + _set_content(handle, "tvshows") + if page > 1: + _add_directory_item(handle, "Vorherige Seite", "tag_titles_page", + {"plugin": plugin_name, "tag": tag, "page": str(page - 1)}, is_folder=True) + try: + titles = _run_with_progress("Schlagwort", f"{plugin_name}: {tag} wird geladen...", + lambda: list(getter(tag, page) or [])) + except Exception as exc: + _log(f"Tag-Titel konnten nicht geladen werden ({plugin_name}/{tag} p{page}): {exc}", xbmc.LOGWARNING) + xbmcplugin.endOfDirectory(handle) + return + titles = [str(t).strip() for t in titles if t and str(t).strip()] + for title in titles: + _add_directory_item(handle, title, "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=True) + if titles: + _add_directory_item(handle, "Naechste Seite", "tag_titles_page", + 
{"plugin": plugin_name, "tag": tag, "page": str(page + 1)}, is_folder=True) + xbmcplugin.endOfDirectory(handle) + + +def _play_random_title(plugin_name: str) -> None: + """Oeffnet einen zufaelligen Titel direkt (Capability: random).""" + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + return + getter = getattr(plugin, "random_title", None) + if not callable(getter): + return + try: + title = getter() + except Exception as exc: + _log(f"Zufaelliger Titel konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) + return + if title: + _show_seasons(plugin_name, str(title), "") + + +# --------------------------------------------------------------------------- +# --------------------------------------------------------------------------- +# Trakt-Aktionen +# --------------------------------------------------------------------------- + +def _trakt_authorize() -> None: + """Startet den OAuth Device Auth Flow.""" + client = _trakt_get_client() + if not client: + xbmcgui.Dialog().notification("Trakt", "Client ID/Secret fehlt – bitte in den Einstellungen eintragen.", + xbmcgui.NOTIFICATION_INFO, 4000) + return + code = client.device_code_request() + if not code: + xbmcgui.Dialog().notification("Trakt", "Fehler bei der Autorisierung.", xbmcgui.NOTIFICATION_INFO, 3000) + return + dialog = xbmcgui.DialogProgress() + dialog.create("Trakt Autorisierung", + f"Gehe zu {code.verification_url}\nund gib diesen Code ein:\n\n{code.user_code}") + token = None + start = time.time() + while time.time() - start < code.expires_in: + if dialog.iscanceled(): + break + time.sleep(code.interval) + from core.trakt import TraktClient + # Einzelversuch (kein internes Polling – wir steuern die Schleife selbst) + client_id = _get_setting_string("trakt_client_id").strip() + client_secret = _get_setting_string("trakt_client_secret").strip() + tmp_client = TraktClient(client_id, client_secret, log=lambda m: _log(m, xbmc.LOGDEBUG)) + status, payload = 
tmp_client._post("/oauth/device/token", { + "code": code.device_code, + "client_id": client_id, + "client_secret": client_secret, + }) + if status == 200 and isinstance(payload, dict): + from core.trakt import TraktToken + token = TraktToken( + access_token=payload.get("access_token", ""), + refresh_token=payload.get("refresh_token", ""), + expires_at=int(payload.get("created_at", 0)) + int(payload.get("expires_in", 0)), + created_at=int(payload.get("created_at", 0)), + ) + break + if status in (404, 410, 418): + break + progress = int((time.time() - start) / code.expires_in * 100) + dialog.update(min(progress, 99)) + dialog.close() + if token: + _trakt_save_token(token) + xbmcgui.Dialog().notification("Trakt", "Erfolgreich autorisiert!", xbmcgui.NOTIFICATION_INFO, 3000) + else: + xbmcgui.Dialog().notification("Trakt", "Autorisierung fehlgeschlagen oder abgebrochen.", + xbmcgui.NOTIFICATION_INFO, 3000) + + +def _show_trakt_watchlist(media_type: str = "") -> None: + handle = _get_handle() + token = _trakt_get_valid_token() + client = _trakt_get_client() + if not token or not client: + xbmcgui.Dialog().notification("Trakt", "Nicht autorisiert.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + + if not media_type: + _add_directory_item(handle, "Filme", "trakt_watchlist", {"type": "movies"}, is_folder=True) + _add_directory_item(handle, "Serien", "trakt_watchlist", {"type": "shows"}, is_folder=True) + xbmcplugin.endOfDirectory(handle) + return + + items = client.get_watchlist(token, media_type=media_type) + for item in items: + label = f"{item.title}" + if item.year: + label = f"{item.title} ({item.year})" + _add_directory_item(handle, label, "search", {"query": item.title}, is_folder=True) + if not items: + xbmcgui.Dialog().notification("Trakt", "Watchlist ist leer.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + + +def _show_trakt_history(page: int = 1) -> None: + handle = _get_handle() + token = 
_trakt_get_valid_token() + client = _trakt_get_client() + if not token or not client: + xbmcgui.Dialog().notification("Trakt", "Nicht autorisiert.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + + items = client.get_history(token, page=page, limit=LIST_PAGE_SIZE) + for item in items: + label = item.title + if item.media_type == "episode" and item.season and item.episode: + label = f"{item.title} - S{item.season:02d}E{item.episode:02d}" + elif item.year: + label = f"{item.title} ({item.year})" + _add_directory_item(handle, label, "search", {"query": item.title}, is_folder=True) + + if len(items) >= LIST_PAGE_SIZE: + _add_directory_item(handle, "Naechste Seite >>", "trakt_history", {"page": str(page + 1)}, is_folder=True) + if not items and page == 1: + xbmcgui.Dialog().notification("Trakt", "Keine History vorhanden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + + +def _show_trakt_upcoming() -> None: + """Zeigt anstehende Episoden der Watchlist-Serien (Trakt-Kalender, naechste 14 Tage).""" + handle = _get_handle() + token = _trakt_get_valid_token() + client = _trakt_get_client() + if not token or not client: + xbmcgui.Dialog().notification("Trakt", "Nicht autorisiert.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + + xbmcplugin.setPluginCategory(handle, "Trakt: Upcoming") + _set_content(handle, "episodes") + + try: + from core.trakt import TraktCalendarItem as _TCI # noqa: F401 + items = client.get_calendar(token, days=14) + except Exception as exc: + _log(f"Trakt Calendar fehlgeschlagen: {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification("Trakt", "Kalender konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + + if not items: + xbmcgui.Dialog().notification("Trakt", "Keine anstehenden Folgen in den naechsten 14 Tagen.", xbmcgui.NOTIFICATION_INFO, 4000) + xbmcplugin.endOfDirectory(handle) + return + + for item in 
items: + # Datum aufbereiten: ISO -> lesbares Datum + airdate = "" + if item.first_aired: + try: + from datetime import datetime, timezone + dt = datetime.fromisoformat(item.first_aired.replace("Z", "+00:00")) + airdate = dt.astimezone(tz=None).strftime("%d.%m.%Y") + except Exception: + airdate = item.first_aired[:10] + + label = f"{item.show_title} \u2013 S{item.season:02d}E{item.episode:02d}" + if airdate: + label = f"{label} ({airdate})" + + info_labels: dict[str, object] = { + "title": label, + "tvshowtitle": item.show_title, + "season": item.season, + "episode": item.episode, + "mediatype": "episode", + } + if item.show_year: + info_labels["year"] = item.show_year + + _, art, _ = _tmdb_labels_and_art(item.show_title) + + match = _trakt_find_in_plugins(item.show_title) + if match: + plugin_name, matched_title = match + action = "episodes" + params: dict[str, str] = { + "plugin": plugin_name, + "title": matched_title, + "season": f"Staffel {item.season}", + } + else: + action = "search" + params = {"query": item.show_title} + + _add_directory_item(handle, label, action, params, is_folder=True, info_labels=info_labels, art=art) + + xbmcplugin.endOfDirectory(handle) + + +def _show_trakt_continue_watching() -> None: + """Zeigt die naechste ungesehene Folge je Serie aus der Trakt-History.""" + handle = _get_handle() + token = _trakt_get_valid_token() + client = _trakt_get_client() + if not token or not client: + xbmcgui.Dialog().notification("Trakt", "Nicht autorisiert.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + + xbmcplugin.setPluginCategory(handle, "Weiterschauen") + _set_content(handle, "episodes") + + try: + history = client.get_history(token, media_type="episodes", limit=100) + except Exception as exc: + _log(f"Trakt History fehlgeschlagen: {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification("Trakt", "History konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + 
+ # Pro Serie nur den zuletzt gesehenen Eintrag behalten (History ist absteigend sortiert) + seen: dict[str, object] = {} + for item in history: + if item.title and item.title not in seen: + seen[item.title] = item + + if not seen: + xbmcgui.Dialog().notification("Trakt", "Keine History vorhanden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + + for last in seen.values(): + next_season = last.season + next_ep = last.episode + 1 + + match = _trakt_find_in_plugins(last.title) + + # Wenn kein Plugin-Match: Suchaktion anbieten (kein Episode-Overflow-Problem) + if not match: + label = f"{last.title} \u2013 S{next_season:02d}E{next_ep:02d}" + sub = f"(zuletzt: S{last.season:02d}E{last.episode:02d})" + display_label = f"{label} {sub}" + info_labels: dict[str, object] = { + "title": display_label, + "tvshowtitle": last.title, + "mediatype": "episode", + } + if last.year: + info_labels["year"] = last.year + _, art, _ = _tmdb_labels_and_art(last.title) + _add_directory_item(handle, display_label, "search", {"query": last.title}, is_folder=True, info_labels=info_labels, art=art) + continue + + plugin_name, matched_title = match + # Prüfe ob die nächste Episode im Plugin tatsächlich existiert + plugin = _discover_plugins().get(plugin_name) + episodes_getter = getattr(plugin, "episodes_for_season", None) if plugin else None + if callable(episodes_getter): + try: + ep_list = episodes_getter(matched_title, next_season) or [] + if next_ep > len(ep_list): + # Letzte Folge der Staffel war die letzte – nächste Staffel, Folge 1 + next_season += 1 + next_ep = 1 + except Exception: + pass + + label = f"{last.title} \u2013 S{next_season:02d}E{next_ep:02d}" + sub = f"(zuletzt: S{last.season:02d}E{last.episode:02d})" + display_label = f"{label} {sub}" + + info_labels = { + "title": display_label, + "tvshowtitle": last.title, + "season": next_season, + "episode": next_ep, + "mediatype": "episode", + } + if last.year: + info_labels["year"] = last.year + + 
_, art, _ = _tmdb_labels_and_art(last.title) + + params: dict[str, str] = { + "plugin": plugin_name, + "title": matched_title, + "season": f"Staffel {next_season}", + } + _add_directory_item(handle, display_label, "episodes", params, is_folder=True, info_labels=info_labels, art=art) + + xbmcplugin.endOfDirectory(handle) + + +# --------------------------------------------------------------------------- +# Route-Handler – registriert über @_router.route("action") +# Jeder Handler nimmt params: dict[str, str] und delegiert an den +# zuständigen _show_*- oder _play_*-Handler. +# --------------------------------------------------------------------------- + +@_router.route("search") +def _route_search(params: dict[str, str]) -> None: + _show_search() + + +@_router.route("plugin_menu") +def _route_plugin_menu(params: dict[str, str]) -> None: + _show_plugin_menu(params.get("plugin", "")) + + +@_router.route("plugin_search") +def _route_plugin_search(params: dict[str, str]) -> None: + _show_plugin_search(params.get("plugin", "")) + + +@_router.route("genre_sources") +def _route_genre_sources(params: dict[str, str]) -> None: + _show_genre_sources() + + +@_router.route("genres") +def _route_genres(params: dict[str, str]) -> None: + _show_genres(params.get("plugin", "")) + + +@_router.route("categories") +def _route_categories(params: dict[str, str]) -> None: + _show_categories(params.get("plugin", "")) + + +@_router.route("latest_titles") +def _route_latest_titles(params: dict[str, str]) -> None: + _show_latest_titles(params.get("plugin", ""), _parse_positive_int(params.get("page", "1"), default=1)) + + +@_router.route("new_titles") +def _route_new_titles(params: dict[str, str]) -> None: + _show_new_titles(params.get("plugin", ""), _parse_positive_int(params.get("page", "1"), default=1)) + + +@_router.route("latest_episodes") +def _route_latest_episodes(params: dict[str, str]) -> None: + _show_latest_episodes(params.get("plugin", ""), _parse_positive_int(params.get("page", 
"1"), default=1)) + + +@_router.route("genre_series") +def _route_genre_series(params: dict[str, str]) -> None: + _show_genre_series(params.get("plugin", ""), params.get("genre", "")) + + +@_router.route("genre_titles_page") +def _route_genre_titles_page(params: dict[str, str]) -> None: + _show_genre_titles_page( + params.get("plugin", ""), params.get("genre", ""), + _parse_positive_int(params.get("page", "1"), default=1), + ) + + +@_router.route("category_titles_page") +def _route_category_titles_page(params: dict[str, str]) -> None: + _show_category_titles_page( + params.get("plugin", ""), params.get("category", ""), + _parse_positive_int(params.get("page", "1"), default=1), + ) + + +@_router.route("alpha_index") +def _route_alpha_index(params: dict[str, str]) -> None: + _show_alpha_index(params.get("plugin", "")) + + +@_router.route("alpha_titles_page") +def _route_alpha_titles_page(params: dict[str, str]) -> None: + _show_alpha_titles_page( + params.get("plugin", ""), params.get("letter", ""), + _parse_positive_int(params.get("page", "1"), default=1), + ) + + +@_router.route("series_catalog") +def _route_series_catalog(params: dict[str, str]) -> None: + _show_series_catalog(params.get("plugin", ""), _parse_positive_int(params.get("page", "1"), default=1)) + + +@_router.route("genre_series_group") +def _route_genre_series_group(params: dict[str, str]) -> None: + _show_genre_series_group( + params.get("plugin", ""), params.get("genre", ""), params.get("group", ""), + _parse_positive_int(params.get("page", "1"), default=1), + ) + + +@_router.route("popular") +def _route_popular(params: dict[str, str]) -> None: + _show_popular(params.get("plugin") or None, _parse_positive_int(params.get("page", "1"), default=1)) + + +@_router.route("settings") +def _route_settings(params: dict[str, str]) -> None: + _open_settings() + + +@_router.route("check_updates") +def _route_check_updates(params: dict[str, str]) -> None: + _run_update_check() + + 
+@_router.route("apply_update_channel") +def _route_apply_update_channel(params: dict[str, str]) -> None: + _apply_update_channel() + + +@_router.route("select_update_version") +def _route_select_update_version(params: dict[str, str]) -> None: + _show_version_selector() + + +@_router.route("install_resolveurl") +def _route_install_resolveurl(params: dict[str, str]) -> None: + _ensure_resolveurl_installed(force=True, silent=False) + + +@_router.route("seasons") +def _route_seasons(params: dict[str, str]) -> None: + _show_seasons(params.get("plugin", ""), params.get("title", ""), params.get("series_url", "")) + + +@_router.route("episodes") +def _route_episodes(params: dict[str, str]) -> None: + _show_episodes( + params.get("plugin", ""), params.get("title", ""), + params.get("season", ""), params.get("series_url", ""), + ) + + +@_router.route("play_episode") +def _route_play_episode(params: dict[str, str]) -> None: + _play_episode( + params.get("plugin", ""), params.get("title", ""), + params.get("season", ""), params.get("episode", ""), + forced_hoster=params.get("hoster", ""), + episode_url=params.get("url", ""), + series_url=params.get("series_url", ""), + resolve_handle=_get_handle(), + ) + + +@_router.route("play_movie") +def _route_play_movie(params: dict[str, str]) -> None: + plugin_name = params.get("plugin", "") + title = params.get("title", "") + series_url = params.get("series_url", "") + if series_url: + plugin = _discover_plugins().get(plugin_name) + remember_fn = getattr(plugin, "remember_series_url", None) if plugin is not None else None + if callable(remember_fn): + try: + remember_fn(title, series_url) + except Exception: + pass + # Einschalten: Filme haben kein Staffel-/Episodenkonzept → Stream → Titel. 
+ if (plugin_name or "").casefold() == "einschalten": + _play_episode(plugin_name, title, "Stream", title, resolve_handle=_get_handle()) + else: + _play_episode(plugin_name, title, "Film", "Stream", resolve_handle=_get_handle()) + + +@_router.route("play_episode_url") +def _route_play_episode_url(params: dict[str, str]) -> None: + _play_episode_url( + params.get("plugin", ""), + title=params.get("title", ""), + season_number=_parse_positive_int(params.get("season", "0"), default=0), + episode_number=_parse_positive_int(params.get("episode", "0"), default=0), + episode_url=params.get("url", ""), + resolve_handle=_get_handle(), + ) + + +@_router.route("play") +def _route_play(params: dict[str, str]) -> None: + link = params.get("url", "") + if link: + _play_final_link(link, resolve_handle=_get_handle()) + + +@_router.route("year_menu") +def _route_year_menu(params: dict[str, str]) -> None: + _show_year_menu(params.get("plugin", "")) + + +@_router.route("year_titles_page") +def _route_year_titles_page(params: dict[str, str]) -> None: + _show_year_titles_page( + params.get("plugin", ""), params.get("year", ""), + _parse_positive_int(params.get("page", "1"), default=1), + ) + + +@_router.route("country_menu") +def _route_country_menu(params: dict[str, str]) -> None: + _show_country_menu(params.get("plugin", "")) + + +@_router.route("country_titles_page") +def _route_country_titles_page(params: dict[str, str]) -> None: + _show_country_titles_page( + params.get("plugin", ""), params.get("country", ""), + _parse_positive_int(params.get("page", "1"), default=1), + ) + + +@_router.route("collections_menu") +def _route_collections_menu(params: dict[str, str]) -> None: + _show_collections_menu(params.get("plugin", "")) + + +@_router.route("collection_titles_page") +def _route_collection_titles_page(params: dict[str, str]) -> None: + _show_collection_titles_page( + params.get("plugin", ""), params.get("collection", ""), + _parse_positive_int(params.get("page", "1"), default=1), 
+ ) + + +@_router.route("tags_menu") +def _route_tags_menu(params: dict[str, str]) -> None: + _show_tags_menu(params.get("plugin", "")) + + +@_router.route("tag_titles_page") +def _route_tag_titles_page(params: dict[str, str]) -> None: + _show_tag_titles_page( + params.get("plugin", ""), params.get("tag", ""), + _parse_positive_int(params.get("page", "1"), default=1), + ) + + +@_router.route("random_title") +def _route_random_title(params: dict[str, str]) -> None: + _play_random_title(params.get("plugin", "")) + + +@_router.route("trakt_auth") +def _route_trakt_auth(params: dict[str, str]) -> None: + _trakt_authorize() + xbmcplugin.endOfDirectory(_get_handle(), succeeded=False) + + +@_router.route("trakt_watchlist") +def _route_trakt_watchlist(params: dict[str, str]) -> None: + _show_trakt_watchlist(params.get("type", "")) + + +@_router.route("trakt_history") +def _route_trakt_history(params: dict[str, str]) -> None: + _show_trakt_history(_parse_positive_int(params.get("page", "1"), default=1)) + + +@_router.route("trakt_upcoming") +def _route_trakt_upcoming(params: dict[str, str]) -> None: + _show_trakt_upcoming() + + +@_router.route("trakt_continue") +def _route_trakt_continue(params: dict[str, str]) -> None: + _show_trakt_continue_watching() + + +@_router.route("trakt_watchlist_add") +def _route_trakt_watchlist_add(params: dict[str, str]) -> None: + client = _trakt_get_client() + token = _trakt_get_valid_token() + if client and token: + try: + tmdb_id = int(params.get("tmdb_id", "0") or "0") + except ValueError: + tmdb_id = 0 + ok = client.add_to_watchlist( + token, + media_type=params.get("type", "movie"), + tmdb_id=tmdb_id, + imdb_id=params.get("imdb_id", ""), + ) + msg = "Zur Watchlist hinzugefuegt" if ok else "Fehler beim Hinzufuegen" + else: + msg = "Trakt nicht autorisiert" + xbmcgui.Dialog().notification("Trakt", msg, xbmcgui.NOTIFICATION_INFO, 3000) + + +@_router.route("trakt_watchlist_remove") +def _route_trakt_watchlist_remove(params: dict[str, str]) 
-> None: + client = _trakt_get_client() + token = _trakt_get_valid_token() + if client and token: + try: + tmdb_id = int(params.get("tmdb_id", "0") or "0") + except ValueError: + tmdb_id = 0 + ok = client.remove_from_watchlist( + token, + media_type=params.get("type", "movie"), + tmdb_id=tmdb_id, + imdb_id=params.get("imdb_id", ""), + ) + msg = "Von Watchlist entfernt" if ok else "Fehler beim Entfernen" + else: + msg = "Trakt nicht autorisiert" + xbmcgui.Dialog().notification("Trakt", msg, xbmcgui.NOTIFICATION_INFO, 3000) + + +@_router.fallback() +def _route_fallback(params: dict[str, str]) -> None: + _show_root_menu() + + def run() -> None: params = _parse_params() action = params.get("action") _log(f"Action: {action}", xbmc.LOGDEBUG) _maybe_run_auto_update_check(action) _maybe_auto_install_resolveurl(action) - if action == "search": - _show_search() - elif action == "plugin_menu": - _show_plugin_menu(params.get("plugin", "")) - elif action == "plugin_search": - _show_plugin_search(params.get("plugin", "")) - elif action == "genre_sources": - _show_genre_sources() - elif action == "genres": - _show_genres(params.get("plugin", "")) - elif action == "categories": - _show_categories(params.get("plugin", "")) - elif action == "latest_titles": - _show_latest_titles( - params.get("plugin", ""), - _parse_positive_int(params.get("page", "1"), default=1), - ) - elif action == "new_titles": - _show_new_titles( - params.get("plugin", ""), - _parse_positive_int(params.get("page", "1"), default=1), - ) - elif action == "latest_episodes": - _show_latest_episodes( - params.get("plugin", ""), - _parse_positive_int(params.get("page", "1"), default=1), - ) - elif action == "genre_series": - _show_genre_series( - params.get("plugin", ""), - params.get("genre", ""), - ) - elif action == "genre_titles_page": - _show_genre_titles_page( - params.get("plugin", ""), - params.get("genre", ""), - _parse_positive_int(params.get("page", "1"), default=1), - ) - elif action == 
"category_titles_page": - _show_category_titles_page( - params.get("plugin", ""), - params.get("category", ""), - _parse_positive_int(params.get("page", "1"), default=1), - ) - elif action == "alpha_index": - _show_alpha_index(params.get("plugin", "")) - elif action == "alpha_titles_page": - _show_alpha_titles_page( - params.get("plugin", ""), - params.get("letter", ""), - _parse_positive_int(params.get("page", "1"), default=1), - ) - elif action == "series_catalog": - _show_series_catalog( - params.get("plugin", ""), - _parse_positive_int(params.get("page", "1"), default=1), - ) - elif action == "genre_series_group": - _show_genre_series_group( - params.get("plugin", ""), - params.get("genre", ""), - params.get("group", ""), - _parse_positive_int(params.get("page", "1"), default=1), - ) - elif action == "popular": - _show_popular( - params.get("plugin") or None, - _parse_positive_int(params.get("page", "1"), default=1), - ) - elif action == "settings": - _open_settings() - elif action == "check_updates": - _run_update_check() - elif action == "apply_update_channel": - _apply_update_channel() - elif action == "select_update_version": - _show_version_selector() - elif action == "install_resolveurl": - _ensure_resolveurl_installed(force=True, silent=False) - elif action == "seasons": - _show_seasons(params.get("plugin", ""), params.get("title", ""), params.get("series_url", "")) - elif action == "episodes": - _show_episodes( - params.get("plugin", ""), - params.get("title", ""), - params.get("season", ""), - params.get("series_url", ""), - ) - elif action == "play_episode": - _play_episode( - params.get("plugin", ""), - params.get("title", ""), - params.get("season", ""), - params.get("episode", ""), - forced_hoster=params.get("hoster", ""), - episode_url=params.get("url", ""), - series_url=params.get("series_url", ""), - resolve_handle=_get_handle(), - ) - elif action == "play_movie": - plugin_name = params.get("plugin", "") - title = params.get("title", "") - 
series_url = params.get("series_url", "") - if series_url: - plugin = _discover_plugins().get(plugin_name) - remember_series_url = getattr(plugin, "remember_series_url", None) if plugin is not None else None - if callable(remember_series_url): - try: - remember_series_url(title, series_url) - except Exception: - pass - # Einschalten liefert Filme (keine Staffeln/Episoden). Für Playback nutzen wir: - # -> Stream -> . - if (plugin_name or "").casefold() == "einschalten": - _play_episode( - plugin_name, - title, - "Stream", - title, - resolve_handle=_get_handle(), - ) - else: - _play_episode( - plugin_name, - title, - "Film", - "Stream", - resolve_handle=_get_handle(), - ) - elif action == "play_episode_url": - _play_episode_url( - params.get("plugin", ""), - title=params.get("title", ""), - season_number=_parse_positive_int(params.get("season", "0"), default=0), - episode_number=_parse_positive_int(params.get("episode", "0"), default=0), - episode_url=params.get("url", ""), - resolve_handle=_get_handle(), - ) - elif action == "play": - link = params.get("url", "") - if link: - _play_final_link(link, resolve_handle=_get_handle()) - else: - _show_root_menu() + _router.dispatch(action=action, params=params) if __name__ == "__main__": diff --git a/addon/plugin_helpers.py b/addon/plugin_helpers.py index 3ca5e7b..b88813a 100644 --- a/addon/plugin_helpers.py +++ b/addon/plugin_helpers.py @@ -270,6 +270,27 @@ def dump_response_html( _append_text_file(path, content) +def resolve_via_resolveurl(link: str, *, fallback_to_link: bool = True) -> Optional[str]: + """Versucht einen Hoster-Link mit resolveurl_backend aufzuloesen. + + Gibt den aufgeloesten Link zurueck, oder – wenn resolveurl nicht verfuegbar + ist oder nichts liefert – den Original-Link (wenn fallback_to_link=True) + bzw. None (wenn fallback_to_link=False). 
+ """ + link = (link or "").strip() + if not link: + return None + try: + from resolveurl_backend import resolve as _resolve_fn # type: ignore[import-not-found] + except Exception: + _resolve_fn = None + if callable(_resolve_fn): + resolved = _resolve_fn(link) + if resolved: + return resolved + return link if fallback_to_link else None + + def normalize_resolved_stream_url(final_url: str, *, source_url: str = "") -> str: """Normalisiert hoster-spezifische Header im finalen Stream-Link. diff --git a/addon/plugin_interface.py b/addon/plugin_interface.py index 83948d7..2de8d37 100644 --- a/addon/plugin_interface.py +++ b/addon/plugin_interface.py @@ -53,8 +53,14 @@ class BasisPlugin(ABC): def capabilities(self) -> Set[str]: """Optional: Liefert eine Menge an Features/Capabilities dieses Plugins. - Beispiele: - - `popular_series`: Plugin kann eine Liste beliebter Serien liefern. + Bekannte Werte: + - 'popular_series' – Plugin hat beliebte Serien/Filme + - 'latest_titles' – Plugin hat neu hinzugefuegte Titel + - 'year_filter' – Plugin unterstuetzt Jahr-Filter + - 'country_filter' – Plugin unterstuetzt Land-Filter + - 'collections' – Plugin hat Sammlungen/Filmreihen + - 'tags' – Plugin hat Tag/Schlagwort-Suche + - 'random' – Plugin kann einen zufaelligen Titel liefern """ return set() @@ -63,3 +69,85 @@ class BasisPlugin(ABC): """Optional: Liefert eine Liste beliebter Serien (als Titel-Strings).""" return [] + + # ------------------------------------------------------------------ + # Neue Felder fuer "Neue Titel"-Menü + # ------------------------------------------------------------------ + + def latest_titles(self, page: int = 1) -> List[str]: + """Optional: Liefert neu hinzugefuegte Titel (Filme oder Serien). 
+ + Capability: 'latest_titles' + """ + return [] + + # ------------------------------------------------------------------ + # Jahr-Filter + # ------------------------------------------------------------------ + + def years_available(self) -> List[str]: + """Optional: Liefert verfuegbare Erscheinungsjahre (z.B. ['2026', '2025', ...]). + + Capability: 'year_filter' + """ + return [] + + def titles_for_year(self, year: str, page: int = 1) -> List[str]: + """Optional: Liefert Titel fuer ein bestimmtes Erscheinungsjahr.""" + return [] + + # ------------------------------------------------------------------ + # Land-Filter + # ------------------------------------------------------------------ + + def countries_available(self) -> List[str]: + """Optional: Liefert verfuegbare Produktionslaender. + + Capability: 'country_filter' + """ + return [] + + def titles_for_country(self, country: str, page: int = 1) -> List[str]: + """Optional: Liefert Titel fuer ein bestimmtes Produktionsland.""" + return [] + + # ------------------------------------------------------------------ + # Sammlungen / Collections + # ------------------------------------------------------------------ + + def collections(self) -> List[str]: + """Optional: Liefert verfuegbare Sammlungen/Filmreihen. + + Capability: 'collections' + """ + return [] + + def titles_for_collection(self, collection: str, page: int = 1) -> List[str]: + """Optional: Liefert Titel einer Sammlung/Filmreihe.""" + return [] + + # ------------------------------------------------------------------ + # Tags / Schlagworte + # ------------------------------------------------------------------ + + def tags(self) -> List[str]: + """Optional: Liefert verfuegbare Schlagworte/Tags. 
+ + Capability: 'tags' + """ + return [] + + def titles_for_tag(self, tag: str, page: int = 1) -> List[str]: + """Optional: Liefert Titel zu einem Schlagwort/Tag.""" + return [] + + # ------------------------------------------------------------------ + # Zufaelliger Titel + # ------------------------------------------------------------------ + + def random_title(self) -> Optional[str]: + """Optional: Liefert einen zufaelligen Titel. + + Capability: 'random' + """ + return None diff --git a/addon/plugins/__init__.py b/addon/plugins/__init__.py index 9929cfa..6bb3115 100644 --- a/addon/plugins/__init__.py +++ b/addon/plugins/__init__.py @@ -1 +1,2 @@ """Kodi addon plugins.""" +from __future__ import annotations diff --git a/addon/plugins/_template_plugin.py b/addon/plugins/_template_plugin.py index 043cba1..d923831 100644 --- a/addon/plugins/_template_plugin.py +++ b/addon/plugins/_template_plugin.py @@ -7,9 +7,19 @@ Vorgehen fuer ein neues Plugin: 1. Datei kopieren/umbenennen (ohne fuehrenden Unterstrich), z.B. `my_site_plugin.py` 2. `name`, `ADDON_ID`, `BASE_URL` und Header anpassen 3. `search_titles`, `seasons_for`, `episodes_for` gemaess Zielseite implementieren -4. Optional weitere Methoden wie `stream_link_for`, `resolve_stream_link`, - `popular_series`, `genres`, `titles_for_genre`, `available_hosters_for` etc. - implementieren – siehe `docs/PLUGIN_DEVELOPMENT.md` und bestehende Plugins. +4. 
Optional weitere Methoden implementieren – capabilities deklarieren und Methoden ueberschreiben: + - `popular_series()` + capability 'popular_series' + - `latest_titles(page)` + capability 'latest_titles' + - `genres()` + `titles_for_genre(genre)` + `titles_for_genre_page(genre, page)` + - `alpha_index()` + `titles_for_alpha_page(letter, page)` + - `years_available()` + `titles_for_year(year, page)` + capability 'year_filter' + - `countries_available()` + `titles_for_country(country, page)` + capability 'country_filter' + - `collections()` + `titles_for_collection(collection, page)` + capability 'collections' + - `tags()` + `titles_for_tag(tag, page)` + capability 'tags' + - `random_title()` + capability 'random' + - `stream_link_for(...)`, `resolve_stream_link(link)`, `available_hosters_for(...)` + - `metadata_for(title)` fuer eigene Metadaten + Siehe `docs/PLUGIN_DEVELOPMENT.md` und bestehende Plugins. """ from __future__ import annotations @@ -157,10 +167,17 @@ class TemplatePlugin(BasisPlugin): def capabilities(self) -> set[str]: """Optional: Deklariert die Faehigkeiten dieses Plugins. 
- Beispiele: - - `popular_series`: Plugin kann beliebte Titel liefern - - `genres`: Plugin unterstuetzt Genre-Browser - - `latest_episodes`: Plugin liefert eine Liste neuer Episoden + Bekannte Werte (aus plugin_interface.py): + - 'popular_series' – Plugin hat beliebte Serien/Filme + - 'latest_titles' – Plugin hat neu hinzugefuegte Titel + - 'year_filter' – Plugin unterstuetzt Jahr-Filter + - 'country_filter' – Plugin unterstuetzt Land-Filter + - 'collections' – Plugin hat Sammlungen/Filmreihen + - 'tags' – Plugin hat Tag/Schlagwort-Suche + - 'random' – Plugin kann einen zufaelligen Titel liefern + - 'genres' – Plugin hat Genre-Browser + - 'alpha' – Plugin hat A-Z-Index + - 'latest_episodes' – Plugin liefert neue Episoden """ return set() diff --git a/addon/plugins/aniworld_plugin.py b/addon/plugins/aniworld_plugin.py index d887f9e..8c15d18 100644 --- a/addon/plugins/aniworld_plugin.py +++ b/addon/plugins/aniworld_plugin.py @@ -1024,7 +1024,7 @@ class AniworldPlugin(BasisPlugin): _session_cache_set(self._season_episodes_cache_name(season_url), payload) def capabilities(self) -> set[str]: - return {"popular_series", "genres", "latest_episodes"} + return {"popular_series", "genres", "latest_episodes", "latest_titles"} def _find_series_by_title(self, title: str) -> Optional[SeriesResult]: title = (title or "").strip() @@ -1277,6 +1277,100 @@ class AniworldPlugin(BasisPlugin): self._save_title_url_cache() return [entry.title for entry in entries if entry.title] + def _genre_slug(self, genre: str) -> str: + """Wandelt einen Genre-Namen in einen URL-Slug um.""" + slug = (genre or "").strip().lower() + slug = re.sub(r"[^a-z0-9]+", "-", slug).strip("-") + return slug + + def _genre_page_url(self, genre: str, page: int) -> str: + slug = self._genre_slug(genre) + base = f"{_get_base_url()}/genre/{slug}" + return base if page <= 1 else f"{base}?page={page}" + + def _parse_genre_page_titles(self, soup: BeautifulSoupT) -> List[str]: + """Extrahiert Titel von einer paginierten 
Genre-Seite.""" + titles: List[str] = [] + seen: set[str] = set() + for anchor in soup.select("div.seriesListContainer a[href], ul.seriesList li a[href], a[href*='/anime/stream/']"): + href = (anchor.get("href") or "").strip() + if not href or "/staffel-" in href or "/episode-" in href: + continue + title = (anchor.get_text(" ", strip=True) or "").strip() + if not title: + continue + key = title.casefold() + if key in seen: + continue + seen.add(key) + url = _absolute_url(href) + self._remember_anime_result(title, url, persist=False) + titles.append(title) + return titles + + def _extract_genre_last_page(self, soup: BeautifulSoupT) -> int: + max_page = 1 + for anchor in soup.select("a.page-link[href], nav a[href]"): + href = (anchor.get("href") or "").strip() + for match in re.findall(r"[?&]page=(\d+)", href): + try: + max_page = max(max_page, int(match)) + except Exception: + continue + return max_page + + def titles_for_genre_page(self, genre: str, page: int = 1) -> List[str]: + """Liefert Titel einer Genre-Seite (paginiert).""" + genre = (genre or "").strip() + if not genre or not self._requests_available: + return [] + page = max(1, int(page or 1)) + try: + url = self._genre_page_url(genre, page) + soup = _get_soup_simple(url) + return self._parse_genre_page_titles(soup) + except Exception: + return [] + + def genre_page_count(self, genre: str) -> int: + """Liefert die Seitenanzahl fuer eine Genre-Seite.""" + genre = (genre or "").strip() + if not genre or not self._requests_available: + return 1 + try: + url = self._genre_page_url(genre, 1) + soup = _get_soup_simple(url) + return max(1, self._extract_genre_last_page(soup)) + except Exception: + return 1 + + def latest_titles(self, page: int = 1) -> List[str]: + """Liefert neu hinzugefuegte Anime vom Animekalender.""" + if not self._requests_available: + return [] + page = max(1, int(page or 1)) + try: + url = f"{_get_base_url()}/animekalender" + if page > 1: + url = f"{url}?page={page}" + soup = 
_get_soup_simple(url) + titles: List[str] = [] + seen: set[str] = set() + for anchor in soup.select("a[href*='/anime/stream/']"): + title = (anchor.get_text(" ", strip=True) or "").strip() + href = (anchor.get("href") or "").strip() + if not title or "/staffel-" in href or "/episode-" in href: + continue + key = title.casefold() + if key in seen: + continue + seen.add(key) + self._remember_anime_result(title, _absolute_url(href), persist=False) + titles.append(title) + return titles + except Exception: + return [] + def _season_label(self, number: int) -> str: return f"Staffel {number}" diff --git a/addon/plugins/dokustreams_plugin.py b/addon/plugins/dokustreams_plugin.py index 778dd9b..fa27ed9 100644 --- a/addon/plugins/dokustreams_plugin.py +++ b/addon/plugins/dokustreams_plugin.py @@ -36,6 +36,8 @@ ADDON_ID = "plugin.video.viewit" SETTING_BASE_URL = "doku_streams_base_url" DEFAULT_BASE_URL = "https://doku-streams.com" MOST_VIEWED_PATH = "/meistgesehene/" +RANDOM_PATH = "/zufaellige-doku/" +TAGS_BASE_PATH = "/tag/" DEFAULT_TIMEOUT = 20 GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" @@ -78,12 +80,12 @@ def _extract_last_page(soup: BeautifulSoupT) -> int: for anchor in soup.select("nav.navigation a[href], nav.pagination a[href], a.page-numbers[href]"): text = (anchor.get_text(" ", strip=True) or "").strip() for candidate in (text, (anchor.get("href") or "").strip()): - for value in re.findall(r"/page/(\\d+)/", candidate): + for value in re.findall(r"/page/(\d+)/", candidate): try: max_page = max(max_page, int(value)) except Exception: continue - for value in re.findall(r"(\\d+)", candidate): + for value in re.findall(r"(\d+)", candidate): try: max_page = max(max_page, int(value)) except Exception: @@ -287,7 +289,7 @@ class DokuStreamsPlugin(BasisPlugin): return _parse_listing_hits(soup, query=query) def capabilities(self) -> set[str]: - return {"genres", "popular_series"} + return {"genres", "popular_series", "tags", 
"random"} def _categories_url(self) -> str: return _absolute_url("/kategorien/") @@ -465,6 +467,90 @@ class DokuStreamsPlugin(BasisPlugin): return [] return [title] + def tags(self) -> List[str]: + """Liefert Schlagworte/Tags von der Startseite.""" + if not self._requests_available: + return [] + try: + soup = _get_soup(_absolute_url("/"), session=get_requests_session("dokustreams", headers=HEADERS)) + except Exception: + return [] + tag_list: list[str] = [] + for anchor in soup.select("a[href*='/tag/']"): + name = (anchor.get_text(" ", strip=True) or "").strip() + href = (anchor.get("href") or "").strip() + if name and TAGS_BASE_PATH in href and name not in tag_list: + tag_list.append(name) + return sorted(tag_list, key=lambda t: t.casefold()) + + def titles_for_tag(self, tag: str, page: int = 1) -> List[str]: + """Liefert Titel zu einem Schlagwort.""" + tag = (tag or "").strip() + if not tag or not self._requests_available: + return [] + page = max(1, int(page or 1)) + slug = tag.lower().replace(" ", "-") + base = _absolute_url(f"{TAGS_BASE_PATH}{slug}/") + url = base if page == 1 else f"{base}page/{page}/" + try: + soup = _get_soup(url, session=get_requests_session("dokustreams", headers=HEADERS)) + except Exception: + return [] + hits = _parse_listing_hits(soup) + self._title_to_url.update({hit.title: hit.url for hit in hits if hit.title and hit.url}) + for hit in hits: + if hit.title: + self._title_meta[hit.title] = (hit.plot, hit.poster) + return [hit.title for hit in hits if hit.title] + + def random_title(self) -> Optional[str]: + """Liefert einen zufaelligen Doku-Titel via Redirect.""" + if not self._requests_available: + return None + try: + session = get_requests_session("dokustreams", headers=HEADERS) + resp = session.get(_absolute_url(RANDOM_PATH), headers=HEADERS, + timeout=DEFAULT_TIMEOUT, allow_redirects=True) + resp.raise_for_status() + final_url = (resp.url or "").strip() + if not final_url or 
final_url.rstrip("/").endswith(RANDOM_PATH.rstrip("/")): + return None + soup = _get_soup(final_url, session=session) + hits = _parse_listing_hits(soup) + if not hits: + # Einzelseite: Titel aus H1 oder og:title lesen + h1 = soup.select_one("h1.entry-title, h1") + title = (h1.get_text(" ", strip=True) if h1 else "").strip() + if title: + self._title_to_url[title] = final_url + return title + return None + hit = hits[0] + if hit.title: + self._title_to_url[hit.title] = hit.url + return hit.title + except Exception: + return None + return None + + def resolve_stream_link(self, link: str) -> Optional[str]: + """Folgt Redirects und versucht ResolveURL fuer Hoster-Links.""" + if not link: + return None + from plugin_helpers import resolve_via_resolveurl + resolved = resolve_via_resolveurl(link, fallback_to_link=False) + if resolved: + return resolved + if self._requests_available: + try: + session = get_requests_session("dokustreams", headers=HEADERS) + resp = session.get(link, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True) + resp.raise_for_status() + return (resp.url or link).strip() or link + except Exception: + pass + return link + def stream_link_for(self, title: str, season: str, episode: str) -> Optional[str]: title = (title or "").strip() if not title: diff --git a/addon/plugins/einschalten_plugin.py b/addon/plugins/einschalten_plugin.py index 9e7c47d..932df4e 100644 --- a/addon/plugins/einschalten_plugin.py +++ b/addon/plugins/einschalten_plugin.py @@ -1015,16 +1015,40 @@ class EinschaltenPlugin(BasisPlugin): return stream_url or None def resolve_stream_link(self, link: str) -> Optional[str]: - try: - from resolveurl_backend import resolve as resolve_with_resolveurl - except Exception: - resolve_with_resolveurl = None - if callable(resolve_with_resolveurl): - return resolve_with_resolveurl(link) or link - return link + from plugin_helpers import resolve_via_resolveurl + return resolve_via_resolveurl(link, fallback_to_link=True) def 
capabilities(self) -> Set[str]: - return {"new_titles", "genres"} + return {"new_titles", "genres", "popular_series", "latest_titles"} + + def popular_series(self) -> List[str]: + """Liefert die am besten bewerteten Filme (nach voteAverage sortiert).""" + if not REQUESTS_AVAILABLE: + return [] + if not self._get_base_url(): + return [] + movies = self._load_movies() + with_rating = [m for m in movies if m.vote_average is not None] + without_rating = [m for m in movies if m.vote_average is None] + ranked = sorted(with_rating, key=lambda m: (m.vote_average or 0.0), reverse=True) + ordered = ranked + without_rating + titles: List[str] = [] + seen: set[str] = set() + for movie in ordered[:50]: + if movie.title in seen: + continue + seen.add(movie.title) + self._id_by_title[movie.title] = movie.id + titles.append(movie.title) + return titles + + def latest_titles(self, page: int = 1) -> List[str]: + """Liefert neu hinzugefügte Filme (Alias zu new_titles_page).""" + if not REQUESTS_AVAILABLE: + return [] + if not self._get_base_url(): + return [] + return self.new_titles_page(max(1, int(page or 1))) def new_titles(self) -> List[str]: if not REQUESTS_AVAILABLE: diff --git a/addon/plugins/filmpalast_plugin.py b/addon/plugins/filmpalast_plugin.py index 589ff68..e644591 100644 --- a/addon/plugins/filmpalast_plugin.py +++ b/addon/plugins/filmpalast_plugin.py @@ -525,7 +525,7 @@ class FilmpalastPlugin(BasisPlugin): return max_page def capabilities(self) -> set[str]: - return {"genres", "alpha", "series_catalog"} + return {"genres", "alpha", "series_catalog", "popular_series", "latest_titles"} def _parse_alpha_links(self, soup: BeautifulSoupT) -> Dict[str, str]: alpha: Dict[str, str] = {} @@ -726,7 +726,7 @@ class FilmpalastPlugin(BasisPlugin): merged_poster = (poster or old_poster or "").strip() self._title_meta[title] = (merged_plot, merged_poster) - def _extract_detail_metadata(self, soup: BeautifulSoupT) -> tuple[str, str]: + def _extract_detail_metadata(self, soup: 
BeautifulSoupT) -> tuple[str, str, str]: if not soup: return "", "" root = soup.select_one("div#content[role='main']") or soup @@ -773,7 +773,22 @@ class FilmpalastPlugin(BasisPlugin): if "/themes/" not in lower and "spacer.gif" not in lower and "/files/movies/" in lower: poster = candidate - return plot, poster + # IMDb-Rating: Schema.org aggregateRating + rating = "" + rating_node = detail.select_one("[itemprop='ratingValue']") + if rating_node is not None: + rating = (rating_node.get_text(" ", strip=True) or "").strip() + if not rating: + # Fallback: data-attribute oder Klassen-basierte Anzeige + for sel in ("span.imdb", "span.rating", "[class*='imdb']"): + node = detail.select_one(sel) + if node is not None: + candidate = (node.get_text(" ", strip=True) or "").strip() + if candidate: + rating = candidate + break + + return plot, poster, rating def remember_series_url(self, title: str, series_url: str) -> None: title = (title or "").strip() @@ -830,12 +845,17 @@ class FilmpalastPlugin(BasisPlugin): try: soup = _get_soup(detail_url, session=get_requests_session("filmpalast", headers=HEADERS)) - plot, poster = self._extract_detail_metadata(soup) + plot, poster, rating = self._extract_detail_metadata(soup) except Exception: - plot, poster = "", "" + plot, poster, rating = "", "", "" if plot: info["plot"] = plot + if rating: + try: + info["rating"] = str(float(rating.replace(",", "."))) + except (ValueError, TypeError): + pass if poster: art = {"thumb": poster, "poster": poster} self._store_title_meta(title, plot=info.get("plot", ""), poster=poster) @@ -1025,6 +1045,32 @@ class FilmpalastPlugin(BasisPlugin): def reset_preferred_hosters(self) -> None: self._preferred_hosters = list(self._default_preferred_hosters) + def popular_series(self) -> List[str]: + """Liefert beliebte Titel von /movies/top.""" + if not self._requests_available: + return [] + try: + url = _absolute_url("/movies/top") + soup = _get_soup(url, session=get_requests_session("filmpalast", 
headers=HEADERS)) + hits = self._parse_listing_hits(soup) + return self._apply_hits_to_title_index(hits) + except Exception: + return [] + + def latest_titles(self, page: int = 1) -> List[str]: + """Liefert neu hinzugefuegte Titel von /movies/new.""" + if not self._requests_available: + return [] + page = max(1, int(page or 1)) + try: + base = _absolute_url("/movies/new") + url = base if page == 1 else urljoin(base.rstrip("/") + "/", f"page/{page}") + soup = _get_soup(url, session=get_requests_session("filmpalast", headers=HEADERS)) + hits = self._parse_listing_hits(soup) + return self._apply_hits_to_title_index(hits) + except Exception: + return [] + def resolve_stream_link(self, link: str) -> Optional[str]: if not link: return None diff --git a/addon/plugins/serienstream_plugin.py b/addon/plugins/serienstream_plugin.py index 1f9fccb..9e29a4e 100644 --- a/addon/plugins/serienstream_plugin.py +++ b/addon/plugins/serienstream_plugin.py @@ -17,7 +17,7 @@ import os import re import time import unicodedata -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Callable, Optional from urllib.parse import quote try: # pragma: no cover - optional dependency @@ -82,11 +82,11 @@ SESSION_CACHE_MAX_TITLE_URLS = 800 CATALOG_SEARCH_TTL_SECONDS = 600 CATALOG_SEARCH_CACHE_KEY = "catalog_index" GENRE_LIST_PAGE_SIZE = 20 -_CATALOG_INDEX_MEMORY: tuple[float, List["SeriesResult"]] = (0.0, []) -ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] +_CATALOG_INDEX_MEMORY: tuple[float, list["SeriesResult"]] = (0.0, []) +ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] -def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: +def _emit_progress(callback: ProgressCallback, message: str, percent: int | None = None) -> None: if not callable(callback): return try: @@ -110,8 +110,8 @@ class EpisodeInfo: original_title: str url: str season_label: str = "" - 
languages: List[str] = field(default_factory=list) - hosters: List[str] = field(default_factory=list) + languages: list[str] = field(default_factory=list) + hosters: list[str] = field(default_factory=list) @dataclass @@ -127,12 +127,12 @@ class LatestEpisode: class SeasonInfo: number: int url: str - episodes: List[EpisodeInfo] + episodes: list[EpisodeInfo] -def _extract_series_metadata(soup: BeautifulSoupT) -> Tuple[Dict[str, str], Dict[str, str]]: - info: Dict[str, str] = {} - art: Dict[str, str] = {} +def _extract_series_metadata(soup: BeautifulSoupT) -> tuple[dict[str, str], dict[str, str]]: + info: dict[str, str] = {} + art: dict[str, str] = {} if not soup: return info, art @@ -423,7 +423,7 @@ def _looks_like_cloudflare_challenge(body: str) -> bool: return any(marker in lower for marker in markers) -def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> BeautifulSoupT: +def _get_soup(url: str, *, session: RequestsSession | None = None) -> BeautifulSoupT: _ensure_requests() _log_visit(url) response = None @@ -484,8 +484,8 @@ def _get_soup_simple(url: str) -> BeautifulSoupT: return BeautifulSoup(body, "html.parser") -def _extract_genre_names_from_html(body: str) -> List[str]: - names: List[str] = [] +def _extract_genre_names_from_html(body: str) -> list[str]: + names: list[str] = [] seen: set[str] = set() pattern = re.compile( r"]*class=[\"'][^\"']*background-1[^\"']*[\"'][^>]*>.*?]*>(.*?)", @@ -508,7 +508,7 @@ def _strip_tags(value: str) -> str: return re.sub(r"<[^>]+>", " ", value or "") -def _search_series_api(query: str) -> List[SeriesResult]: +def _search_series_api(query: str) -> list[SeriesResult]: query = (query or "").strip() if not query: return [] @@ -544,7 +544,7 @@ def _search_series_api(query: str) -> List[SeriesResult]: shows = payload.get("shows") if isinstance(payload, dict) else None if not isinstance(shows, list): continue - results: List[SeriesResult] = [] + results: list[SeriesResult] = [] for item in shows: if not 
isinstance(item, dict): continue @@ -570,7 +570,7 @@ def _search_series_api(query: str) -> List[SeriesResult]: return [] -def _search_series_server(query: str) -> List[SeriesResult]: +def _search_series_server(query: str) -> list[SeriesResult]: if not query: return [] base = _get_base_url() @@ -588,7 +588,7 @@ def _search_series_server(query: str) -> List[SeriesResult]: if root is None: continue seen_urls: set[str] = set() - results: List[SeriesResult] = [] + results: list[SeriesResult] = [] for card in root.select(".cover-card"): anchor = card.select_one("a[href*='/serie/']") if not anchor: @@ -613,8 +613,8 @@ def _search_series_server(query: str) -> List[SeriesResult]: return [] -def _extract_catalog_index_from_html(body: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]: - items: List[SeriesResult] = [] +def _extract_catalog_index_from_html(body: str, *, progress_callback: ProgressCallback = None) -> list[SeriesResult]: + items: list[SeriesResult] = [] if not body: return items seen_urls: set[str] = set() @@ -649,8 +649,8 @@ def _extract_catalog_index_from_html(body: str, *, progress_callback: ProgressCa return items -def _catalog_index_from_soup(soup: BeautifulSoupT) -> List[SeriesResult]: - items: List[SeriesResult] = [] +def _catalog_index_from_soup(soup: BeautifulSoupT) -> list[SeriesResult]: + items: list[SeriesResult] = [] if not soup: return items seen_urls: set[str] = set() @@ -673,7 +673,7 @@ def _catalog_index_from_soup(soup: BeautifulSoupT) -> List[SeriesResult]: return items -def _load_catalog_index_from_cache() -> Optional[List[SeriesResult]]: +def _load_catalog_index_from_cache() -> Optional[list[SeriesResult]]: global _CATALOG_INDEX_MEMORY expires_at, cached = _CATALOG_INDEX_MEMORY if cached and expires_at > time.time(): @@ -681,7 +681,7 @@ def _load_catalog_index_from_cache() -> Optional[List[SeriesResult]]: raw = _session_cache_get(CATALOG_SEARCH_CACHE_KEY) if not isinstance(raw, list): return None - items: 
List[SeriesResult] = [] + items: list[SeriesResult] = [] for entry in raw: if not isinstance(entry, list) or len(entry) < 2: continue @@ -696,12 +696,12 @@ def _load_catalog_index_from_cache() -> Optional[List[SeriesResult]]: return items or None -def _store_catalog_index_in_cache(items: List[SeriesResult]) -> None: +def _store_catalog_index_in_cache(items: list[SeriesResult]) -> None: global _CATALOG_INDEX_MEMORY if not items: return _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(items)) - payload: List[List[str]] = [] + payload: list[list[str]] = [] for entry in items: if not entry.title or not entry.url: continue @@ -709,7 +709,7 @@ def _store_catalog_index_in_cache(items: List[SeriesResult]) -> None: _session_cache_set(CATALOG_SEARCH_CACHE_KEY, payload, ttl_seconds=CATALOG_SEARCH_TTL_SECONDS) -def search_series(query: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]: +def search_series(query: str, *, progress_callback: ProgressCallback = None) -> list[SeriesResult]: """Sucht Serien im (/serien)-Katalog nach Titel. Nutzt Cache + Ein-Pass-Filter.""" _ensure_requests() if not _normalize_search_text(query): @@ -724,7 +724,7 @@ def search_series(query: str, *, progress_callback: ProgressCallback = None) -> _emit_progress(progress_callback, "Lade Katalogseite", 42) catalog_url = f"{_get_base_url()}/serien?by=genre" - items: List[SeriesResult] = [] + items: list[SeriesResult] = [] try: # Bevorzugt den Soup-Helper, damit Tests HTML einfache injizieren koennen. 
soup = _get_soup_simple(catalog_url) @@ -749,9 +749,9 @@ def search_series(query: str, *, progress_callback: ProgressCallback = None) -> return [] -def parse_series_catalog(soup: BeautifulSoupT) -> Dict[str, List[SeriesResult]]: +def parse_series_catalog(soup: BeautifulSoupT) -> dict[str, list[SeriesResult]]: """Parst die Serien-Übersicht (/serien) und liefert Genre -> Serienliste.""" - catalog: Dict[str, List[SeriesResult]] = {} + catalog: dict[str, list[SeriesResult]] = {} # Neues Layout (Stand: 2026-01): Gruppen-Header + Liste. # - Header: `div.background-1 ...` mit `h3` @@ -763,7 +763,7 @@ def parse_series_catalog(soup: BeautifulSoupT) -> Dict[str, List[SeriesResult]]: list_node = header.parent.find_next_sibling("ul", class_="series-list") if not list_node: continue - series: List[SeriesResult] = [] + series: list[SeriesResult] = [] for item in list_node.select("li.series-item"): anchor = item.find("a", href=True) if not anchor: @@ -784,8 +784,8 @@ def parse_series_catalog(soup: BeautifulSoupT) -> Dict[str, List[SeriesResult]]: return catalog -def _extract_season_links(soup: BeautifulSoupT) -> List[Tuple[int, str]]: - season_links: List[Tuple[int, str]] = [] +def _extract_season_links(soup: BeautifulSoupT) -> list[tuple[int, str]]: + season_links: list[tuple[int, str]] = [] seen_numbers: set[int] = set() anchors = soup.select("ul.nav.list-items-nav a[data-season-pill][href]") for anchor in anchors: @@ -814,7 +814,7 @@ def _extract_season_links(soup: BeautifulSoupT) -> List[Tuple[int, str]]: return season_links -def _extract_number_of_seasons(soup: BeautifulSoupT) -> Optional[int]: +def _extract_number_of_seasons(soup: BeautifulSoupT) -> int | None: tag = soup.select_one('meta[itemprop="numberOfSeasons"]') if not tag: return None @@ -834,8 +834,8 @@ def _extract_canonical_url(soup: BeautifulSoupT, fallback: str) -> str: return fallback.rstrip("/") -def _extract_episodes(soup: BeautifulSoupT) -> List[EpisodeInfo]: - episodes: List[EpisodeInfo] = [] +def 
_extract_episodes(soup: BeautifulSoupT) -> list[EpisodeInfo]: + episodes: list[EpisodeInfo] = [] season_label = "" season_header = soup.select_one("section.episode-section h2") or soup.select_one("h2.h3") if season_header: @@ -892,13 +892,13 @@ def _extract_episodes(soup: BeautifulSoupT) -> List[EpisodeInfo]: if _is_episode_tba(title, original_title): continue - hosters: List[str] = [] + hosters: list[str] = [] for img in row.select(".episode-watch-cell img"): label = (img.get("alt") or img.get("title") or "").strip() if label and label not in hosters: hosters.append(label) - languages: List[str] = [] + languages: list[str] = [] for flag in row.select(".episode-language-cell .watch-language"): classes = flag.get("class") or [] if isinstance(classes, str): @@ -931,8 +931,8 @@ def _extract_episodes(soup: BeautifulSoupT) -> List[EpisodeInfo]: def fetch_episode_stream_link( episode_url: str, *, - preferred_hosters: Optional[List[str]] = None, -) -> Optional[str]: + preferred_hosters: Optional[list[str]] = None, +) -> str | None: _ensure_requests() normalized_url = _absolute_url(episode_url) preferred = [hoster.lower() for hoster in (preferred_hosters or DEFAULT_PREFERRED_HOSTERS)] @@ -943,7 +943,7 @@ def fetch_episode_stream_link( except Exception: pass soup = _get_soup(normalized_url, session=session) - candidates: List[Tuple[str, str]] = [] + candidates: list[tuple[str, str]] = [] for button in soup.select("button.link-box[data-play-url]"): play_url = (button.get("data-play-url") or "").strip() provider = (button.get("data-provider-name") or "").strip() @@ -961,7 +961,7 @@ def fetch_episode_stream_link( return candidates[0][1] -def fetch_episode_hoster_names(episode_url: str) -> List[str]: +def fetch_episode_hoster_names(episode_url: str) -> list[str]: """Liest die verfügbaren Hoster-Namen für eine Episode aus.""" _ensure_requests() normalized_url = _absolute_url(episode_url) @@ -972,7 +972,7 @@ def fetch_episode_hoster_names(episode_url: str) -> List[str]: except 
Exception: pass soup = _get_soup(normalized_url, session=session) - names: List[str] = [] + names: list[str] = [] seen: set[str] = set() for button in soup.select("button.link-box[data-provider-name]"): name = (button.get("data-provider-name") or "").strip() @@ -995,9 +995,9 @@ _LATEST_EPISODE_TAG_RE = re.compile(SEASON_EPISODE_TAG, re.IGNORECASE) _LATEST_EPISODE_URL_RE = re.compile(SEASON_EPISODE_URL, re.IGNORECASE) -def _extract_latest_episodes(soup: BeautifulSoupT) -> List[LatestEpisode]: +def _extract_latest_episodes(soup: BeautifulSoupT) -> list[LatestEpisode]: """Parst die neuesten Episoden von der Startseite.""" - episodes: List[LatestEpisode] = [] + episodes: list[LatestEpisode] = [] seen: set[str] = set() for anchor in soup.select("a.latest-episode-row[href]"): @@ -1016,8 +1016,8 @@ def _extract_latest_episodes(soup: BeautifulSoupT) -> List[LatestEpisode]: season_text = (anchor.select_one(".ep-season").get_text(strip=True) if anchor.select_one(".ep-season") else "").strip() episode_text = (anchor.select_one(".ep-episode").get_text(strip=True) if anchor.select_one(".ep-episode") else "").strip() - season_number: Optional[int] = None - episode_number: Optional[int] = None + season_number: int | None = None + episode_number: int | None = None match = re.search(r"S\s*(\d+)", season_text, re.IGNORECASE) if match: season_number = int(match.group(1)) @@ -1054,7 +1054,7 @@ def _extract_latest_episodes(soup: BeautifulSoupT) -> List[LatestEpisode]: return episodes -def resolve_redirect(target_url: str) -> Optional[str]: +def resolve_redirect(target_url: str) -> str | None: _ensure_requests() normalized_url = _absolute_url(target_url) _log_visit(normalized_url) @@ -1085,10 +1085,10 @@ def resolve_redirect(target_url: str) -> Optional[str]: def scrape_series_detail( series_identifier: str, - max_seasons: Optional[int] = None, + max_seasons: int | None = None, *, load_episodes: bool = True, -) -> List[SeasonInfo]: +) -> list[SeasonInfo]: _ensure_requests() series_url = 
_series_root_url(_normalize_series_url(series_identifier)) _log_url(series_url, kind="SERIES") @@ -1110,9 +1110,9 @@ def scrape_series_detail( season_links.sort(key=lambda item: item[0]) if max_seasons is not None: season_links = season_links[:max_seasons] - seasons: List[SeasonInfo] = [] + seasons: list[SeasonInfo] = [] for number, url in season_links: - episodes: List[EpisodeInfo] = [] + episodes: list[EpisodeInfo] = [] if load_episodes: season_soup = _get_soup(url, session=session) episodes = _extract_episodes(season_soup) @@ -1129,27 +1129,27 @@ class SerienstreamPlugin(BasisPlugin): POPULAR_GENRE_LABEL = "Haeufig gesehen" def __init__(self) -> None: - self._series_results: Dict[str, SeriesResult] = {} - self._title_url_cache: Dict[str, str] = self._load_title_url_cache() - self._genre_names_cache: Optional[List[str]] = None - self._season_cache: Dict[str, List[SeasonInfo]] = {} - self._season_links_cache: Dict[str, List[SeasonInfo]] = {} - self._episode_label_cache: Dict[Tuple[str, str], Dict[str, EpisodeInfo]] = {} - self._catalog_cache: Optional[Dict[str, List[SeriesResult]]] = None - self._genre_group_cache: Dict[str, Dict[str, List[str]]] = {} - self._genre_page_entries_cache: Dict[Tuple[str, int], List[SeriesResult]] = {} - self._genre_page_has_more_cache: Dict[Tuple[str, int], bool] = {} - self._popular_cache: Optional[List[SeriesResult]] = None + self._series_results: dict[str, SeriesResult] = {} + self._title_url_cache: dict[str, str] = self._load_title_url_cache() + self._genre_names_cache: Optional[list[str]] = None + self._season_cache: dict[str, list[SeasonInfo]] = {} + self._season_links_cache: dict[str, list[SeasonInfo]] = {} + self._episode_label_cache: dict[tuple[str, str], dict[str, EpisodeInfo]] = {} + self._catalog_cache: Optional[dict[str, list[SeriesResult]]] = None + self._genre_group_cache: dict[str, dict[str, list[str]]] = {} + self._genre_page_entries_cache: dict[tuple[str, int], list[SeriesResult]] = {} + 
self._genre_page_has_more_cache: dict[tuple[str, int], bool] = {} + self._popular_cache: Optional[list[SeriesResult]] = None self._requests_available = REQUESTS_AVAILABLE - self._default_preferred_hosters: List[str] = list(DEFAULT_PREFERRED_HOSTERS) - self._preferred_hosters: List[str] = list(self._default_preferred_hosters) - self._hoster_cache: Dict[Tuple[str, str, str], List[str]] = {} - self._latest_cache: Dict[int, List[LatestEpisode]] = {} - self._latest_hoster_cache: Dict[str, List[str]] = {} - self._series_metadata_cache: Dict[str, Tuple[Dict[str, str], Dict[str, str]]] = {} + self._default_preferred_hosters: list[str] = list(DEFAULT_PREFERRED_HOSTERS) + self._preferred_hosters: list[str] = list(self._default_preferred_hosters) + self._hoster_cache: dict[tuple[str, str, str], list[str]] = {} + self._latest_cache: dict[int, list[LatestEpisode]] = {} + self._latest_hoster_cache: dict[str, list[str]] = {} + self._series_metadata_cache: dict[str, tuple[dict[str, str], dict[str, str]]] = {} self._series_metadata_full: set[str] = set() self.is_available = True - self.unavailable_reason: Optional[str] = None + self.unavailable_reason: str | None = None if not self._requests_available: # pragma: no cover - optional dependency self.is_available = False self.unavailable_reason = ( @@ -1163,11 +1163,11 @@ class SerienstreamPlugin(BasisPlugin): print(f"Importfehler: {REQUESTS_IMPORT_ERROR}") return - def _load_title_url_cache(self) -> Dict[str, str]: + def _load_title_url_cache(self) -> dict[str, str]: raw = _session_cache_get("title_urls") if not isinstance(raw, dict): return {} - result: Dict[str, str] = {} + result: dict[str, str] = {} for key, value in raw.items(): key_text = str(key or "").strip().casefold() url_text = str(value or "").strip() @@ -1205,7 +1205,7 @@ class SerienstreamPlugin(BasisPlugin): def _metadata_cache_key(title: str) -> str: return (title or "").strip().casefold() - def _series_for_title(self, title: str) -> Optional[SeriesResult]: + def 
_series_for_title(self, title: str) -> SeriesResult | None: direct = self._series_results.get(title) if direct and direct.url: return direct @@ -1228,11 +1228,11 @@ class SerienstreamPlugin(BasisPlugin): digest = hashlib.sha1((season_url or "").encode("utf-8")).hexdigest()[:20] return f"season_episodes.{digest}" - def _load_session_season_links(self, series_url: str) -> Optional[List[SeasonInfo]]: + def _load_session_season_links(self, series_url: str) -> Optional[list[SeasonInfo]]: raw = _session_cache_get(self._season_links_cache_name(series_url)) if not isinstance(raw, list): return None - seasons: List[SeasonInfo] = [] + seasons: list[SeasonInfo] = [] for item in raw: if not isinstance(item, dict): continue @@ -1249,16 +1249,16 @@ class SerienstreamPlugin(BasisPlugin): seasons.sort(key=lambda s: s.number) return seasons - def _save_session_season_links(self, series_url: str, seasons: List[SeasonInfo]) -> None: + def _save_session_season_links(self, series_url: str, seasons: list[SeasonInfo]) -> None: payload = [{"number": int(season.number), "url": season.url} for season in seasons if season.url] if payload: _session_cache_set(self._season_links_cache_name(series_url), payload) - def _load_session_season_episodes(self, season_url: str) -> Optional[List[EpisodeInfo]]: + def _load_session_season_episodes(self, season_url: str) -> Optional[list[EpisodeInfo]]: raw = _session_cache_get(self._season_episodes_cache_name(season_url)) if not isinstance(raw, list): return None - episodes: List[EpisodeInfo] = [] + episodes: list[EpisodeInfo] = [] for item in raw: if not isinstance(item, dict): continue @@ -1290,7 +1290,7 @@ class SerienstreamPlugin(BasisPlugin): episodes.sort(key=lambda item: item.number) return episodes - def _save_session_season_episodes(self, season_url: str, episodes: List[EpisodeInfo]) -> None: + def _save_session_season_episodes(self, season_url: str, episodes: list[EpisodeInfo]) -> None: payload = [] for item in episodes: payload.append( @@ -1307,7 
+1307,7 @@ class SerienstreamPlugin(BasisPlugin): if payload: _session_cache_set(self._season_episodes_cache_name(season_url), payload) - def _ensure_catalog(self) -> Dict[str, List[SeriesResult]]: + def _ensure_catalog(self) -> dict[str, list[SeriesResult]]: if self._catalog_cache is not None: return self._catalog_cache # Stand: 2026-01 liefert `?by=genre` konsistente Gruppen für `genres()`. @@ -1317,7 +1317,7 @@ class SerienstreamPlugin(BasisPlugin): _session_cache_set("genres", sorted(self._catalog_cache.keys(), key=str.casefold)) return self._catalog_cache - def _ensure_genre_names(self) -> List[str]: + def _ensure_genre_names(self) -> list[str]: if self._genre_names_cache is not None: return list(self._genre_names_cache) @@ -1341,7 +1341,7 @@ class SerienstreamPlugin(BasisPlugin): cached = _session_cache_get("genres") if isinstance(cached, list): - genres: List[str] = [] + genres: list[str] = [] for value in cached: normalized = _normalize_cached_genre(value) if normalized: @@ -1364,7 +1364,7 @@ class SerienstreamPlugin(BasisPlugin): _session_cache_set("genres", self._genre_names_cache) return list(self._genre_names_cache) - def genres(self) -> List[str]: + def genres(self) -> list[str]: """Optional: Liefert alle Genres aus dem Serien-Katalog.""" if not self._requests_available: return [] @@ -1374,7 +1374,7 @@ class SerienstreamPlugin(BasisPlugin): """Meldet unterstützte Features für Router-Menüs.""" return {"popular_series", "genres", "latest_episodes"} - def popular_series(self) -> List[str]: + def popular_series(self) -> list[str]: """Liefert die Titel der beliebten Serien (Quelle: `/beliebte-serien`).""" if not self._requests_available: return [] @@ -1383,7 +1383,7 @@ class SerienstreamPlugin(BasisPlugin): self._remember_series_result(entry.title, entry.url, entry.description) return [entry.title for entry in entries if entry.title] - def titles_for_genre(self, genre: str) -> List[str]: + def titles_for_genre(self, genre: str) -> list[str]: """Optional: 
Liefert Titel für ein Genre.""" if not self._requests_available: return [] @@ -1438,12 +1438,12 @@ class SerienstreamPlugin(BasisPlugin): return "U" <= key <= "Z" return False - def _ensure_genre_group_cache(self, genre: str) -> Dict[str, List[str]]: + def _ensure_genre_group_cache(self, genre: str) -> dict[str, list[str]]: cached = self._genre_group_cache.get(genre) if cached is not None: return cached titles = self.titles_for_genre(genre) - grouped: Dict[str, List[str]] = {} + grouped: dict[str, list[str]] = {} for title in titles: for code in ("A-E", "F-J", "K-O", "P-T", "U-Z", "0-9"): if self._group_matches(code, title): @@ -1482,7 +1482,7 @@ class SerienstreamPlugin(BasisPlugin): def _card_description(anchor: BeautifulSoupT) -> str: if not anchor: return "" - candidates: List[str] = [] + candidates: list[str] = [] direct = (anchor.get("data-search") or "").strip() if direct: candidates.append(direct) @@ -1514,8 +1514,8 @@ class SerienstreamPlugin(BasisPlugin): return cleaned return "" - def _parse_genre_entries_from_soup(self, soup: BeautifulSoupT) -> List[SeriesResult]: - entries: List[SeriesResult] = [] + def _parse_genre_entries_from_soup(self, soup: BeautifulSoupT) -> list[SeriesResult]: + entries: list[SeriesResult] = [] seen_urls: set[str] = set() def _add_entry(title: str, description: str, href: str, cover: str) -> None: @@ -1565,7 +1565,7 @@ class SerienstreamPlugin(BasisPlugin): _add_entry(title, description, href, cover) return entries - def _fetch_genre_page_entries(self, genre: str, page: int) -> Tuple[List[SeriesResult], bool]: + def _fetch_genre_page_entries(self, genre: str, page: int) -> tuple[list[SeriesResult], bool]: slug = self._genre_slug(genre) if not slug: return [], False @@ -1604,7 +1604,7 @@ class SerienstreamPlugin(BasisPlugin): self._genre_page_has_more_cache[cache_key] = bool(has_more) return list(entries), bool(has_more) - def titles_for_genre_page(self, genre: str, page: int) -> List[str]: + def titles_for_genre_page(self, 
genre: str, page: int) -> list[str]: genre = (genre or "").strip() page = max(1, int(page or 1)) entries, _ = self._fetch_genre_page_entries(genre, page) @@ -1623,13 +1623,13 @@ class SerienstreamPlugin(BasisPlugin): _, has_more = self._fetch_genre_page_entries(genre, page) return bool(has_more) - def titles_for_genre_group_page(self, genre: str, group_code: str, page: int = 1, page_size: int = 10) -> List[str]: + def titles_for_genre_group_page(self, genre: str, group_code: str, page: int = 1, page_size: int = 10) -> list[str]: genre = (genre or "").strip() group_code = (group_code or "").strip() page = max(1, int(page or 1)) page_size = max(1, int(page_size or 10)) needed = page * page_size + 1 - matched: List[str] = [] + matched: list[str] = [] try: page_index = 1 has_more = True @@ -1677,12 +1677,12 @@ class SerienstreamPlugin(BasisPlugin): titles = grouped.get(group_code, []) return len(titles) > (page * page_size) - def _ensure_popular(self) -> List[SeriesResult]: + def _ensure_popular(self) -> list[SeriesResult]: """Laedt und cached die Liste der beliebten Serien aus `/beliebte-serien`.""" if self._popular_cache is not None: return list(self._popular_cache) soup = _get_soup_simple(_popular_series_url()) - results: List[SeriesResult] = [] + results: list[SeriesResult] = [] seen: set[str] = set() # Neues Layout (Stand: 2026-01): Abschnitt "Meistgesehen" hat Karten mit @@ -1723,7 +1723,7 @@ class SerienstreamPlugin(BasisPlugin): @staticmethod def _episode_label(info: EpisodeInfo) -> str: - suffix_parts: List[str] = [] + suffix_parts: list[str] = [] if info.original_title: suffix_parts.append(info.original_title) # Staffel nicht im Episoden-Label anzeigen (wird im UI bereits gesetzt). 
@@ -1732,7 +1732,7 @@ class SerienstreamPlugin(BasisPlugin): return f"Episode {info.number}: {info.title}{suffix}" @staticmethod - def _parse_season_number(label: str) -> Optional[int]: + def _parse_season_number(label: str) -> int | None: digits = "".join(ch for ch in label if ch.isdigit()) if not digits: return None @@ -1752,7 +1752,7 @@ class SerienstreamPlugin(BasisPlugin): self._episode_label(info): info for info in season_info.episodes } - def _ensure_season_links(self, title: str) -> List[SeasonInfo]: + def _ensure_season_links(self, title: str) -> list[SeasonInfo]: cached = self._season_links_cache.get(title) if cached is not None: return list(cached) @@ -1816,7 +1816,7 @@ class SerienstreamPlugin(BasisPlugin): return self._remember_series_result(title, series_url) - def metadata_for(self, title: str) -> Tuple[Dict[str, str], Dict[str, str], Optional[List[Any]]]: + def metadata_for(self, title: str) -> tuple[dict[str, str], dict[str, str], Optional[list[Any]]]: title = (title or "").strip() if not title or not self._requests_available: return {}, {}, None @@ -1833,8 +1833,8 @@ class SerienstreamPlugin(BasisPlugin): self._series_metadata_cache[cache_key] = (dict(info), {}) return info, {}, None - info: Dict[str, str] = dict(cached[0]) if cached else {"title": title} - art: Dict[str, str] = dict(cached[1]) if cached else {} + info: dict[str, str] = dict(cached[0]) if cached else {"title": title} + art: dict[str, str] = dict(cached[1]) if cached else {} info.setdefault("title", title) if series.description: info.setdefault("plot", series.description) @@ -1873,7 +1873,7 @@ class SerienstreamPlugin(BasisPlugin): return entry.url return "" - def _ensure_season_episodes(self, title: str, season_number: int) -> Optional[SeasonInfo]: + def _ensure_season_episodes(self, title: str, season_number: int) -> SeasonInfo | None: seasons = self._season_cache.get(title) or [] for season in seasons: if season.number == season_number and season.episodes: @@ -1903,7 +1903,7 @@ 
class SerienstreamPlugin(BasisPlugin): self._save_session_season_episodes(target.url, season_info.episodes) return season_info - def _lookup_episode(self, title: str, season_label: str, episode_label: str) -> Optional[EpisodeInfo]: + def _lookup_episode(self, title: str, season_label: str, episode_label: str) -> EpisodeInfo | None: cache_key = (title, season_label) cached = self._episode_label_cache.get(cache_key) if cached: @@ -1917,7 +1917,7 @@ class SerienstreamPlugin(BasisPlugin): return self._episode_label_cache.get(cache_key, {}).get(episode_label) return None - async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> list[str]: query = query.strip() if not query: self._series_results.clear() @@ -1952,7 +1952,7 @@ class SerienstreamPlugin(BasisPlugin): _emit_progress(progress_callback, f"Treffer aufbereitet: {len(results)}", 95) return [result.title for result in results] - def _ensure_seasons(self, title: str) -> List[SeasonInfo]: + def _ensure_seasons(self, title: str) -> list[SeasonInfo]: if title in self._season_cache: seasons = self._season_cache[title] # Auch bei Cache-Treffern die URLs loggen, damit nachvollziehbar bleibt, @@ -1986,11 +1986,11 @@ class SerienstreamPlugin(BasisPlugin): self._season_cache[title] = list(seasons) return list(seasons) - def seasons_for(self, title: str) -> List[str]: + def seasons_for(self, title: str) -> list[str]: seasons = self._ensure_seasons(title) return [self._season_label(season.number) for season in seasons] - def episodes_for(self, title: str, season: str) -> List[str]: + def episodes_for(self, title: str, season: str) -> list[str]: number = self._parse_season_number(season) if number is None: return [] @@ -2001,7 +2001,7 @@ class SerienstreamPlugin(BasisPlugin): return labels return [] - def stream_link_for(self, title: str, season: str, episode: str) -> Optional[str]: + def 
stream_link_for(self, title: str, season: str, episode: str) -> str | None: if not self._requests_available: raise RuntimeError("SerienstreamPlugin kann ohne requests/bs4 keine Stream-Links liefern.") episode_info = self._lookup_episode(title, season, episode) @@ -2030,7 +2030,7 @@ class SerienstreamPlugin(BasisPlugin): return episode_info.url return "" - def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]: + def available_hosters_for(self, title: str, season: str, episode: str) -> list[str]: if not self._requests_available: raise RuntimeError("SerienstreamPlugin kann ohne requests/bs4 keine Hoster laden.") cache_key = (title, season, episode) @@ -2048,7 +2048,7 @@ class SerienstreamPlugin(BasisPlugin): self._hoster_cache[cache_key] = list(names) return list(names) - def latest_episodes(self, page: int = 1) -> List[LatestEpisode]: + def latest_episodes(self, page: int = 1) -> list[LatestEpisode]: """Liefert die neuesten Episoden aus `/neue-episoden`.""" if not self._requests_available: return [] @@ -2069,7 +2069,7 @@ class SerienstreamPlugin(BasisPlugin): self._latest_cache[page] = list(episodes) return list(episodes) - def available_hosters_for_url(self, episode_url: str) -> List[str]: + def available_hosters_for_url(self, episode_url: str) -> list[str]: if not self._requests_available: raise RuntimeError("SerienstreamPlugin kann ohne requests/bs4 keine Hoster laden.") normalized = _absolute_url(episode_url) @@ -2083,7 +2083,7 @@ class SerienstreamPlugin(BasisPlugin): self._latest_hoster_cache[normalized] = list(names) return list(names) - def stream_link_for_url(self, episode_url: str) -> Optional[str]: + def stream_link_for_url(self, episode_url: str) -> str | None: if not self._requests_available: raise RuntimeError("SerienstreamPlugin kann ohne requests/bs4 keine Stream-Links liefern.") normalized = _absolute_url(episode_url) @@ -2098,7 +2098,7 @@ class SerienstreamPlugin(BasisPlugin): except Exception as exc: # pragma: no cover 
- defensive logging raise RuntimeError(f"Stream-Link konnte nicht geladen werden: {exc}") from exc - def resolve_stream_link(self, link: str) -> Optional[str]: + def resolve_stream_link(self, link: str) -> str | None: if not self._requests_available: raise RuntimeError("SerienstreamPlugin kann ohne requests/bs4 keine Stream-Links aufloesen.") try: @@ -2120,7 +2120,7 @@ class SerienstreamPlugin(BasisPlugin): except Exception as exc: # pragma: no cover - defensive logging raise RuntimeError(f"Stream-Link konnte nicht verfolgt werden: {exc}") from exc - def set_preferred_hosters(self, hosters: List[str]) -> None: + def set_preferred_hosters(self, hosters: list[str]) -> None: normalized = [hoster.strip().lower() for hoster in hosters if hoster.strip()] if normalized: self._preferred_hosters = normalized diff --git a/addon/plugins/topstreamfilm_plugin.py b/addon/plugins/topstreamfilm_plugin.py index 542c432..1281c9e 100644 --- a/addon/plugins/topstreamfilm_plugin.py +++ b/addon/plugins/topstreamfilm_plugin.py @@ -218,8 +218,10 @@ class TopstreamfilmPlugin(BasisPlugin): if directory and not xbmcvfs.exists(directory): xbmcvfs.mkdirs(directory) handle = xbmcvfs.File(path, "w") - handle.write(payload) - handle.close() + try: + handle.write(payload) + finally: + handle.close() else: with open(path, "w", encoding="utf-8") as handle: handle.write(payload) @@ -283,8 +285,10 @@ class TopstreamfilmPlugin(BasisPlugin): if directory and not xbmcvfs.exists(directory): xbmcvfs.mkdirs(directory) handle = xbmcvfs.File(path, "w") - handle.write(payload) - handle.close() + try: + handle.write(payload) + finally: + handle.close() else: with open(path, "w", encoding="utf-8") as handle: handle.write(payload) @@ -371,9 +375,6 @@ class TopstreamfilmPlugin(BasisPlugin): message=message, ) - def capabilities(self) -> set[str]: - return {"genres", "popular_series"} - def _popular_url(self) -> str: return self._absolute_url("/beliebte-filme-online.html") @@ -1162,14 +1163,80 @@ class 
TopstreamfilmPlugin(BasisPlugin): return hosters.get(first_name) def resolve_stream_link(self, link: str) -> Optional[str]: + from plugin_helpers import resolve_via_resolveurl + return resolve_via_resolveurl(link, fallback_to_link=True) + + def capabilities(self) -> set[str]: + return {"genres", "popular_series", "year_filter", "latest_titles"} + + def years_available(self) -> List[str]: + """Liefert verfügbare Erscheinungsjahre (aktuelles Jahr bis 1980).""" + import datetime + current_year = datetime.date.today().year + return [str(y) for y in range(current_year, 1979, -1)] + + def titles_for_year(self, year: str, page: int = 1) -> List[str]: + """Liefert Titel für ein bestimmtes Erscheinungsjahr. + + URL-Muster: /xfsearch/{year}/ oder /xfsearch/{year}/page/{n}/ + """ + year = (year or "").strip() + if not year or not REQUESTS_AVAILABLE or BeautifulSoup is None: + return [] + page = max(1, int(page or 1)) + base = self._get_base_url() + if page == 1: + url = f"{base}/xfsearch/{year}/" + else: + url = f"{base}/xfsearch/{year}/page/{page}/" try: - from resolveurl_backend import resolve as resolve_with_resolveurl + soup = self._get_soup(url) except Exception: - resolve_with_resolveurl = None - if callable(resolve_with_resolveurl): - resolved = resolve_with_resolveurl(link) - return resolved or link - return link + return [] + hits = self._parse_listing_titles(soup) + titles: List[str] = [] + seen: set[str] = set() + for hit in hits: + if hit.title in seen: + continue + seen.add(hit.title) + self._title_to_url[hit.title] = hit.url + self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster) + titles.append(hit.title) + if titles: + self._save_title_url_cache() + return titles + + def latest_titles(self, page: int = 1) -> List[str]: + """Liefert neu hinzugefügte Filme. 
+ + URL-Muster: /neueste-filme/ oder /neueste-filme/page/{n}/ + """ + if not REQUESTS_AVAILABLE or BeautifulSoup is None: + return [] + page = max(1, int(page or 1)) + base = self._get_base_url() + if page == 1: + url = f"{base}/neueste-filme/" + else: + url = f"{base}/neueste-filme/page/{page}/" + try: + soup = self._get_soup(url) + except Exception: + return [] + hits = self._parse_listing_titles(soup) + titles: List[str] = [] + seen: set[str] = set() + for hit in hits: + if hit.title in seen: + continue + seen.add(hit.title) + self._title_to_url[hit.title] = hit.url + self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster) + titles.append(hit.title) + if titles: + self._save_title_url_cache() + return titles # Alias für die automatische Plugin-Erkennung. diff --git a/addon/regex_patterns.py b/addon/regex_patterns.py index c3c0b08..3ed31b3 100644 --- a/addon/regex_patterns.py +++ b/addon/regex_patterns.py @@ -3,6 +3,7 @@ Keep common patterns in one place to avoid accidental double-escaping (e.g. \"\\\\d\"). """ +from __future__ import annotations SEASON_EPISODE_TAG = r"S\s*(\d+)\s*E\s*(\d+)" SEASON_EPISODE_URL = r"/staffel-(\d+)/episode-(\d+)" diff --git a/addon/resources/settings.xml b/addon/resources/settings.xml index e65da04..daacbaa 100644 --- a/addon/resources/settings.xml +++ b/addon/resources/settings.xml @@ -55,6 +55,17 @@ + + + + + + + + + + + diff --git a/addon/tmdb.py b/addon/tmdb.py index 7ae2d15..162429e 100644 --- a/addon/tmdb.py +++ b/addon/tmdb.py @@ -557,3 +557,51 @@ def lookup_tv_season( continue result[ep_number] = TmdbEpisodeMeta(plot=plot, thumb=thumb, runtime_minutes=runtime_minutes) return result or None + + +# --------------------------------------------------------------------------- +# External IDs (IMDb, TVDb) – für Trakt-Integration +# --------------------------------------------------------------------------- + +@dataclass(frozen=True) +class TmdbExternalIds: + imdb_id: str # z.B. 
"tt1234567" + tvdb_id: int # TheTVDB-ID + + +def fetch_external_ids( + *, + kind: str, + tmdb_id: int, + api_key: str, + timeout: int = 15, + log: Callable[[str], None] | None = None, + log_responses: bool = False, +) -> Optional[TmdbExternalIds]: + """Ruft IMDb-ID und TVDb-ID via /movie/{id}/external_ids oder /tv/{id}/external_ids ab.""" + if requests is None or not tmdb_id: + return None + api_key = (api_key or "").strip() + if not api_key: + return None + kind = (kind or "").strip() + if kind not in ("movie", "tv"): + return None + params = {"api_key": api_key} + url = f"{TMDB_API_BASE}/{kind}/{tmdb_id}/external_ids?{urlencode(params)}" + status, payload, body_text = _tmdb_get_json( + url=url, timeout=timeout, log=log, log_responses=log_responses, + ) + if callable(log): + log(f"TMDB RESPONSE /{kind}/{{id}}/external_ids status={status}") + if status != 200 or not isinstance(payload, dict): + return None + imdb_id = (payload.get("imdb_id") or "").strip() + tvdb_id = 0 + try: + tvdb_id = int(payload.get("tvdb_id") or 0) + except (ValueError, TypeError): + tvdb_id = 0 + if not imdb_id and not tvdb_id: + return None + return TmdbExternalIds(imdb_id=imdb_id, tvdb_id=tvdb_id) diff --git a/tests/README_LOCAL.md b/tests/README_LOCAL.md new file mode 100644 index 0000000..9b495db --- /dev/null +++ b/tests/README_LOCAL.md @@ -0,0 +1,3 @@ +Diese Tests sind lokal (nicht committen). 
Ausführen mit: + +pytest -q diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..65b2ad6 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,10 @@ +import sys +from pathlib import Path + +ROOT = Path(__file__).resolve().parents[1] +ADDON = ROOT / "addon" + +for path in (ROOT, ADDON): + value = str(path) + if value not in sys.path: + sys.path.insert(0, value) diff --git a/tests/test_dokustreams_live.py b/tests/test_dokustreams_live.py new file mode 100644 index 0000000..b0cd171 --- /dev/null +++ b/tests/test_dokustreams_live.py @@ -0,0 +1,30 @@ +import re +import shutil +import subprocess + +import pytest + + +@pytest.mark.live +def test_dokustreams_embed_url_via_curl(): + if shutil.which('curl') is None: + pytest.skip('curl not available') + + url = 'https://doku-streams.com/verbrechen/deutsche-im-knast-japan-und-die-disziplin/' + result = subprocess.run( + ['curl', '-L', '-s', '--compressed', url], + check=False, + capture_output=True, + text=False, + ) + assert result.returncode == 0 + html = result.stdout.decode('utf-8', errors='ignore') + assert html + + iframe_match = re.search(r']+src="([^"]+)"', html, re.IGNORECASE) + if iframe_match is None: + iframe_match = re.search(r'"embedUrl"\s*:\s*"([^"]+)"', html) + assert iframe_match is not None + + src = iframe_match.group(1) + assert 'youtube' in src or 'vimeo' in src diff --git a/tests/test_filmpalast_genres.py b/tests/test_filmpalast_genres.py new file mode 100644 index 0000000..13584fa --- /dev/null +++ b/tests/test_filmpalast_genres.py @@ -0,0 +1,41 @@ +from bs4 import BeautifulSoup + +from addon.plugins import filmpalast_plugin as fp + + +def _soup(html: str): + return BeautifulSoup(html, "html.parser") + + +def test_genres_parse_sidebar(monkeypatch): + html = """ + + """ + monkeypatch.setattr(fp, "_get_soup", lambda *args, **kwargs: _soup(html)) + plugin = fp.FilmpalastPlugin() + genres = plugin.genres() + assert genres == ["Action", "Drama"] + + +def 
test_titles_for_genre_page_parsing(monkeypatch): + html = """ + + + + """ + plugin = fp.FilmpalastPlugin() + plugin._genre_to_url = {"Action": "https://filmpalast.to/search/genre/Action"} + monkeypatch.setattr(fp, "_get_soup", lambda *args, **kwargs: _soup(html)) + + titles = plugin.titles_for_genre_page("Action", 1) + + assert titles == ["Test Film", "Test Show"] + assert plugin.seasons_for("Test Show") == ["Staffel 1"] + diff --git a/tests/test_filmpalast_series_grouping.py b/tests/test_filmpalast_series_grouping.py new file mode 100644 index 0000000..3d9ead6 --- /dev/null +++ b/tests/test_filmpalast_series_grouping.py @@ -0,0 +1,45 @@ +import asyncio + +from addon.plugins.filmpalast_plugin import FilmpalastPlugin, SearchHit + + +def _fake_hits(_query: str) -> list[SearchHit]: + return [ + SearchHit(title="Star Trek S01E01 Pilot", url="https://filmpalast.to/stream/star-trek-s01e01"), + SearchHit(title="Star Trek S01E02 Zweiter Kontakt", url="https://filmpalast.to/stream/star-trek-s01e02"), + SearchHit(title="Ein Hund namens Palma", url="https://filmpalast.to/stream/ein-hund-namens-palma"), + ] + + +def test_search_groups_series_and_movies(monkeypatch): + plugin = FilmpalastPlugin() + monkeypatch.setattr(plugin, "_search_hits", _fake_hits) + + titles = asyncio.run(plugin.search_titles("trek")) + + assert titles == ["Ein Hund namens Palma", "Star Trek"] + + +def test_series_seasons_and_episodes(monkeypatch): + plugin = FilmpalastPlugin() + monkeypatch.setattr(plugin, "_search_hits", _fake_hits) + + asyncio.run(plugin.search_titles("trek")) + + assert plugin.is_movie("Star Trek") is False + assert plugin.seasons_for("Star Trek") == ["Staffel 1"] + assert plugin.episodes_for("Star Trek", "Staffel 1") == [ + "Episode 1 - Pilot", + "Episode 2 - Zweiter Kontakt", + ] + + +def test_movie_path_stays_unchanged(monkeypatch): + plugin = FilmpalastPlugin() + monkeypatch.setattr(plugin, "_search_hits", _fake_hits) + + asyncio.run(plugin.search_titles("hund")) + + assert 
plugin.is_movie("Ein Hund namens Palma") is True + assert plugin.seasons_for("Ein Hund namens Palma") == ["Film"] + assert plugin.episodes_for("Ein Hund namens Palma", "Film") == ["Stream"] diff --git a/tests/test_serienstream_parser.py b/tests/test_serienstream_parser.py new file mode 100644 index 0000000..c1c2378 --- /dev/null +++ b/tests/test_serienstream_parser.py @@ -0,0 +1,29 @@ +import pytest +from bs4 import BeautifulSoup + +from addon.plugins.serienstream_plugin import _extract_episodes + + +def test_extract_episodes_skips_upcoming(): + html = """ + + + + + + + + + + + + + +
1Ep1VOE
2 + + DEMNÄCHST + — TBA —
+ """ + soup = BeautifulSoup(html, "html.parser") + episodes = _extract_episodes(soup) + assert [e.number for e in episodes] == [1] diff --git a/tests/test_serienstream_perf.py b/tests/test_serienstream_perf.py new file mode 100644 index 0000000..a8e2627 --- /dev/null +++ b/tests/test_serienstream_perf.py @@ -0,0 +1,48 @@ +import os +import time +import pytest + +from addon.plugins.serienstream_plugin import SerienstreamPlugin + + +@pytest.mark.live +@pytest.mark.perf +def test_live_titel_staffel_episode_timing(): + if not os.getenv("LIVE_TESTS"): + pytest.skip("LIVE_TESTS not set") + + title = os.getenv("LIVE_TITLE", "Star Trek: Starfleet Academy") + season = os.getenv("LIVE_SEASON", "Staffel 1") + + max_title_to_season = float(os.getenv("PERF_MAX_TITLE_TO_SEASON", "6.0")) + max_season_to_episodes = float(os.getenv("PERF_MAX_SEASON_TO_EPISODES", "5.0")) + + plugin = SerienstreamPlugin() + + t0 = time.perf_counter() + seasons = plugin.seasons_for(title) + t1 = time.perf_counter() + + assert seasons, f"Keine Staffeln für Titel gefunden: {title}" + assert season in seasons, f"Gewünschte Staffel fehlt: {season}; vorhanden: {seasons}" + + episodes = plugin.episodes_for(title, season) + t2 = time.perf_counter() + + assert episodes, f"Keine Episoden für {title} / {season}" + + title_to_season = t1 - t0 + season_to_episodes = t2 - t1 + + print( + f"PERF title->seasons={title_to_season:.3f}s " + f"season->episodes={season_to_episodes:.3f}s " + f"episodes={len(episodes)}" + ) + + assert title_to_season <= max_title_to_season, ( + f"title->seasons zu langsam: {title_to_season:.3f}s > {max_title_to_season:.3f}s" + ) + assert season_to_episodes <= max_season_to_episodes, ( + f"season->episodes zu langsam: {season_to_episodes:.3f}s > {max_season_to_episodes:.3f}s" + ) diff --git a/tests/test_serienstream_user_actions.py b/tests/test_serienstream_user_actions.py new file mode 100644 index 0000000..96f8f62 --- /dev/null +++ b/tests/test_serienstream_user_actions.py @@ -0,0 +1,239 
@@ +import os +import pytest + +try: + from bs4 import BeautifulSoup +except Exception: # pragma: no cover - optional in local env + BeautifulSoup = None + +from addon.plugins import serienstream_plugin as sp + + +pytestmark = pytest.mark.skipif(BeautifulSoup is None, reason="bs4 not available") + + +def _soup(html: str): + return BeautifulSoup(html, "html.parser") + + +def test_search_series_api_first(monkeypatch): + """search_series() kombiniert API-Treffer mit Katalog-Cache (ohne Duplikate).""" + monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to") + monkeypatch.setattr(sp, "_search_series_api", lambda q: [ + sp.SeriesResult(title="Star Trek", description="", url="https://s.to/serie/star-trek"), + ]) + # Katalog-Cache: eine bekannte + eine neue URL + cache_items = [ + sp.SeriesResult(title="Star Trek", description="", url="https://s.to/serie/star-trek"), # Duplikat + sp.SeriesResult(title="Star Trek: Academy", description="", url="https://s.to/serie/star-trek-academy"), + ] + monkeypatch.setattr(sp, "_load_catalog_index_from_cache", lambda: cache_items) + results = sp.search_series("trek") + titles = [r.title for r in results] + # API-Treffer zuerst, Duplikate (gleiche URL) werden entfernt + assert titles[0] == "Star Trek" + assert "Star Trek: Academy" in titles + assert titles.count("Star Trek") == 1 + + +def test_search_series_falls_back_to_catalog_cache(monkeypatch): + """Wenn API und Server-Suche leer sind, wird der Katalog-Cache als Fallback genutzt.""" + monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to") + # API und Server-Suche liefern nichts + monkeypatch.setattr(sp, "_search_series_api", lambda q: []) + monkeypatch.setattr(sp, "_search_series_server", lambda q: []) + # Katalog-Cache mit Testdaten fuellen + cache_items = [ + sp.SeriesResult(title="Der Hund", description="", url="https://s.to/serie/der-hund"), + sp.SeriesResult(title="Hundeleben", description="", url="https://s.to/serie/hundeleben"), + ] + monkeypatch.setattr(sp, 
"_load_catalog_index_from_cache", lambda: cache_items) + results = sp.search_series("hund") + titles = [r.title for r in results] + # Nur Ganzwort-Treffer (nicht Hundeleben) + assert titles == ["Der Hund"] + + +def test_extract_season_links(): + html = """ + + """ + seasons = sp._extract_season_links(_soup(html)) + assert seasons == [(1, "https://s.to/serie/x/staffel-1"), (2, "https://s.to/serie/x/staffel-2")] + + +def test_extract_episodes_skips_upcoming_and_tba(): + html = """ + + + + + + + + + + + + + +
1Ep1VOE
2 + + DEMNÄCHST + — TBA —
+ """ + episodes = sp._extract_episodes(_soup(html)) + assert [e.number for e in episodes] == [1] + + +def test_fetch_episode_hoster_names(monkeypatch): + html = """ + + + """ + + def fake_get_soup(url, session=None): + return _soup(html) + + monkeypatch.setattr(sp, "_get_soup", fake_get_soup) + monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to") + names = sp.fetch_episode_hoster_names("/serie/x/staffel-1/episode-1") + assert names == ["VOE", "Vidoza"] + + +def test_fetch_episode_stream_link_prefers_requested_hoster(monkeypatch): + html = """ + + + """ + + def fake_get_soup(url, session=None): + return _soup(html) + + monkeypatch.setattr(sp, "_get_soup", fake_get_soup) + monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to") + link = sp.fetch_episode_stream_link("/serie/x/staffel-1/episode-1", preferred_hosters=["vidoza"]) + assert link == "https://s.to/redirect/vidoza" + + +def test_extract_latest_episodes(): + html = """ + + Show X + S 1 + E 2 + Heute + + """ + episodes = sp._extract_latest_episodes(_soup(html)) + assert len(episodes) == 1 + assert episodes[0].series_title == "Show X" + assert episodes[0].season == 1 + assert episodes[0].episode == 2 + + +def test_episode_url_for_uses_episode_cache(monkeypatch): + plugin = sp.SerienstreamPlugin() + info = sp.EpisodeInfo( + number=2, + title="Folge 2", + original_title="", + url="https://s.to/serie/x/staffel-1/episode-2", + ) + plugin._episode_label_cache[("Show X", "Staffel 1")] = {"Episode 2: Folge 2": info} + + called = {"lookup": False} + + def _fail_lookup(*_args, **_kwargs): + called["lookup"] = True + return None + + monkeypatch.setattr(plugin, "_lookup_episode", _fail_lookup) + + url = plugin.episode_url_for("Show X", "Staffel 1", "Episode 2: Folge 2") + assert url == "https://s.to/serie/x/staffel-1/episode-2" + assert called["lookup"] is False + + +def test_parse_series_catalog_groups_and_entries(): + html = """ +

Genre A

+
    +
  • + A +
  • +
+

Genre B

+
    +
  • + B +
  • +
+ """ + catalog = sp.parse_series_catalog(_soup(html)) + assert list(catalog.keys()) == ["Genre A", "Genre B"] + assert [e.title for e in catalog["Genre A"]] == ["A"] + assert [e.title for e in catalog["Genre B"]] == ["B"] + + +def test_titles_for_genre_from_catalog(monkeypatch): + html = """ +

Drama

+ + """ + monkeypatch.setattr(sp, "_get_soup_simple", lambda url: _soup(html)) + monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to") + plugin = sp.SerienstreamPlugin() + titles = plugin.titles_for_genre("Drama") + assert titles == ["Drama 1"] + + +def test_popular_series_parsing(monkeypatch): + html = """ +
+

Meistgesehen

+ + Popular 1 + + + Popular 2 + +
+ """ + monkeypatch.setattr(sp, "_get_soup_simple", lambda url: _soup(html)) + monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to") + plugin = sp.SerienstreamPlugin() + titles = plugin.popular_series() + assert titles == ["Popular 1", "Popular 2"] + + +@pytest.mark.live +def test_live_staffel_page_skips_upcoming(): + if not os.getenv("LIVE_TESTS"): + pytest.skip("LIVE_TESTS not set") + url = "https://s.to/serie/star-trek-starfleet-academy/staffel-1" + soup = sp._get_soup_simple(url) + rows = soup.select("table.episode-table tbody tr.episode-row") + upcoming_rows = [row for row in rows if "upcoming" in (row.get("class") or [])] + episodes = sp._extract_episodes(soup) + assert len(episodes) == len(rows) - len(upcoming_rows) + + +@pytest.mark.live +def test_live_genres_and_titles(): + if not os.getenv("LIVE_TESTS"): + pytest.skip("LIVE_TESTS not set") + plugin = sp.SerienstreamPlugin() + genres = plugin.genres() + assert isinstance(genres, list) and genres + sample = genres[0] + titles = plugin.titles_for_genre(sample) + assert isinstance(titles, list) diff --git a/tests/test_smoke.py b/tests/test_smoke.py new file mode 100644 index 0000000..d662922 --- /dev/null +++ b/tests/test_smoke.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +import sys +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +ADDON_DIR = ROOT / "addon" + +if str(ADDON_DIR) not in sys.path: + sys.path.insert(0, str(ADDON_DIR)) + + +def test_import_core_modules() -> None: + """Ein einfacher Smoke-Test, der sicherstellt, dass Kernmodule importierbar sind. + + Wichtig: Die Module sind so geschrieben, dass sie auch ohne Kodi-Umgebung + (ohne xbmc/xbmcgui) importiert werden koennen. 
+ """ + + import plugin_interface # noqa: F401 + import plugin_helpers # noqa: F401 + import http_session_pool # noqa: F401 + import tmdb # noqa: F401 + import metadata_utils # noqa: F401 + import resolveurl_backend # noqa: F401 + + diff --git a/tests/test_title_word_matching.py b/tests/test_title_word_matching.py new file mode 100644 index 0000000..bf4fe2b --- /dev/null +++ b/tests/test_title_word_matching.py @@ -0,0 +1,14 @@ +from addon.plugins import aniworld_plugin as ap +from addon.plugins import topstreamfilm_plugin as tp + + +def test_aniworld_matches_whole_words_only(): + assert ap._matches_query("hund", title="Der Hund") + assert not ap._matches_query("hund", title="Thunderstruck") + assert not ap._matches_query("hund", title="Hundeleben") + + +def test_topstream_matches_whole_words_only(): + assert tp._matches_query("hund", title="Der Hund", description="") + assert not tp._matches_query("hund", title="Thunderstruck", description="") + assert not tp._matches_query("hund", title="Hundeleben", description="")