diff --git a/.gitignore b/.gitignore index 6920558..d210053 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,6 @@ __pycache__/ *.pyc .coverage + +# Plugin runtime caches +/addon/plugins/*_cache.json diff --git a/README.md b/README.md index 79e8abd..55e9d16 100644 --- a/README.md +++ b/README.md @@ -2,40 +2,51 @@ ViewIT Logo -ViewIT ist ein Kodi‑Addon zum Durchsuchen und Abspielen von Inhalten der unterstützten Anbieter. +ViewIT ist ein Kodi Addon. +Es durchsucht Provider und startet Streams. ## Projektstruktur -- `addon/` Kodi‑Addon Quellcode -- `scripts/` Build‑Scripts (arbeiten mit `addon/` + `dist/`) -- `dist/` Build‑Ausgaben (ZIPs) -- `docs/`, `tests/` +- `addon/` Kodi Addon Quellcode +- `scripts/` Build Scripts +- `dist/` Build Ausgaben +- `docs/` Doku +- `tests/` Tests -## Build & Release -- Addon‑Ordner bauen: `./scripts/build_install_addon.sh` → `dist//` -- Kodi‑ZIP bauen: `./scripts/build_kodi_zip.sh` → `dist/-.zip` -- Addon‑Version in `addon/addon.xml` +## Build und Release +- Addon Ordner bauen: `./scripts/build_install_addon.sh` +- Kodi ZIP bauen: `./scripts/build_kodi_zip.sh` +- Version pflegen: `addon/addon.xml` +- Reproduzierbares ZIP: `SOURCE_DATE_EPOCH` optional setzen -## Lokales Kodi-Repository -- Repository bauen (inkl. 
ZIPs + `addons.xml` + `addons.xml.md5`): `./scripts/build_local_kodi_repo.sh` -- Lokal bereitstellen: `./scripts/serve_local_kodi_repo.sh` -- Standard-URL: `http://127.0.0.1:8080/repo/addons.xml` -- Optional eigene URL beim Build setzen: `REPO_BASE_URL=http://:/repo ./scripts/build_local_kodi_repo.sh` +## Lokales Kodi Repository +- Repository bauen: `./scripts/build_local_kodi_repo.sh` +- Repository starten: `./scripts/serve_local_kodi_repo.sh` +- Standard URL: `http://127.0.0.1:8080/repo/addons.xml` +- Eigene URL beim Build: `REPO_BASE_URL=http://:/repo ./scripts/build_local_kodi_repo.sh` -## Gitea Release-Asset Upload -- ZIP bauen: `./scripts/build_kodi_zip.sh` -- Token setzen: `export GITEA_TOKEN=` -- Asset an Tag hochladen (erstellt Release bei Bedarf): `./scripts/publish_gitea_release.sh` -- Optional: `--tag v0.1.50 --asset dist/plugin.video.viewit-0.1.50.zip` - -## Entwicklung (kurz) -- Hauptlogik: `addon/default.py` +## Entwicklung +- Router: `addon/default.py` - Plugins: `addon/plugins/*_plugin.py` -- Einstellungen: `addon/resources/settings.xml` +- Settings: `addon/resources/settings.xml` -## Tests mit Abdeckung -- Dev-Abhängigkeiten installieren: `./.venv/bin/pip install -r requirements-dev.txt` -- Tests + Coverage starten: `./.venv/bin/pytest` -- Optional (XML-Report): `./.venv/bin/pytest --cov-report=xml` +## TMDB API Key einrichten +- TMDB Account anlegen und API Key (v3) erstellen: `https://www.themoviedb.org/settings/api` +- In Kodi das ViewIT Addon oeffnen: `Einstellungen -> TMDB` +- `TMDB aktivieren` einschalten +- `TMDB API Key` eintragen +- Optional `TMDB Sprache` setzen (z. B. 
`de-DE`) +- Optional die Anzeige-Optionen aktivieren/deaktivieren: + - `TMDB Beschreibung anzeigen` + - `TMDB Poster und Vorschaubild anzeigen` + - `TMDB Fanart/Backdrop anzeigen` + - `TMDB Bewertung anzeigen` + - `TMDB Stimmen anzeigen` + - `TMDB Besetzung anzeigen` + +## Tests +- Dev Pakete installieren: `./.venv/bin/pip install -r requirements-dev.txt` +- Tests starten: `./.venv/bin/pytest` +- XML Report: `./.venv/bin/pytest --cov-report=xml` ## Dokumentation Siehe `docs/`. diff --git a/addon/__pycache__/http_session_pool.cpython-312.pyc b/addon/__pycache__/http_session_pool.cpython-312.pyc deleted file mode 100644 index 83e6a44..0000000 Binary files a/addon/__pycache__/http_session_pool.cpython-312.pyc and /dev/null differ diff --git a/addon/__pycache__/plugin_helpers.cpython-312.pyc b/addon/__pycache__/plugin_helpers.cpython-312.pyc deleted file mode 100644 index 449c2dd..0000000 Binary files a/addon/__pycache__/plugin_helpers.cpython-312.pyc and /dev/null differ diff --git a/addon/__pycache__/regex_patterns.cpython-312.pyc b/addon/__pycache__/regex_patterns.cpython-312.pyc deleted file mode 100644 index 4d7fc68..0000000 Binary files a/addon/__pycache__/regex_patterns.cpython-312.pyc and /dev/null differ diff --git a/addon/__pycache__/resolveurl_backend.cpython-312.pyc b/addon/__pycache__/resolveurl_backend.cpython-312.pyc deleted file mode 100644 index 1f14f61..0000000 Binary files a/addon/__pycache__/resolveurl_backend.cpython-312.pyc and /dev/null differ diff --git a/addon/__pycache__/tmdb.cpython-312.pyc b/addon/__pycache__/tmdb.cpython-312.pyc deleted file mode 100644 index 6e208ab..0000000 Binary files a/addon/__pycache__/tmdb.cpython-312.pyc and /dev/null differ diff --git a/addon/addon.xml b/addon/addon.xml index e29259a..2ca7853 100644 --- a/addon/addon.xml +++ b/addon/addon.xml @@ -1,5 +1,5 @@ - - + + @@ -10,8 +10,8 @@ video - ViewIt Kodi Plugin - Streaming-Addon für Streamingseiten: Suche, Staffeln/Episoden und Wiedergabe. 
+ Suche und Wiedergabe fuer mehrere Quellen + Findet Titel in unterstuetzten Quellen und startet Filme oder Episoden direkt in Kodi. icon.png diff --git a/addon/default.py b/addon/default.py index ee63d76..db9bc5c 100644 --- a/addon/default.py +++ b/addon/default.py @@ -8,6 +8,7 @@ ruft Plugin-Implementierungen auf und startet die Wiedergabe. from __future__ import annotations import asyncio +import atexit from contextlib import contextmanager from datetime import datetime import importlib.util @@ -16,11 +17,27 @@ import json import os import re import sys +import threading +import time import xml.etree.ElementTree as ET from pathlib import Path from types import ModuleType from urllib.parse import parse_qs, urlencode + +def _ensure_windows_selector_policy() -> None: + """Erzwingt unter Windows einen Selector-Loop (thread-kompatibel in Kodi).""" + if not sys.platform.startswith("win"): + return + try: + current = asyncio.get_event_loop_policy() + if current.__class__.__name__ == "WindowsSelectorEventLoopPolicy": + return + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + except Exception: + # Fallback: Wenn die Policy nicht verfügbar ist, arbeitet der Code mit Default-Policy weiter. + return + try: # pragma: no cover - Kodi runtime import xbmc # type: ignore[import-not-found] import xbmcaddon # type: ignore[import-not-found] @@ -87,6 +104,14 @@ except ImportError: # pragma: no cover - allow importing outside Kodi (e.g. 
lin xbmcplugin = _XbmcPluginStub() from plugin_interface import BasisPlugin +from http_session_pool import close_all_sessions +from plugin_helpers import normalize_resolved_stream_url +from metadata_utils import ( + collect_plugin_metadata as _collect_plugin_metadata, + merge_metadata as _merge_metadata, + metadata_policy as _metadata_policy_impl, + needs_tmdb as _needs_tmdb, +) from tmdb import TmdbCastMember, fetch_tv_episode_credits, lookup_movie, lookup_tv_season, lookup_tv_season_summary, lookup_tv_show PLUGIN_DIR = Path(__file__).with_name("plugins") @@ -101,7 +126,23 @@ _TMDB_LOG_PATH: str | None = None _GENRE_TITLES_CACHE: dict[tuple[str, str], list[str]] = {} _ADDON_INSTANCE = None _PLAYSTATE_CACHE: dict[str, dict[str, object]] | None = None +_PLAYSTATE_LOCK = threading.RLock() +_TMDB_LOCK = threading.RLock() WATCHED_THRESHOLD = 0.9 +POPULAR_MENU_LABEL = "Haeufig gesehen" +LATEST_MENU_LABEL = "Neuste Titel" + +atexit.register(close_all_sessions) + + +def _tmdb_cache_get(cache: dict, key, default=None): + with _TMDB_LOCK: + return cache.get(key, default) + + +def _tmdb_cache_set(cache: dict, key, value) -> None: + with _TMDB_LOCK: + cache[key] = value def _tmdb_prefetch_concurrency() -> int: @@ -140,12 +181,19 @@ def _busy_close() -> None: @contextmanager -def _busy_dialog(): - _busy_open() - try: - yield - finally: - _busy_close() +def _busy_dialog(message: str = "Bitte warten...", *, heading: str = "Bitte warten"): + """Progress-Dialog statt Spinner, mit kurzem Status-Text.""" + with _progress_dialog(heading, message) as progress: + progress(10, message) + + def _update(step_message: str, percent: int | None = None) -> bool: + pct = 50 if percent is None else max(5, min(95, int(percent))) + return progress(pct, step_message or message) + + try: + yield _update + finally: + progress(100, "Fertig") @contextmanager @@ -187,6 +235,42 @@ def _progress_dialog(heading: str, message: str = ""): pass +def _run_with_progress(heading: str, message: str, loader): + 
"""Fuehrt eine Ladefunktion mit sichtbarem Fortschrittsdialog aus.""" + with _progress_dialog(heading, message) as progress: + progress(10, message) + result = loader() + progress(100, "Fertig") + return result + + +def _method_accepts_kwarg(method: object, kwarg_name: str) -> bool: + if not callable(method): + return False + try: + signature = inspect.signature(method) + except Exception: + return False + for param in signature.parameters.values(): + if param.kind == inspect.Parameter.VAR_KEYWORD: + return True + if param.name == kwarg_name and param.kind in ( + inspect.Parameter.POSITIONAL_OR_KEYWORD, + inspect.Parameter.KEYWORD_ONLY, + ): + return True + return False + + +def _call_plugin_search(plugin: BasisPlugin, query: str, *, progress_callback=None): + method = getattr(plugin, "search_titles", None) + if not callable(method): + raise RuntimeError("Plugin hat keine gueltige search_titles Methode.") + if progress_callback is not None and _method_accepts_kwarg(method, "progress_callback"): + return method(query, progress_callback=progress_callback) + return method(query) + + def _get_handle() -> int: return int(sys.argv[1]) if len(sys.argv) > 1 else -1 @@ -226,115 +310,26 @@ def _playstate_path() -> str: def _load_playstate() -> dict[str, dict[str, object]]: - global _PLAYSTATE_CACHE - if _PLAYSTATE_CACHE is not None: - return _PLAYSTATE_CACHE - path = _playstate_path() - try: - if xbmcvfs and xbmcvfs.exists(path): - handle = xbmcvfs.File(path) - raw = handle.read() - handle.close() - else: - with open(path, "r", encoding="utf-8") as handle: - raw = handle.read() - data = json.loads(raw or "{}") - if isinstance(data, dict): - normalized: dict[str, dict[str, object]] = {} - for key, value in data.items(): - if isinstance(key, str) and isinstance(value, dict): - normalized[key] = dict(value) - _PLAYSTATE_CACHE = normalized - return normalized - except Exception: - pass - _PLAYSTATE_CACHE = {} return {} def _save_playstate(state: dict[str, dict[str, object]]) -> 
None: - global _PLAYSTATE_CACHE - _PLAYSTATE_CACHE = state - path = _playstate_path() - try: - payload = json.dumps(state, ensure_ascii=False, sort_keys=True) - except Exception: - return - try: - if xbmcvfs: - directory = os.path.dirname(path) - if directory and not xbmcvfs.exists(directory): - xbmcvfs.mkdirs(directory) - handle = xbmcvfs.File(path, "w") - handle.write(payload) - handle.close() - else: - with open(path, "w", encoding="utf-8") as handle: - handle.write(payload) - except Exception: - return + return def _get_playstate(key: str) -> dict[str, object]: - return dict(_load_playstate().get(key, {}) or {}) + return {} def _set_playstate(key: str, value: dict[str, object]) -> None: - state = _load_playstate() - if value: - state[key] = dict(value) - else: - state.pop(key, None) - _save_playstate(state) + return def _apply_playstate_to_info(info_labels: dict[str, object], playstate: dict[str, object]) -> dict[str, object]: - info_labels = dict(info_labels or {}) - watched = bool(playstate.get("watched") or False) - resume_position = playstate.get("resume_position") - resume_total = playstate.get("resume_total") - if watched: - info_labels["playcount"] = 1 - info_labels.pop("resume_position", None) - info_labels.pop("resume_total", None) - else: - try: - pos = int(resume_position) if resume_position is not None else 0 - tot = int(resume_total) if resume_total is not None else 0 - except Exception: - pos, tot = 0, 0 - if pos > 0 and tot > 0: - info_labels["resume_position"] = pos - info_labels["resume_total"] = tot - return info_labels - - -def _time_label(seconds: int) -> str: - try: - seconds = int(seconds or 0) - except Exception: - seconds = 0 - if seconds <= 0: - return "" - hours = seconds // 3600 - minutes = (seconds % 3600) // 60 - secs = seconds % 60 - if hours > 0: - return f"{hours:02d}:{minutes:02d}:{secs:02d}" - return f"{minutes:02d}:{secs:02d}" + return dict(info_labels or {}) def _label_with_playstate(label: str, playstate: dict[str, object]) 
-> str: - watched = bool(playstate.get("watched") or False) - if watched: - return f"✓ {label}" - resume_pos = playstate.get("resume_position") - try: - pos = int(resume_pos) if resume_pos is not None else 0 - except Exception: - pos = 0 - if pos > 0: - return f"↩ {_time_label(pos)} {label}" return label @@ -402,6 +397,59 @@ def _get_setting_bool(setting_id: str, *, default: bool = False) -> bool: return default +def _get_setting_int(setting_id: str, *, default: int = 0) -> int: + if xbmcaddon is None: + return default + addon = _get_addon() + if addon is None: + return default + getter = getattr(addon, "getSettingInt", None) + if callable(getter): + raw_getter = getattr(addon, "getSetting", None) + if callable(raw_getter): + try: + raw = str(raw_getter(setting_id) or "").strip() + except TypeError: + raw = "" + if raw == "": + return default + try: + return int(getter(setting_id)) + except TypeError: + return default + getter = getattr(addon, "getSetting", None) + if callable(getter): + try: + raw = str(getter(setting_id) or "").strip() + except TypeError: + return default + if raw == "": + return default + try: + return int(raw) + except ValueError: + return default + return default + + +def _metadata_policy( + plugin_name: str, + plugin: BasisPlugin, + *, + allow_tmdb: bool, +) -> tuple[bool, bool, bool]: + return _metadata_policy_impl( + plugin_name, + plugin, + allow_tmdb=allow_tmdb, + get_setting_int=_get_setting_int, + ) + + +def _tmdb_list_enabled() -> bool: + return _tmdb_enabled() and _get_setting_bool("tmdb_genre_metadata", default=False) + + def _set_setting_string(setting_id: str, value: str) -> None: if xbmcaddon is None: return @@ -625,11 +673,11 @@ def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], li show_cast = _get_setting_bool("tmdb_show_cast", default=False) flags = f"p{int(show_plot)}a{int(show_art)}f{int(show_fanart)}r{int(show_rating)}v{int(show_votes)}c{int(show_cast)}" cache_key = 
f"{language}|{flags}|{title_key}" - cached = _TMDB_CACHE.get(cache_key) + cached = _tmdb_cache_get(_TMDB_CACHE, cache_key) if cached is not None: info, art = cached # Cast wird nicht in _TMDB_CACHE gehalten (weil es ListItem.setCast betrifft), daher separat cachen: - cast_cached = _TMDB_CAST_CACHE.get(cache_key, []) + cast_cached = _tmdb_cache_get(_TMDB_CAST_CACHE, cache_key, []) return info, art, list(cast_cached) info_labels: dict[str, str] = {"title": title} @@ -687,7 +735,7 @@ def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], li if meta: # Nur TV-IDs cachen (für Staffel-/Episoden-Lookups); Movie-IDs würden dort fehlschlagen. if is_tv: - _TMDB_ID_CACHE[title_key] = int(getattr(meta, "tmdb_id", 0) or 0) + _tmdb_cache_set(_TMDB_ID_CACHE, title_key, int(getattr(meta, "tmdb_id", 0) or 0)) info_labels.setdefault("mediatype", "tvshow") else: info_labels.setdefault("mediatype", "movie") @@ -715,8 +763,8 @@ def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], li elif log_requests or log_responses: _tmdb_file_log(f"TMDB MISS title={title!r}") - _TMDB_CACHE[cache_key] = (info_labels, art) - _TMDB_CAST_CACHE[cache_key] = list(cast) + _tmdb_cache_set(_TMDB_CACHE, cache_key, (info_labels, art)) + _tmdb_cache_set(_TMDB_CAST_CACHE, cache_key, list(cast)) return info_labels, art, list(cast) @@ -762,10 +810,10 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label if not _tmdb_enabled(): return {"title": episode_label}, {} title_key = (title or "").strip().casefold() - tmdb_id = _TMDB_ID_CACHE.get(title_key) + tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key) if not tmdb_id: _tmdb_labels_and_art(title) - tmdb_id = _TMDB_ID_CACHE.get(title_key) + tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key) if not tmdb_id: return {"title": episode_label}, {} @@ -779,7 +827,7 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label show_art = _get_setting_bool("tmdb_show_art", 
default=True) flags = f"p{int(show_plot)}a{int(show_art)}" season_key = (tmdb_id, season_number, language, flags) - cached_season = _TMDB_SEASON_CACHE.get(season_key) + cached_season = _tmdb_cache_get(_TMDB_SEASON_CACHE, season_key) if cached_season is None: api_key = _get_setting_string("tmdb_api_key").strip() if not api_key: @@ -812,7 +860,7 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label if show_art and ep.thumb: art = {"thumb": ep.thumb} mapped[ep_no] = (info, art) - _TMDB_SEASON_CACHE[season_key] = mapped + _tmdb_cache_set(_TMDB_SEASON_CACHE, season_key, mapped) cached_season = mapped return cached_season.get(episode_number, ({"title": episode_label}, {})) @@ -826,10 +874,10 @@ def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> return [] title_key = (title or "").strip().casefold() - tmdb_id = _TMDB_ID_CACHE.get(title_key) + tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key) if not tmdb_id: _tmdb_labels_and_art(title) - tmdb_id = _TMDB_ID_CACHE.get(title_key) + tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key) if not tmdb_id: return [] @@ -840,13 +888,13 @@ def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> language = _get_setting_string("tmdb_language").strip() or "de-DE" cache_key = (tmdb_id, season_number, episode_number, language) - cached = _TMDB_EPISODE_CAST_CACHE.get(cache_key) + cached = _tmdb_cache_get(_TMDB_EPISODE_CAST_CACHE, cache_key) if cached is not None: return list(cached) api_key = _get_setting_string("tmdb_api_key").strip() if not api_key: - _TMDB_EPISODE_CAST_CACHE[cache_key] = [] + _tmdb_cache_set(_TMDB_EPISODE_CAST_CACHE, cache_key, []) return [] log_requests = _get_setting_bool("tmdb_log_requests", default=False) @@ -868,7 +916,7 @@ def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> f"TMDB ERROR episode_credits_failed tmdb_id={tmdb_id} season={season_number} episode={episode_number} error={exc!r}" ) cast = [] - 
_TMDB_EPISODE_CAST_CACHE[cache_key] = list(cast) + _tmdb_cache_set(_TMDB_EPISODE_CAST_CACHE, cache_key, list(cast)) return list(cast) @@ -921,6 +969,33 @@ def _normalize_update_info_url(raw: str) -> str: return value.rstrip("/") + "/addons.xml" +UPDATE_CHANNEL_MAIN = 0 +UPDATE_CHANNEL_NIGHTLY = 1 +UPDATE_CHANNEL_CUSTOM = 2 +AUTO_UPDATE_INTERVAL_SEC = 6 * 60 * 60 + + +def _selected_update_channel() -> int: + channel = _get_setting_int("update_channel", default=UPDATE_CHANNEL_MAIN) + if channel not in {UPDATE_CHANNEL_MAIN, UPDATE_CHANNEL_NIGHTLY, UPDATE_CHANNEL_CUSTOM}: + return UPDATE_CHANNEL_MAIN + return channel + + +def _resolve_update_info_url() -> str: + channel = _selected_update_channel() + if channel == UPDATE_CHANNEL_NIGHTLY: + raw = _get_setting_string("update_repo_url_nightly") + elif channel == UPDATE_CHANNEL_CUSTOM: + raw = _get_setting_string("update_repo_url") + else: + raw = _get_setting_string("update_repo_url_main") + info_url = _normalize_update_info_url(raw) + # Legacy-Setting beibehalten, damit bestehende Installationen und alte Builds weiterlaufen. 
+ _set_setting_string("update_repo_url", info_url) + return info_url + + def _repo_addon_xml_path() -> str: if xbmcvfs is None: return "" @@ -963,52 +1038,6 @@ def _settings_key_for_plugin(name: str) -> str: return f"update_version_{safe}" if safe else "update_version_unknown" -def _collect_plugin_metadata(plugin: BasisPlugin, titles: list[str]) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]]: - getter = getattr(plugin, "metadata_for", None) - if not callable(getter): - return {} - collected: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]] = {} - for title in titles: - try: - labels, art, cast = getter(title) - except Exception: - continue - if isinstance(labels, dict) or isinstance(art, dict) or cast: - label_map = {str(k): str(v) for k, v in dict(labels or {}).items() if v} - art_map = {str(k): str(v) for k, v in dict(art or {}).items() if v} - collected[title] = (label_map, art_map, cast if isinstance(cast, list) else None) - return collected - - -def _needs_tmdb(labels: dict[str, str], art: dict[str, str], *, want_plot: bool, want_art: bool) -> bool: - if want_plot and not labels.get("plot"): - return True - if want_art and not (art.get("thumb") or art.get("poster") or art.get("fanart") or art.get("landscape")): - return True - return False - - -def _merge_metadata( - title: str, - tmdb_labels: dict[str, str] | None, - tmdb_art: dict[str, str] | None, - tmdb_cast: list[TmdbCastMember] | None, - plugin_meta: tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None] | None, -) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]: - labels = dict(tmdb_labels or {}) - art = dict(tmdb_art or {}) - cast = tmdb_cast - if plugin_meta is not None: - meta_labels, meta_art, meta_cast = plugin_meta - labels.update({k: str(v) for k, v in dict(meta_labels or {}).items() if v}) - art.update({k: str(v) for k, v in dict(meta_art or {}).items() if v}) - if meta_cast is not None: - cast = 
meta_cast - if "title" not in labels: - labels["title"] = title - return labels, art, cast - - def _sync_update_version_settings() -> None: addon = _get_addon() addon_version = "0.0.0" @@ -1038,7 +1067,7 @@ def _sync_update_version_settings() -> None: def _show_root_menu() -> None: handle = _get_handle() _log("Root-Menue wird angezeigt.") - _add_directory_item(handle, "Globale Suche", "search") + _add_directory_item(handle, "Suche in allen Quellen", "search") plugins = _discover_plugins() for plugin_name in sorted(plugins.keys(), key=lambda value: value.casefold()): @@ -1053,7 +1082,7 @@ def _show_plugin_menu(plugin_name: str) -> None: plugin_name = (plugin_name or "").strip() plugin = _discover_plugins().get(plugin_name) if not plugin: - xbmcgui.Dialog().notification("Plugin", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Quelle", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -1061,11 +1090,8 @@ def _show_plugin_menu(plugin_name: str) -> None: _add_directory_item(handle, "Suche", "plugin_search", {"plugin": plugin_name}, is_folder=True) - if _plugin_has_capability(plugin, "new_titles"): - _add_directory_item(handle, "Neue Titel", "new_titles", {"plugin": plugin_name, "page": "1"}, is_folder=True) - - if _plugin_has_capability(plugin, "latest_episodes"): - _add_directory_item(handle, "Neueste Folgen", "latest_episodes", {"plugin": plugin_name, "page": "1"}, is_folder=True) + if _plugin_has_capability(plugin, "new_titles") or _plugin_has_capability(plugin, "latest_episodes"): + _add_directory_item(handle, LATEST_MENU_LABEL, "latest_titles", {"plugin": plugin_name, "page": "1"}, is_folder=True) if _plugin_has_capability(plugin, "genres"): _add_directory_item(handle, "Genres", "genres", {"plugin": plugin_name}, is_folder=True) @@ -1077,7 +1103,7 @@ def _show_plugin_menu(plugin_name: str) -> None: _add_directory_item(handle, "Serien", "series_catalog", {"plugin": 
plugin_name, "page": "1"}, is_folder=True) if _plugin_has_capability(plugin, "popular_series"): - _add_directory_item(handle, "Meist gesehen", "popular", {"plugin": plugin_name, "page": "1"}, is_folder=True) + _add_directory_item(handle, POPULAR_MENU_LABEL, "popular", {"plugin": plugin_name, "page": "1"}, is_folder=True) xbmcplugin.endOfDirectory(handle) @@ -1086,7 +1112,7 @@ def _show_plugin_search(plugin_name: str) -> None: plugin_name = (plugin_name or "").strip() plugin = _discover_plugins().get(plugin_name) if not plugin: - xbmcgui.Dialog().notification("Suche", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Suche", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) _show_root_menu() return @@ -1107,7 +1133,7 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None: query = (query or "").strip() plugin = _discover_plugins().get(plugin_name) if not plugin: - xbmcgui.Dialog().notification("Suche", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Suche", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -1118,20 +1144,34 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None: list_items: list[dict[str, object]] = [] canceled = False try: - with _progress_dialog("Suche läuft", f"{plugin_name} (1/1) starte…") as progress: - canceled = progress(5, f"{plugin_name} (1/1) Suche…") - results = _run_async(plugin.search_titles(query)) - results = [str(t).strip() for t in (results or []) if t and str(t).strip()] + with _progress_dialog("Suche laeuft", f"{plugin_name} (1/1) startet...") as progress: + canceled = progress(5, f"{plugin_name} (1/1) Suche...") + plugin_progress = lambda msg="", pct=None: progress( # noqa: E731 - kompakte Callback-Bruecke + max(5, min(95, int(pct))) if pct is not None else 20, + f"{plugin_name} (1/1) {str(msg or 'Suche...').strip()}", + ) + search_coro = 
_call_plugin_search(plugin, query, progress_callback=plugin_progress) + try: + results = _run_async(search_coro) + except Exception: + if inspect.iscoroutine(search_coro): + try: + search_coro.close() + except Exception: + pass + raise + results = _clean_search_titles([str(t).strip() for t in (results or []) if t and str(t).strip()]) results.sort(key=lambda value: value.casefold()) - plugin_meta = _collect_plugin_metadata(plugin, results) + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, results) if use_source else {} tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - show_tmdb = _tmdb_enabled() show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) - prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) - tmdb_titles = list(results) - if show_tmdb and prefer_source: + tmdb_titles = list(results) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: tmdb_titles = [] for title in results: meta = plugin_meta.get(title) @@ -1140,7 +1180,7 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None: if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): tmdb_titles.append(title) if show_tmdb and tmdb_titles and not canceled: - canceled = progress(35, f"{plugin_name} (1/1) Metadaten…") + canceled = progress(35, f"{plugin_name} (1/1) Metadaten...") tmdb_prefetched = _tmdb_labels_and_art_bulk(list(tmdb_titles)) total_results = max(1, len(results)) @@ -1230,11 +1270,16 @@ def _discover_plugins() -> dict[str, BasisPlugin]: except Exception as exc: xbmc.log(f"Plugin-Datei {file_path.name} konnte nicht geladen werden: {exc}", xbmc.LOGWARNING) continue - plugin_classes = [ - obj - for obj in module.__dict__.values() - if inspect.isclass(obj) and issubclass(obj, BasisPlugin) and obj is not 
BasisPlugin - ] + preferred = getattr(module, "Plugin", None) + if inspect.isclass(preferred) and issubclass(preferred, BasisPlugin) and preferred is not BasisPlugin: + plugin_classes = [preferred] + else: + plugin_classes = [ + obj + for obj in module.__dict__.values() + if inspect.isclass(obj) and issubclass(obj, BasisPlugin) and obj is not BasisPlugin + ] + plugin_classes.sort(key=lambda cls: cls.__name__.casefold()) for cls in plugin_classes: try: instance = cls() @@ -1245,24 +1290,55 @@ def _discover_plugins() -> dict[str, BasisPlugin]: reason = getattr(instance, "unavailable_reason", "Nicht verfuegbar.") xbmc.log(f"Plugin {cls.__name__} deaktiviert: {reason}", xbmc.LOGWARNING) continue - plugins[instance.name] = instance + plugin_name = str(getattr(instance, "name", "") or "").strip() + if not plugin_name: + xbmc.log( + f"Plugin {cls.__name__} wurde ohne Name registriert und wird uebersprungen.", + xbmc.LOGWARNING, + ) + continue + if plugin_name in plugins: + xbmc.log( + f"Plugin-Name doppelt ({plugin_name}), {cls.__name__} wird uebersprungen.", + xbmc.LOGWARNING, + ) + continue + plugins[plugin_name] = instance + plugins = dict(sorted(plugins.items(), key=lambda item: item[0].casefold())) _PLUGIN_CACHE = plugins return plugins def _run_async(coro): """Fuehrt eine Coroutine aus, auch wenn Kodi bereits einen Event-Loop hat.""" + _ensure_windows_selector_policy() + + def _run_with_asyncio_run(): + return asyncio.run(coro) + try: - loop = asyncio.get_event_loop() + running_loop = asyncio.get_running_loop() except RuntimeError: - loop = None - if loop and loop.is_running(): - temp_loop = asyncio.new_event_loop() - try: - return temp_loop.run_until_complete(coro) - finally: - temp_loop.close() - return asyncio.run(coro) + running_loop = None + + if running_loop and running_loop.is_running(): + result_box: dict[str, object] = {} + error_box: dict[str, BaseException] = {} + + def _worker() -> None: + try: + result_box["value"] = _run_with_asyncio_run() + except 
BaseException as exc: # pragma: no cover - defensive + error_box["error"] = exc + + worker = threading.Thread(target=_worker, name="viewit-async-runner") + worker.start() + worker.join() + if "error" in error_box: + raise error_box["error"] + return result_box.get("value") + + return _run_with_asyncio_run() def _series_url_params(plugin: BasisPlugin, title: str) -> dict[str, str]: @@ -1276,10 +1352,37 @@ def _series_url_params(plugin: BasisPlugin, title: str) -> dict[str, str]: return {"series_url": series_url} if series_url else {} +def _clean_search_titles(values: list[str]) -> list[str]: + """Filtert offensichtliche Platzhalter und dedupliziert Treffer.""" + blocked = { + "stream", + "streams", + "film", + "movie", + "play", + "details", + "details/play", + } + cleaned: list[str] = [] + seen: set[str] = set() + for raw in values: + title = (raw or "").strip() + if not title: + continue + key = title.casefold() + if key in blocked: + continue + if key in seen: + continue + seen.add(key) + cleaned.append(title) + return cleaned + + def _show_search() -> None: _log("Suche gestartet.") dialog = xbmcgui.Dialog() - query = dialog.input("Serientitel eingeben", type=xbmcgui.INPUT_ALPHANUM).strip() + query = dialog.input("Titel eingeben", type=xbmcgui.INPUT_ALPHANUM).strip() if not query: _log("Suche abgebrochen (leere Eingabe).", xbmc.LOGDEBUG) _show_root_menu() @@ -1294,35 +1397,46 @@ def _show_search_results(query: str) -> None: _set_content(handle, "tvshows") plugins = _discover_plugins() if not plugins: - xbmcgui.Dialog().notification("Suche", "Keine Plugins gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Suche", "Keine Quellen gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return list_items: list[dict[str, object]] = [] canceled = False plugin_entries = list(plugins.items()) total_plugins = max(1, len(plugin_entries)) - with _progress_dialog("Suche läuft", "Suche gestartet…") as progress: + with 
_progress_dialog("Suche laeuft", "Suche startet...") as progress: for plugin_index, (plugin_name, plugin) in enumerate(plugin_entries, start=1): range_start = int(((plugin_index - 1) / float(total_plugins)) * 100) range_end = int((plugin_index / float(total_plugins)) * 100) - canceled = progress(range_start, f"{plugin_name} ({plugin_index}/{total_plugins}) Suche…") + canceled = progress(range_start, f"{plugin_name} ({plugin_index}/{total_plugins}) Suche...") if canceled: break + plugin_progress = lambda msg="", pct=None: progress( # noqa: E731 - kompakte Callback-Bruecke + max(range_start, min(range_end, int(pct))) if pct is not None else range_start + 20, + f"{plugin_name} ({plugin_index}/{total_plugins}) {str(msg or 'Suche...').strip()}", + ) + search_coro = _call_plugin_search(plugin, query, progress_callback=plugin_progress) try: - results = _run_async(plugin.search_titles(query)) + results = _run_async(search_coro) except Exception as exc: + if inspect.iscoroutine(search_coro): + try: + search_coro.close() + except Exception: + pass _log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) continue - results = [str(t).strip() for t in (results or []) if t and str(t).strip()] + results = _clean_search_titles([str(t).strip() for t in (results or []) if t and str(t).strip()]) _log(f"Treffer ({plugin_name}): {len(results)}", xbmc.LOGDEBUG) - plugin_meta = _collect_plugin_metadata(plugin, results) + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, results) if use_source else {} tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - show_tmdb = _tmdb_enabled() show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) - prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) - tmdb_titles = list(results) - if show_tmdb and 
prefer_source: + tmdb_titles = list(results) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: tmdb_titles = [] for title in results: meta = plugin_meta.get(title) @@ -1333,7 +1447,7 @@ def _show_search_results(query: str) -> None: if show_tmdb and tmdb_titles: canceled = progress( range_start + int((range_end - range_start) * 0.35), - f"{plugin_name} ({plugin_index}/{total_plugins}) Metadaten…", + f"{plugin_name} ({plugin_index}/{total_plugins}) Metadaten...", ) if canceled: break @@ -1376,7 +1490,7 @@ def _show_search_results(query: str) -> None: if canceled: break if not canceled: - progress(100, "Suche abgeschlossen") + progress(100, "Suche fertig") if canceled and not list_items: xbmcgui.Dialog().notification("Suche", "Suche abgebrochen.", xbmcgui.NOTIFICATION_INFO, 2500) xbmcplugin.endOfDirectory(handle) @@ -1396,12 +1510,79 @@ def _show_search_results(query: str) -> None: xbmcplugin.endOfDirectory(handle) +def _movie_seed_for_title(plugin: BasisPlugin, title: str, seasons: list[str]) -> tuple[str, str] | None: + """Ermittelt ein Film-Seed (Season/Episode), um direkt Provider anzeigen zu können.""" + if not seasons or len(seasons) != 1: + return None + season = str(seasons[0] or "").strip() + if not season: + return None + try: + episodes = [str(value or "").strip() for value in (plugin.episodes_for(title, season) or [])] + except Exception: + return None + episodes = [value for value in episodes if value] + if len(episodes) != 1: + return None + episode = episodes[0] + season_key = season.casefold() + episode_key = episode.casefold() + title_key = (title or "").strip().casefold() + generic_seasons = {"film", "movie", "stream"} + generic_episodes = {"stream", "film", "play", title_key} + if season_key in generic_seasons and episode_key in generic_episodes: + return (season, episode) + return None + + +def _show_movie_streams( + plugin_name: str, + title: str, + season: str, + episode: str, + *, + series_url: str = "", +) -> None: + handle 
= _get_handle() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + xbmcgui.Dialog().notification("Streams", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + + if series_url: + remember_series_url = getattr(plugin, "remember_series_url", None) + if callable(remember_series_url): + try: + remember_series_url(title, series_url) + except Exception: + pass + + xbmcplugin.setPluginCategory(handle, f"{title} - Streams") + _set_content(handle, "videos") + + base_params = {"plugin": plugin_name, "title": title, "season": season, "episode": episode} + if series_url: + base_params["series_url"] = series_url + + # Hoster bleiben im Auswahldialog der Wiedergabe (wie bisher). + _add_directory_item( + handle, + title, + "play_episode", + dict(base_params), + is_folder=False, + info_labels={"title": title, "mediatype": "movie"}, + ) + xbmcplugin.endOfDirectory(handle) + + def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: handle = _get_handle() _log(f"Staffeln laden: {plugin_name} / {title}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Staffeln", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Staffeln", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return if series_url: @@ -1412,67 +1593,16 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: except Exception: pass - # Einschalten liefert Filme. Für Playback soll nach dem Öffnen des Titels direkt ein - # einzelnes abspielbares Item angezeigt werden: -> ( abspielbar). - # Wichtig: ohne zusätzliche Netzwerkanfragen (sonst bleibt Kodi ggf. im Busy-Spinner hängen). 
- if (plugin_name or "").casefold() == "einschalten" and _get_setting_bool("einschalten_enable_playback", default=False): - xbmcplugin.setPluginCategory(handle, title) - _set_content(handle, "movies") - playstate = _title_playstate(plugin_name, title) - info_labels: dict[str, object] = {"title": title, "mediatype": "movie"} - info_labels = _apply_playstate_to_info(info_labels, playstate) - display_label = _label_with_playstate(title, playstate) - movie_params = {"plugin": plugin_name, "title": title} - if series_url: - movie_params["series_url"] = series_url - _add_directory_item( - handle, - display_label, - "play_movie", - movie_params, - is_folder=False, - info_labels=info_labels, - ) - xbmcplugin.endOfDirectory(handle) - return - - # Optional: Plugins können schnell (ohne Detail-Request) sagen, ob ein Titel ein Film ist. - # Dann zeigen wir direkt ein einzelnes abspielbares Item: -> (). - is_movie = getattr(plugin, "is_movie", None) - if callable(is_movie): - try: - if bool(is_movie(title)): - xbmcplugin.setPluginCategory(handle, title) - _set_content(handle, "movies") - playstate = _title_playstate(plugin_name, title) - info_labels: dict[str, object] = {"title": title, "mediatype": "movie"} - info_labels = _apply_playstate_to_info(info_labels, playstate) - display_label = _label_with_playstate(title, playstate) - movie_params = {"plugin": plugin_name, "title": title} - if series_url: - movie_params["series_url"] = series_url - else: - movie_params.update(_series_url_params(plugin, title)) - _add_directory_item( - handle, - display_label, - "play_movie", - movie_params, - is_folder=False, - info_labels=info_labels, - ) - xbmcplugin.endOfDirectory(handle) - return - except Exception: - pass - + use_source, show_tmdb, _prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_enabled() + ) title_info_labels: dict[str, str] | None = None title_art: dict[str, str] | None = None title_cast: list[TmdbCastMember] | None = None meta_getter = 
getattr(plugin, "metadata_for", None) - if callable(meta_getter): + if use_source and callable(meta_getter): try: - with _busy_dialog(): + with _busy_dialog("Metadaten werden geladen..."): meta_labels, meta_art, meta_cast = meta_getter(title) if isinstance(meta_labels, dict): title_info_labels = {str(k): str(v) for k, v in meta_labels.items() if v} @@ -1488,17 +1618,38 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: seasons = plugin.seasons_for(title) except Exception as exc: _log(f"Staffeln laden fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Staffeln", "Konnte Staffeln nicht laden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Staffeln", "Staffeln konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return + movie_seed = _movie_seed_for_title(plugin, title, seasons) + if movie_seed is not None: + # Dieser Action-Pfad wurde als Verzeichnis aufgerufen. Ohne endOfDirectory() + # bleibt Kodi im Busy-Zustand, auch wenn wir direkt in die Wiedergabe springen. + try: + xbmcplugin.endOfDirectory(handle, succeeded=False) + except Exception: + try: + xbmcplugin.endOfDirectory(handle) + except Exception: + pass + _play_episode( + plugin_name, + title, + movie_seed[0], + movie_seed[1], + series_url=series_url, + ) + return + count = len(seasons) suffix = "Staffel" if count == 1 else "Staffeln" xbmcplugin.setPluginCategory(handle, f"{title} ({count} {suffix})") _set_content(handle, "seasons") # Staffel-Metadaten (Plot/Poster) optional via TMDB. 
- _tmdb_labels_and_art(title) - api_key = _get_setting_string("tmdb_api_key").strip() + if show_tmdb: + _tmdb_labels_and_art(title) + api_key = _get_setting_string("tmdb_api_key").strip() if show_tmdb else "" language = _get_setting_string("tmdb_language").strip() or "de-DE" show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) @@ -1511,8 +1662,8 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: art: dict[str, str] | None = None season_number = _extract_first_int(season) if api_key and season_number is not None: - cache_key = (_TMDB_ID_CACHE.get((title or "").strip().casefold(), 0), season_number, language, flags) - cached = _TMDB_SEASON_SUMMARY_CACHE.get(cache_key) + cache_key = (_tmdb_cache_get(_TMDB_ID_CACHE, (title or "").strip().casefold(), 0), season_number, language, flags) + cached = _tmdb_cache_get(_TMDB_SEASON_SUMMARY_CACHE, cache_key) if cached is None and cache_key[0]: try: meta = lookup_tv_season_summary( @@ -1535,7 +1686,7 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: if show_art and meta.poster: art_map = {"thumb": meta.poster, "poster": meta.poster} cached = (labels, art_map) - _TMDB_SEASON_SUMMARY_CACHE[cache_key] = cached + _tmdb_cache_set(_TMDB_SEASON_SUMMARY_CACHE, cache_key, cached) if cached is not None: info_labels, art = cached merged_labels = dict(info_labels or {}) @@ -1568,7 +1719,7 @@ def _show_episodes(plugin_name: str, title: str, season: str, series_url: str = _log(f"Episoden laden: {plugin_name} / {title} / {season}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Episoden", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Episoden", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return if series_url: @@ -1587,13 +1738,42 @@ def _show_episodes(plugin_name: str, 
title: str, season: str, series_url: str = episodes = list(plugin.episodes_for(title, season)) if episodes: - show_info, show_art, show_cast = _tmdb_labels_and_art(title) + episode_url_getter = getattr(plugin, "episode_url_for", None) + supports_direct_episode_url = callable(getattr(plugin, "stream_link_for_url", None)) + use_source, show_tmdb, _prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_enabled() + ) + show_info: dict[str, str] = {} + show_art: dict[str, str] = {} + show_cast: list[TmdbCastMember] | None = None + if show_tmdb: + show_info, show_art, show_cast = _tmdb_labels_and_art(title) + elif use_source: + meta_getter = getattr(plugin, "metadata_for", None) + if callable(meta_getter): + try: + with _busy_dialog("Episoden-Metadaten werden geladen..."): + meta_labels, meta_art, meta_cast = meta_getter(title) + if isinstance(meta_labels, dict): + show_info = {str(k): str(v) for k, v in meta_labels.items() if v} + if isinstance(meta_art, dict): + show_art = {str(k): str(v) for k, v in meta_art.items() if v} + if isinstance(meta_cast, list): + show_cast = meta_cast # noqa: PGH003 + except Exception: + pass + show_fanart = (show_art or {}).get("fanart") if isinstance(show_art, dict) else "" show_poster = (show_art or {}).get("poster") if isinstance(show_art, dict) else "" - with _busy_dialog(): + with _busy_dialog("Episoden werden aufbereitet..."): for episode in episodes: - info_labels, art = _tmdb_episode_labels_and_art(title=title, season_label=season, episode_label=episode) - episode_cast = _tmdb_episode_cast(title=title, season_label=season, episode_label=episode) + if show_tmdb: + info_labels, art = _tmdb_episode_labels_and_art( + title=title, season_label=season, episode_label=episode + ) + episode_cast = _tmdb_episode_cast(title=title, season_label=season, episode_label=episode) + else: + info_labels, art, episode_cast = {}, {}, [] merged_info = dict(show_info or {}) merged_info.update(dict(info_labels or {})) merged_art: 
dict[str, str] = {} @@ -1623,11 +1803,25 @@ def _show_episodes(plugin_name: str, title: str, season: str, series_url: str = merged_info = _apply_playstate_to_info(merged_info, _get_playstate(key)) display_label = episode + play_params = { + "plugin": plugin_name, + "title": title, + "season": season, + "episode": episode, + "series_url": series_url, + } + if supports_direct_episode_url and callable(episode_url_getter): + try: + episode_url = str(episode_url_getter(title, season, episode) or "").strip() + except Exception: + episode_url = "" + if episode_url: + play_params["url"] = episode_url _add_directory_item( handle, display_label, "play_episode", - {"plugin": plugin_name, "title": title, "season": season, "episode": episode}, + play_params, is_folder=False, info_labels=merged_info, art=merged_art, @@ -1649,7 +1843,7 @@ def _show_genre_sources() -> None: sources.append((plugin_name, plugin)) if not sources: - xbmcgui.Dialog().notification("Genres", "Keine Genre-Quellen gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Genres", "Keine Quellen mit Genres gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -1669,11 +1863,15 @@ def _show_genres(plugin_name: str) -> None: _log(f"Genres laden: {plugin_name}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Genres", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Genres", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return try: - genres = plugin.genres() + genres = _run_with_progress( + "Genres", + f"{plugin_name}: Genres werden geladen...", + lambda: plugin.genres(), + ) except Exception as exc: _log(f"Genres konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Genres", "Genres konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -1706,7 +1904,7 @@ 
def _show_categories(plugin_name: str) -> None: _log(f"Kategorien laden: {plugin_name}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Kategorien", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Kategorien", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return getter = getattr(plugin, "categories", None) @@ -1715,7 +1913,11 @@ def _show_categories(plugin_name: str) -> None: xbmcplugin.endOfDirectory(handle) return try: - categories = list(getter() or []) + categories = _run_with_progress( + "Kategorien", + f"{plugin_name}: Kategorien werden geladen...", + lambda: list(getter() or []), + ) except Exception as exc: _log(f"Kategorien konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Kategorien", "Kategorien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -1739,14 +1941,14 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - handle = _get_handle() plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Kategorien", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Kategorien", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page = max(1, int(page or 1)) paging_getter = getattr(plugin, "titles_for_genre_page", None) if not callable(paging_getter): - xbmcgui.Dialog().notification("Kategorien", "Paging nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Kategorien", "Seitenwechsel nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -1774,7 +1976,11 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - ) try: - titles = list(paging_getter(category, page) or []) + titles = 
_run_with_progress( + "Kategorien", + f"{plugin_name}: {category} Seite {page} wird geladen...", + lambda: list(paging_getter(category, page) or []), + ) except Exception as exc: _log(f"Kategorie-Seite konnte nicht geladen werden ({plugin_name}/{category} p{page}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Kategorien", "Seite konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -1784,16 +1990,16 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - titles = [str(t).strip() for t in titles if t and str(t).strip()] titles.sort(key=lambda value: value.casefold()) - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if titles: - plugin_meta = _collect_plugin_metadata(plugin, titles) - show_tmdb = _tmdb_enabled() + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {} show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) - prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - tmdb_titles = list(titles) - if show_tmdb and prefer_source: + tmdb_titles = list(titles) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: tmdb_titles = [] for title in titles: meta = plugin_meta.get(title) @@ -1802,53 +2008,33 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): tmdb_titles.append(title) if show_tmdb and tmdb_titles: - with _busy_dialog(): + with _busy_dialog("Genre-Liste wird geladen..."): tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) - if show_tmdb: - for title in titles: - tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) - 
meta = plugin_meta.get(title) - info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - display_label, - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: - for title in titles: - playstate = _title_playstate(plugin_name, title) - meta = plugin_meta.get(title) - info_labels, art, cast = _merge_metadata(title, {}, {}, None, meta) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=_apply_playstate_to_info(info_labels, playstate), - art=art, - cast=cast, - ) + for title in titles: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels.setdefault("mediatype", "tvshow") + if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": + info_labels.setdefault("tvshowtitle", title) + 
playstate = _title_playstate(plugin_name, title) + info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = _label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + direct_play = bool( + plugin_name.casefold() == "einschalten" + and _get_setting_bool("einschalten_enable_playback", default=False) + ) + _add_directory_item( + handle, + display_label, + "play_movie" if direct_play else "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=not direct_play, + info_labels=info_labels, + art=art, + cast=cast, + ) show_next = False if total_pages is not None: @@ -1864,7 +2050,7 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - if show_next: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "category_titles_page", {"plugin": plugin_name, "category": category, "page": str(page + 1)}, is_folder=True, @@ -1875,14 +2061,14 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None handle = _get_handle() plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Genres", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Genres", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page = max(1, int(page or 1)) paging_getter = getattr(plugin, "titles_for_genre_page", None) if not callable(paging_getter): - xbmcgui.Dialog().notification("Genres", "Paging nicht verfügbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Genres", "Seitenwechsel nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -1910,7 +2096,11 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None ) try: - titles = list(paging_getter(genre, page) or []) + titles = _run_with_progress( + 
"Genres", + f"{plugin_name}: {genre} Seite {page} wird geladen...", + lambda: list(paging_getter(genre, page) or []), + ) except Exception as exc: _log(f"Genre-Seite konnte nicht geladen werden ({plugin_name}/{genre} p{page}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Genres", "Seite konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -1920,16 +2110,16 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None titles = [str(t).strip() for t in titles if t and str(t).strip()] titles.sort(key=lambda value: value.casefold()) - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if titles: - plugin_meta = _collect_plugin_metadata(plugin, titles) - show_tmdb = show_tmdb and _tmdb_enabled() + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {} show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) - prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - tmdb_titles = list(titles) - if show_tmdb and prefer_source: + tmdb_titles = list(titles) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: tmdb_titles = [] for title in titles: meta = plugin_meta.get(title) @@ -1938,7 +2128,7 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): tmdb_titles.append(title) if show_tmdb and tmdb_titles: - with _busy_dialog(): + with _busy_dialog("Genre-Seite wird geladen..."): tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) for title in titles: tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) @@ -1980,7 +2170,7 @@ 
def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None if show_next: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "genre_titles_page", {"plugin": plugin_name, "genre": genre, "page": str(page + 1)}, is_folder=True, @@ -1993,16 +2183,20 @@ def _show_alpha_index(plugin_name: str) -> None: _log(f"A-Z laden: {plugin_name}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("A-Z", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("A-Z", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return getter = getattr(plugin, "alpha_index", None) if not callable(getter): - xbmcgui.Dialog().notification("A-Z", "A-Z nicht verfügbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("A-Z", "A-Z nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return try: - letters = list(getter() or []) + letters = _run_with_progress( + "A-Z", + f"{plugin_name}: Index wird geladen...", + lambda: list(getter() or []), + ) except Exception as exc: _log(f"A-Z konnte nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("A-Z", "A-Z konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -2026,14 +2220,14 @@ def _show_alpha_titles_page(plugin_name: str, letter: str, page: int = 1) -> Non handle = _get_handle() plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("A-Z", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("A-Z", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page = max(1, int(page or 1)) paging_getter = getattr(plugin, "titles_for_alpha_page", None) if not callable(paging_getter): - xbmcgui.Dialog().notification("A-Z", "Paging nicht verfügbar.", xbmcgui.NOTIFICATION_INFO, 
3000) + xbmcgui.Dialog().notification("A-Z", "Seitenwechsel nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2061,7 +2255,11 @@ def _show_alpha_titles_page(plugin_name: str, letter: str, page: int = 1) -> Non ) try: - titles = list(paging_getter(letter, page) or []) + titles = _run_with_progress( + "A-Z", + f"{plugin_name}: {letter} Seite {page} wird geladen...", + lambda: list(paging_getter(letter, page) or []), + ) except Exception as exc: _log(f"A-Z Seite konnte nicht geladen werden ({plugin_name}/{letter} p{page}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("A-Z", "Seite konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -2071,50 +2269,52 @@ def _show_alpha_titles_page(plugin_name: str, letter: str, page: int = 1) -> Non titles = [str(t).strip() for t in titles if t and str(t).strip()] titles.sort(key=lambda value: value.casefold()) - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if titles: - if show_tmdb: - with _busy_dialog(): - tmdb_prefetched = _tmdb_labels_and_art_bulk(titles) - for title in titles: - info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - display_label, - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not 
direct_play, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {} + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + tmdb_titles = list(titles) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: + tmdb_titles = [] for title in titles: - playstate = _title_playstate(plugin_name, title) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=_apply_playstate_to_info({"title": title}, playstate), - ) + meta = plugin_meta.get(title) + meta_labels = meta[0] if meta else {} + meta_art = meta[1] if meta else {} + if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): + tmdb_titles.append(title) + if show_tmdb and tmdb_titles: + with _busy_dialog("A-Z Liste wird geladen..."): + tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) + for title in titles: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) + info_labels.setdefault("mediatype", "tvshow") + if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": + info_labels.setdefault("tvshowtitle", title) + playstate = _title_playstate(plugin_name, title) + 
info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = _label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + direct_play = bool( + plugin_name.casefold() == "einschalten" + and _get_setting_bool("einschalten_enable_playback", default=False) + ) + _add_directory_item( + handle, + display_label, + "play_movie" if direct_play else "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=not direct_play, + info_labels=info_labels, + art=art, + cast=cast, + ) show_next = False if total_pages is not None: @@ -2123,7 +2323,7 @@ def _show_alpha_titles_page(plugin_name: str, letter: str, page: int = 1) -> Non if show_next: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "alpha_titles_page", {"plugin": plugin_name, "letter": letter, "page": str(page + 1)}, is_folder=True, @@ -2136,14 +2336,14 @@ def _show_series_catalog(plugin_name: str, page: int = 1) -> None: plugin_name = (plugin_name or "").strip() plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Serien", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Serien", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page = max(1, int(page or 1)) paging_getter = getattr(plugin, "series_catalog_page", None) if not callable(paging_getter): - xbmcgui.Dialog().notification("Serien", "Serien nicht verfügbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Serien", "Serienkatalog nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2171,7 +2371,11 @@ def _show_series_catalog(plugin_name: str, page: int = 1) -> None: ) try: - titles = list(paging_getter(page) or []) + titles = _run_with_progress( + "Serien", + f"{plugin_name}: Seite {page} wird geladen...", + lambda: 
list(paging_getter(page) or []), + ) except Exception as exc: _log(f"Serien konnten nicht geladen werden ({plugin_name} p{page}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Serien", "Serien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -2181,42 +2385,48 @@ def _show_series_catalog(plugin_name: str, page: int = 1) -> None: titles = [str(t).strip() for t in titles if t and str(t).strip()] titles.sort(key=lambda value: value.casefold()) - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if titles: - if show_tmdb: - with _busy_dialog(): - tmdb_prefetched = _tmdb_labels_and_art_bulk(titles) - for title in titles: - info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - _add_directory_item( - handle, - display_label, - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {} + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + tmdb_titles = list(titles) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: + tmdb_titles = [] for title in titles: - 
playstate = _title_playstate(plugin_name, title) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=_apply_playstate_to_info({"title": title}, playstate), - ) + meta = plugin_meta.get(title) + meta_labels = meta[0] if meta else {} + meta_art = meta[1] if meta else {} + if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): + tmdb_titles.append(title) + if show_tmdb and tmdb_titles: + with _busy_dialog("A-Z Seite wird geladen..."): + tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) + for title in titles: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) + info_labels.setdefault("mediatype", "tvshow") + if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": + info_labels.setdefault("tvshowtitle", title) + playstate = _title_playstate(plugin_name, title) + info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = _label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + _add_directory_item( + handle, + display_label, + "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=True, + info_labels=info_labels, + art=art, + cast=cast, + ) show_next = False if total_pages is not None: @@ -2232,7 +2442,7 @@ def _show_series_catalog(plugin_name: str, page: int = 1) -> None: if show_next: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "series_catalog", {"plugin": plugin_name, "page": str(page + 1)}, is_folder=True, @@ -2370,22 +2580,30 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: if 
plugin_name: plugin = _discover_plugins().get(plugin_name) if plugin is None or not _plugin_has_capability(plugin, "popular_series"): - xbmcgui.Dialog().notification("Beliebte Serien", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(POPULAR_MENU_LABEL, "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return try: popular_getter = getattr(plugin, "popular_series", None) if callable(popular_getter): - titles = list(popular_getter() or []) + titles = _run_with_progress( + POPULAR_MENU_LABEL, + f"{plugin_name}: Liste wird geladen...", + lambda: list(popular_getter() or []), + ) else: label = _popular_genre_label(plugin) if not label: titles = [] else: - titles = list(plugin.titles_for_genre(label) or []) + titles = _run_with_progress( + POPULAR_MENU_LABEL, + f"{plugin_name}: Liste wird geladen...", + lambda: list(plugin.titles_for_genre(label) or []), + ) except Exception as exc: - _log(f"Beliebte Serien konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Beliebte Serien", "Serien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log(f"{POPULAR_MENU_LABEL} konnte nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification(POPULAR_MENU_LABEL, "Serien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2394,7 +2612,7 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: total = len(titles) total_pages = max(1, (total + page_size - 1) // page_size) page = min(page, total_pages) - xbmcplugin.setPluginCategory(handle, f"Beliebte Serien [{plugin_name}] ({page}/{total_pages})") + xbmcplugin.setPluginCategory(handle, f"{POPULAR_MENU_LABEL} [{plugin_name}] ({page}/{total_pages})") _set_content(handle, "tvshows") if total_pages > 1 and page > 1: @@ -2410,16 +2628,16 @@ def _show_popular(plugin_name: str | None = 
None, page: int = 1) -> None: end = start + page_size page_items = titles[start:end] - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if page_items: - plugin_meta = _collect_plugin_metadata(plugin, page_items) - show_tmdb = show_tmdb and _tmdb_enabled() + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, page_items) if use_source else {} show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) - prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - tmdb_titles = list(page_items) - if show_tmdb and prefer_source: + tmdb_titles = list(page_items) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: tmdb_titles = [] for title in page_items: meta = plugin_meta.get(title) @@ -2428,10 +2646,10 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): tmdb_titles.append(title) if show_tmdb and tmdb_titles: - with _busy_dialog(): + with _busy_dialog(f"{POPULAR_MENU_LABEL} wird geladen..."): tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) for title in page_items: - tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) meta = plugin_meta.get(title) info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) info_labels.setdefault("mediatype", "tvshow") @@ -2455,7 +2673,7 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: if total_pages > 1 and page < total_pages: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "popular", {"plugin": plugin_name, 
"page": str(page + 1)}, is_folder=True, @@ -2465,15 +2683,15 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: sources = _plugins_with_popular() if not sources: - xbmcgui.Dialog().notification("Beliebte Serien", "Keine Quellen gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(POPULAR_MENU_LABEL, "Keine Quellen gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return - xbmcplugin.setPluginCategory(handle, "Beliebte Serien") + xbmcplugin.setPluginCategory(handle, POPULAR_MENU_LABEL) for name, plugin, _label in sources: _add_directory_item( handle, - f"Beliebte Serien [{plugin.name}]", + f"{POPULAR_MENU_LABEL} [{plugin.name}]", "popular", {"plugin": name, "page": "1"}, is_folder=True, @@ -2481,7 +2699,7 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: xbmcplugin.endOfDirectory(handle) -def _show_new_titles(plugin_name: str, page: int = 1) -> None: +def _show_new_titles(plugin_name: str, page: int = 1, *, action_name: str = "new_titles") -> None: handle = _get_handle() page_size = 10 page = max(1, int(page or 1)) @@ -2489,13 +2707,13 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None: plugin_name = (plugin_name or "").strip() plugin = _discover_plugins().get(plugin_name) if plugin is None or not _plugin_has_capability(plugin, "new_titles"): - xbmcgui.Dialog().notification("Neue Titel", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return getter = getattr(plugin, "new_titles", None) if not callable(getter): - xbmcgui.Dialog().notification("Neue Titel", "Nicht verfügbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Diese Liste ist nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2503,31 +2721,39 @@ def 
_show_new_titles(plugin_name: str, page: int = 1) -> None: has_more_getter = getattr(plugin, "new_titles_has_more", None) if callable(paging_getter): - xbmcplugin.setPluginCategory(handle, f"Neue Titel [{plugin_name}] ({page})") + xbmcplugin.setPluginCategory(handle, f"{LATEST_MENU_LABEL} [{plugin_name}] ({page})") _set_content(handle, "movies" if plugin_name.casefold() == "einschalten" else "tvshows") if page > 1: _add_directory_item( handle, "Vorherige Seite", - "new_titles", + action_name, {"plugin": plugin_name, "page": str(page - 1)}, is_folder=True, ) try: - page_items = list(paging_getter(page) or []) + page_items = _run_with_progress( + LATEST_MENU_LABEL, + f"{plugin_name}: Seite {page} wird geladen...", + lambda: list(paging_getter(page) or []), + ) except Exception as exc: - _log(f"Neue Titel konnten nicht geladen werden ({plugin_name} p{page}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Neue Titel", "Titel konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log(f"{LATEST_MENU_LABEL} konnten nicht geladen werden ({plugin_name} p{page}): {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Titel konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page_items = [str(t).strip() for t in page_items if t and str(t).strip()] page_items.sort(key=lambda value: value.casefold()) else: try: - titles = list(getter() or []) + titles = _run_with_progress( + LATEST_MENU_LABEL, + f"{plugin_name}: Liste wird geladen...", + lambda: list(getter() or []), + ) except Exception as exc: - _log(f"Neue Titel konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Neue Titel", "Titel konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log(f"{LATEST_MENU_LABEL} konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Titel konnten nicht geladen 
werden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2536,21 +2762,21 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None: total = len(titles) if total == 0: xbmcgui.Dialog().notification( - "Neue Titel", - "Keine Titel gefunden (Basis-URL/Index prüfen).", + LATEST_MENU_LABEL, + "Keine Titel gefunden. Bitte Basis-URL oder Index pruefen.", xbmcgui.NOTIFICATION_INFO, 4000, ) total_pages = max(1, (total + page_size - 1) // page_size) page = min(page, total_pages) - xbmcplugin.setPluginCategory(handle, f"Neue Titel [{plugin_name}] ({page}/{total_pages})") + xbmcplugin.setPluginCategory(handle, f"{LATEST_MENU_LABEL} [{plugin_name}] ({page}/{total_pages})") _set_content(handle, "movies" if plugin_name.casefold() == "einschalten" else "tvshows") if total_pages > 1 and page > 1: _add_directory_item( handle, "Vorherige Seite", - "new_titles", + action_name, {"plugin": plugin_name, "page": str(page - 1)}, is_folder=True, ) @@ -2558,48 +2784,50 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None: start = (page - 1) * page_size end = start + page_size page_items = titles[start:end] - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if page_items: - if show_tmdb: - with _busy_dialog(): - tmdb_prefetched = _tmdb_labels_and_art_bulk(page_items) - for title in page_items: - info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "movie") - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - display_label, - "play_movie" if direct_play else 
"seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, page_items) if use_source else {} + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + tmdb_titles = list(page_items) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: + tmdb_titles = [] for title in page_items: - playstate = _title_playstate(plugin_name, title) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=_apply_playstate_to_info({"title": title}, playstate), - ) + meta = plugin_meta.get(title) + meta_labels = meta[0] if meta else {} + meta_art = meta[1] if meta else {} + if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): + tmdb_titles.append(title) + if show_tmdb and tmdb_titles: + with _busy_dialog(f"{LATEST_MENU_LABEL} wird geladen..."): + tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) + for title in page_items: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) + info_labels.setdefault("mediatype", "movie") + playstate = _title_playstate(plugin_name, 
title) + info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = _label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + direct_play = bool( + plugin_name.casefold() == "einschalten" + and _get_setting_bool("einschalten_enable_playback", default=False) + ) + _add_directory_item( + handle, + display_label, + "play_movie" if direct_play else "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=not direct_play, + info_labels=info_labels, + art=art, + cast=cast, + ) show_next = False if callable(paging_getter) and callable(has_more_getter): @@ -2613,8 +2841,8 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None: if show_next: _add_directory_item( handle, - "Nächste Seite", - "new_titles", + "Naechste Seite", + action_name, {"plugin": plugin_name, "page": str(page + 1)}, is_folder=True, ) @@ -2626,25 +2854,28 @@ def _show_latest_episodes(plugin_name: str, page: int = 1) -> None: plugin_name = (plugin_name or "").strip() plugin = _discover_plugins().get(plugin_name) if not plugin: - xbmcgui.Dialog().notification("Neueste Folgen", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return getter = getattr(plugin, "latest_episodes", None) if not callable(getter): - xbmcgui.Dialog().notification("Neueste Folgen", "Nicht unterstützt.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Diese Quelle bietet das nicht an.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return - xbmcplugin.setPluginCategory(handle, f"{plugin_name}: Neueste Folgen") + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: {LATEST_MENU_LABEL}") _set_content(handle, "episodes") try: - with _busy_dialog(): - entries = list(getter(page) or []) + entries = 
_run_with_progress( + LATEST_MENU_LABEL, + f"{plugin_name}: Seite {page} wird geladen...", + lambda: list(getter(page) or []), + ) except Exception as exc: - _log(f"Neueste Folgen fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Neueste Folgen", "Abruf fehlgeschlagen.", xbmcgui.NOTIFICATION_INFO, 3000) + _log(f"{LATEST_MENU_LABEL} fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Abruf fehlgeschlagen.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2697,13 +2928,32 @@ def _show_latest_episodes(plugin_name: str, page: int = 1) -> None: xbmcplugin.endOfDirectory(handle) +def _show_latest_titles(plugin_name: str, page: int = 1) -> None: + plugin_name = (plugin_name or "").strip() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + handle = _get_handle() + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + if _plugin_has_capability(plugin, "latest_episodes"): + _show_latest_episodes(plugin_name, page) + return + if _plugin_has_capability(plugin, "new_titles"): + _show_new_titles(plugin_name, page, action_name="latest_titles") + return + handle = _get_handle() + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Diese Quelle bietet das nicht an.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + + def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page: int = 1) -> None: handle = _get_handle() page_size = 10 page = max(1, int(page or 1)) plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Genres", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Genres", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2711,7 +2961,12 @@ def 
_show_genre_series_group(plugin_name: str, genre: str, group_code: str, page grouped_has_more = getattr(plugin, "genre_group_has_more", None) if callable(grouped_paging): try: - page_items = [str(t).strip() for t in list(grouped_paging(genre, group_code, page, page_size) or []) if t and str(t).strip()] + raw_items = _run_with_progress( + "Genres", + f"{plugin_name}: {genre} [{group_code}] Seite {page} wird geladen...", + lambda: list(grouped_paging(genre, group_code, page, page_size) or []), + ) + page_items = [str(t).strip() for t in raw_items if t and str(t).strip()] except Exception as exc: _log(f"Genre-Serien konnten nicht geladen werden ({plugin_name}/{genre}/{group_code} p{page}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Genres", "Serien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -2719,7 +2974,9 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page return xbmcplugin.setPluginCategory(handle, f"{genre} [{group_code}] ({page})") - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) if page > 1: _add_directory_item( handle, @@ -2729,40 +2986,44 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page is_folder=True, ) if page_items: - if show_tmdb: - with _busy_dialog(): - tmdb_prefetched = _tmdb_labels_and_art_bulk(page_items) - for title in page_items: - info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = 
_label_with_playstate(display_label, playstate) - _add_directory_item( - handle, - display_label, - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: + plugin_meta = _collect_plugin_metadata(plugin, page_items) if use_source else {} + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + tmdb_titles = list(page_items) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: + tmdb_titles = [] for title in page_items: - playstate = _title_playstate(plugin_name, title) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=_apply_playstate_to_info({"title": title}, playstate), - ) + meta = plugin_meta.get(title) + meta_labels = meta[0] if meta else {} + meta_art = meta[1] if meta else {} + if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): + tmdb_titles.append(title) + if show_tmdb and tmdb_titles: + with _busy_dialog("Genre-Gruppe wird geladen..."): + tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) + for title in page_items: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) + info_labels.setdefault("mediatype", "tvshow") + if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": + info_labels.setdefault("tvshowtitle", title) + playstate = _title_playstate(plugin_name, title) + info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = 
_label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + _add_directory_item( + handle, + display_label, + "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=True, + info_labels=info_labels, + art=art, + cast=cast, + ) show_next = False if callable(grouped_has_more): try: @@ -2774,7 +3035,7 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page if show_next: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "genre_series_group", {"plugin": plugin_name, "genre": genre, "group": group_code, "page": str(page + 1)}, is_folder=True, @@ -2808,48 +3069,54 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page start = (page - 1) * page_size end = start + page_size page_items = filtered[start:end] - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) if page_items: - if show_tmdb: - with _busy_dialog(): - tmdb_prefetched = _tmdb_labels_and_art_bulk(page_items) - for title in page_items: - info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - _add_directory_item( - handle, - display_label, - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: + plugin_meta = 
_collect_plugin_metadata(plugin, page_items) if use_source else {} + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + tmdb_titles = list(page_items) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: + tmdb_titles = [] for title in page_items: - playstate = _title_playstate(plugin_name, title) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=_apply_playstate_to_info({"title": title}, playstate), - ) + meta = plugin_meta.get(title) + meta_labels = meta[0] if meta else {} + meta_art = meta[1] if meta else {} + if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): + tmdb_titles.append(title) + if show_tmdb and tmdb_titles: + with _busy_dialog("Genre-Serien werden geladen..."): + tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) + for title in page_items: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) + info_labels.setdefault("mediatype", "tvshow") + if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": + info_labels.setdefault("tvshowtitle", title) + playstate = _title_playstate(plugin_name, title) + info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = _label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + _add_directory_item( + handle, + display_label, + "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=True, + 
info_labels=info_labels, + art=art, + cast=cast, + ) if total_pages > 1 and page < total_pages: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "genre_series_group", {"plugin": plugin_name, "genre": genre, "group": group_code, "page": str(page + 1)}, is_folder=True, @@ -2865,27 +3132,44 @@ def _open_settings() -> None: addon.openSettings() -def _run_update_check() -> None: - """Stoesst Kodi-Repo- und Addon-Updates an und informiert den Benutzer.""" +def _run_update_check(*, silent: bool = False) -> None: + """Stoesst Kodi-Repo- und Addon-Updates an.""" if xbmc is None: # pragma: no cover - outside Kodi return try: - info_url = _normalize_update_info_url(_get_setting_string("update_repo_url")) - _set_setting_string("update_repo_url", info_url) + info_url = _resolve_update_info_url() _sync_update_version_settings() _update_repository_source(info_url) builtin = getattr(xbmc, "executebuiltin", None) if callable(builtin): builtin("UpdateAddonRepos") builtin("UpdateLocalAddons") - builtin("ActivateWindow(addonbrowser,addons://updates/)") - xbmcgui.Dialog().notification("ViewIT Update", "Update-Pruefung gestartet.", xbmcgui.NOTIFICATION_INFO, 4000) + if not silent: + builtin("ActivateWindow(addonbrowser,addons://updates/)") + if not silent: + xbmcgui.Dialog().notification("Updates", "Update-Check gestartet.", xbmcgui.NOTIFICATION_INFO, 4000) except Exception as exc: _log(f"Update-Pruefung fehlgeschlagen: {exc}", xbmc.LOGWARNING) - try: - xbmcgui.Dialog().notification("ViewIT Update", "Update-Pruefung fehlgeschlagen.", xbmcgui.NOTIFICATION_ERROR, 4000) - except Exception: - pass + if not silent: + try: + xbmcgui.Dialog().notification("Updates", "Update-Check fehlgeschlagen.", xbmcgui.NOTIFICATION_ERROR, 4000) + except Exception: + pass + + +def _maybe_run_auto_update_check(action: str | None) -> None: + action = (action or "").strip() + # Auto-Check nur beim Root-Menue, nicht in jedem Untermenue. 
+ if action: + return + if not _get_setting_bool("auto_update_enabled", default=False): + return + now = int(time.time()) + last = _get_setting_int("auto_update_last_ts", default=0) + if last > 0 and (now - last) < AUTO_UPDATE_INTERVAL_SEC: + return + _set_setting_string("auto_update_last_ts", str(now)) + _run_update_check(silent=True) def _extract_first_int(value: str) -> int | None: @@ -2959,65 +3243,12 @@ def _play_final_link( def _track_playback_and_update_state(key: str) -> None: - if not key: - return - monitor = xbmc.Monitor() if xbmc is not None and hasattr(xbmc, "Monitor") else None - player = xbmc.Player() + return - # Wait for playback start. - started = False - for _ in range(30): - try: - if player.isPlayingVideo(): - started = True - break - except Exception: - pass - if monitor and monitor.waitForAbort(0.5): - return - if not started: - return - last_pos = 0.0 - total = 0.0 - while True: - try: - if not player.isPlayingVideo(): - break - last_pos = float(player.getTime() or 0.0) - total = float(player.getTotalTime() or 0.0) - except Exception: - pass - if monitor and monitor.waitForAbort(1.0): - return - - if total <= 0.0: - return - percent = max(0.0, min(1.0, last_pos / total)) - state: dict[str, object] = {"last_position": int(last_pos), "resume_total": int(total), "percent": percent} - if percent >= WATCHED_THRESHOLD: - state["watched"] = True - state["resume_position"] = 0 - elif last_pos > 0: - state["watched"] = False - state["resume_position"] = int(last_pos) - _set_playstate(key, state) - - # Zusätzlich aggregiert speichern, damit Titel-/Staffel-Listen "gesehen/fortsetzen" - # anzeigen können (für Filme/Serien gleichermaßen). 
- try: - parts = str(key).split("\t") - if len(parts) == 4: - plugin_name, title, season, _episode = parts - plugin_name = (plugin_name or "").strip() - title = (title or "").strip() - season = (season or "").strip() - if plugin_name and title: - _set_playstate(_playstate_key(plugin_name=plugin_name, title=title, season="", episode=""), state) - if season: - _set_playstate(_playstate_key(plugin_name=plugin_name, title=title, season=season, episode=""), state) - except Exception: - pass +def _track_playback_and_update_state_async(key: str) -> None: + # Eigenes Resume/Watched ist deaktiviert; Kodi verwaltet das selbst. + return def _play_episode( @@ -3026,29 +3257,62 @@ def _play_episode( season: str, episode: str, *, + forced_hoster: str = "", + episode_url: str = "", + series_url: str = "", resolve_handle: int | None = None, ) -> None: + episode_url = (episode_url or "").strip() + if episode_url: + _play_episode_url( + plugin_name, + title=title, + season_number=_extract_first_int(season) or 0, + episode_number=_extract_first_int(episode) or 0, + episode_url=episode_url, + season_label_override=season, + episode_label_override=episode, + resolve_handle=resolve_handle, + ) + return + + series_url = (series_url or "").strip() + if series_url: + plugin_for_url = _discover_plugins().get(plugin_name) + remember_series_url = getattr(plugin_for_url, "remember_series_url", None) if plugin_for_url is not None else None + if callable(remember_series_url): + try: + remember_series_url(title, series_url) + except Exception: + pass + _log(f"Play anfordern: {plugin_name} / {title} / {season} / {episode}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Play", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Wiedergabe", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) return available_hosters: list[str] = [] hoster_getter = getattr(plugin, "available_hosters_for", None) if 
callable(hoster_getter): try: - with _busy_dialog(): + with _busy_dialog("Hoster werden geladen..."): available_hosters = list(hoster_getter(title, season, episode) or []) except Exception as exc: _log(f"Hoster laden fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) selected_hoster: str | None = None + forced_hoster = (forced_hoster or "").strip() if available_hosters: - if len(available_hosters) == 1: + if forced_hoster: + for hoster in available_hosters: + if hoster.casefold() == forced_hoster.casefold(): + selected_hoster = hoster + break + if selected_hoster is None and len(available_hosters) == 1: selected_hoster = available_hosters[0] - else: - selected_index = xbmcgui.Dialog().select("Hoster wählen", available_hosters) + elif selected_hoster is None: + selected_index = xbmcgui.Dialog().select("Hoster waehlen", available_hosters) if selected_index is None or selected_index < 0: _log("Play abgebrochen (kein Hoster gewählt).", xbmc.LOGDEBUG) return @@ -3066,11 +3330,12 @@ def _play_episode( try: link = plugin.stream_link_for(title, season, episode) if not link: - _log("Kein Stream-Link gefunden.", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Play", "Kein Stream-Link gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log("Kein Stream gefunden.", xbmc.LOGWARNING) + xbmcgui.Dialog().notification("Wiedergabe", "Kein Stream gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) return _log(f"Stream-Link: {link}", xbmc.LOGDEBUG) final_link = plugin.resolve_stream_link(link) or link + final_link = normalize_resolved_stream_url(final_link, source_url=link) finally: if restore_hosters is not None and callable(preferred_setter): preferred_setter(restore_hosters) @@ -3092,7 +3357,7 @@ def _play_episode( cast=cast, resolve_handle=resolve_handle, ) - _track_playback_and_update_state( + _track_playback_and_update_state_async( _playstate_key(plugin_name=plugin_name, title=title, season=season, episode=episode) ) @@ -3104,21 +3369,25 @@ def _play_episode_url( season_number: 
int, episode_number: int, episode_url: str, + season_label_override: str = "", + episode_label_override: str = "", resolve_handle: int | None = None, ) -> None: - season_label = f"Staffel {season_number}" if season_number > 0 else "" - episode_label = f"Episode {episode_number}" if episode_number > 0 else "" + season_label = (season_label_override or "").strip() or (f"Staffel {season_number}" if season_number > 0 else "") + episode_label = (episode_label_override or "").strip() or ( + f"Episode {episode_number}" if episode_number > 0 else "" + ) _log(f"Play (URL) anfordern: {plugin_name} / {title} / {season_label} / {episode_label} / {episode_url}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Play", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Wiedergabe", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) return available_hosters: list[str] = [] hoster_getter = getattr(plugin, "available_hosters_for_url", None) if callable(hoster_getter): try: - with _busy_dialog(): + with _busy_dialog("Hoster werden geladen..."): available_hosters = list(hoster_getter(episode_url) or []) except Exception as exc: _log(f"Hoster laden fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) @@ -3128,7 +3397,7 @@ def _play_episode_url( if len(available_hosters) == 1: selected_hoster = available_hosters[0] else: - selected_index = xbmcgui.Dialog().select("Hoster wählen", available_hosters) + selected_index = xbmcgui.Dialog().select("Hoster waehlen", available_hosters) if selected_index is None or selected_index < 0: _log("Play abgebrochen (kein Hoster gewählt).", xbmc.LOGDEBUG) return @@ -3145,15 +3414,16 @@ def _play_episode_url( try: link_getter = getattr(plugin, "stream_link_for_url", None) if not callable(link_getter): - xbmcgui.Dialog().notification("Play", "Nicht unterstützt.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Wiedergabe", "Diese Funktion 
wird von der Quelle nicht unterstuetzt.", xbmcgui.NOTIFICATION_INFO, 3000) return link = link_getter(episode_url) if not link: - _log("Kein Stream-Link gefunden.", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Play", "Kein Stream-Link gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log("Kein Stream gefunden.", xbmc.LOGWARNING) + xbmcgui.Dialog().notification("Wiedergabe", "Kein Stream gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) return _log(f"Stream-Link: {link}", xbmc.LOGDEBUG) final_link = plugin.resolve_stream_link(link) or link + final_link = normalize_resolved_stream_url(final_link, source_url=link) finally: if restore_hosters is not None and callable(preferred_setter): preferred_setter(restore_hosters) @@ -3176,7 +3446,7 @@ def _play_episode_url( cast=cast, resolve_handle=resolve_handle, ) - _track_playback_and_update_state( + _track_playback_and_update_state_async( _playstate_key(plugin_name=plugin_name, title=title, season=season_label, episode=episode_label) ) @@ -3193,6 +3463,7 @@ def run() -> None: params = _parse_params() action = params.get("action") _log(f"Action: {action}", xbmc.LOGDEBUG) + _maybe_run_auto_update_check(action) if action == "search": _show_search() elif action == "plugin_menu": @@ -3205,6 +3476,11 @@ def run() -> None: _show_genres(params.get("plugin", "")) elif action == "categories": _show_categories(params.get("plugin", "")) + elif action == "latest_titles": + _show_latest_titles( + params.get("plugin", ""), + _parse_positive_int(params.get("page", "1"), default=1), + ) elif action == "new_titles": _show_new_titles( params.get("plugin", ""), @@ -3276,6 +3552,9 @@ def run() -> None: params.get("title", ""), params.get("season", ""), params.get("episode", ""), + forced_hoster=params.get("hoster", ""), + episode_url=params.get("url", ""), + series_url=params.get("series_url", ""), resolve_handle=_get_handle(), ) elif action == "play_movie": diff --git a/addon/http_session_pool.py b/addon/http_session_pool.py index 725fa43..3abda8e 
100644 --- a/addon/http_session_pool.py +++ b/addon/http_session_pool.py @@ -32,3 +32,12 @@ def get_requests_session(key: str, *, headers: Optional[dict[str, str]] = None): pass return session + +def close_all_sessions() -> None: + """Close and clear all pooled sessions.""" + for session in list(_SESSIONS.values()): + try: + session.close() + except Exception: + pass + _SESSIONS.clear() diff --git a/addon/metadata_utils.py b/addon/metadata_utils.py new file mode 100644 index 0000000..b58f229 --- /dev/null +++ b/addon/metadata_utils.py @@ -0,0 +1,93 @@ +from __future__ import annotations + +import re + +from plugin_interface import BasisPlugin +from tmdb import TmdbCastMember + +METADATA_MODE_AUTO = 0 +METADATA_MODE_SOURCE = 1 +METADATA_MODE_TMDB = 2 +METADATA_MODE_MIX = 3 + + +def metadata_setting_id(plugin_name: str) -> str: + safe = re.sub(r"[^a-z0-9]+", "_", (plugin_name or "").strip().casefold()).strip("_") + return f"{safe}_metadata_source" if safe else "metadata_source" + + +def plugin_supports_metadata(plugin: BasisPlugin) -> bool: + try: + return plugin.__class__.metadata_for is not BasisPlugin.metadata_for + except Exception: + return False + + +def metadata_policy( + plugin_name: str, + plugin: BasisPlugin, + *, + allow_tmdb: bool, + get_setting_int=None, +) -> tuple[bool, bool, bool]: + if not callable(get_setting_int): + return plugin_supports_metadata(plugin), allow_tmdb, bool(getattr(plugin, "prefer_source_metadata", False)) + mode = get_setting_int(metadata_setting_id(plugin_name), default=METADATA_MODE_AUTO) + supports_source = plugin_supports_metadata(plugin) + if mode == METADATA_MODE_SOURCE: + return supports_source, False, True + if mode == METADATA_MODE_TMDB: + return False, allow_tmdb, False + if mode == METADATA_MODE_MIX: + return supports_source, allow_tmdb, True + prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) + return supports_source, allow_tmdb, prefer_source + + +def collect_plugin_metadata( + plugin: BasisPlugin, 
+ titles: list[str], +) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]]: + getter = getattr(plugin, "metadata_for", None) + if not callable(getter): + return {} + collected: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]] = {} + for title in titles: + try: + labels, art, cast = getter(title) + except Exception: + continue + if isinstance(labels, dict) or isinstance(art, dict) or cast: + label_map = {str(k): str(v) for k, v in dict(labels or {}).items() if v} + art_map = {str(k): str(v) for k, v in dict(art or {}).items() if v} + collected[title] = (label_map, art_map, cast if isinstance(cast, list) else None) + return collected + + +def needs_tmdb(labels: dict[str, str], art: dict[str, str], *, want_plot: bool, want_art: bool) -> bool: + if want_plot and not labels.get("plot"): + return True + if want_art and not (art.get("thumb") or art.get("poster") or art.get("fanart") or art.get("landscape")): + return True + return False + + +def merge_metadata( + title: str, + tmdb_labels: dict[str, str] | None, + tmdb_art: dict[str, str] | None, + tmdb_cast: list[TmdbCastMember] | None, + plugin_meta: tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None] | None, +) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]: + labels = dict(tmdb_labels or {}) + art = dict(tmdb_art or {}) + cast = tmdb_cast + if plugin_meta is not None: + meta_labels, meta_art, meta_cast = plugin_meta + labels.update({k: str(v) for k, v in dict(meta_labels or {}).items() if v}) + art.update({k: str(v) for k, v in dict(meta_art or {}).items() if v}) + if meta_cast is not None: + cast = meta_cast + if "title" not in labels: + labels["title"] = title + return labels, art, cast diff --git a/addon/plugin_helpers.py b/addon/plugin_helpers.py index a21c038..31c4d42 100644 --- a/addon/plugin_helpers.py +++ b/addon/plugin_helpers.py @@ -15,7 +15,9 @@ from __future__ import annotations from datetime import datetime 
import hashlib import os +import re from typing import Optional +from urllib.parse import parse_qsl, urlencode try: # pragma: no cover - Kodi runtime import xbmcaddon # type: ignore[import-not-found] @@ -237,3 +239,40 @@ def dump_response_html( max_files = get_setting_int(addon_id, max_files_setting_id, default=200) _prune_dump_files(log_dir, prefix=filename_prefix, max_files=max_files) _append_text_file(path, content) + + +def normalize_resolved_stream_url(final_url: str, *, source_url: str = "") -> str: + """Normalisiert hoster-spezifische Header im finalen Stream-Link. + + `final_url` kann ein Kodi-Header-Suffix enthalten: `url|Key=Value&...`. + Die Funktion passt nur bekannte Problemfaelle an und laesst sonst alles unveraendert. + """ + + url = (final_url or "").strip() + if not url: + return "" + normalized = _normalize_supervideo_serversicuro(url, source_url=source_url) + return normalized + + +def _normalize_supervideo_serversicuro(final_url: str, *, source_url: str = "") -> str: + if "serversicuro.cc/hls/" not in final_url.casefold() or "|" not in final_url: + return final_url + + source = (source_url or "").strip() + code_match = re.search( + r"supervideo\.(?:tv|cc)/(?:e/)?([a-z0-9]+)(?:\\.html)?", + source, + flags=re.IGNORECASE, + ) + if not code_match: + return final_url + + code = (code_match.group(1) or "").strip() + if not code: + return final_url + + media_url, header_suffix = final_url.split("|", 1) + headers = dict(parse_qsl(header_suffix, keep_blank_values=True)) + headers["Referer"] = f"https://supervideo.cc/e/{code}" + return f"{media_url}|{urlencode(headers)}" diff --git a/addon/plugin_interface.py b/addon/plugin_interface.py index f8c266d..83948d7 100644 --- a/addon/plugin_interface.py +++ b/addon/plugin_interface.py @@ -4,7 +4,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import List, Optional, Set +from typing import Any, Callable, Dict, List, Optional, Set, Tuple class BasisPlugin(ABC): @@ -12,9 
+12,14 @@ class BasisPlugin(ABC): name: str version: str = "0.0.0" + prefer_source_metadata: bool = False @abstractmethod - async def search_titles(self, query: str) -> List[str]: + async def search_titles( + self, + query: str, + progress_callback: Optional[Callable[[str, Optional[int]], Any]] = None, + ) -> List[str]: """Liefert eine Liste aller Treffer fuer die Suche.""" @abstractmethod @@ -29,6 +34,10 @@ class BasisPlugin(ABC): """Optional: Liefert den Stream-Link fuer eine konkrete Folge.""" return None + def metadata_for(self, title: str) -> Tuple[Dict[str, str], Dict[str, str], Optional[List[Any]]]: + """Optional: Liefert Info-Labels, Art und Cast fuer einen Titel.""" + return {}, {}, None + def resolve_stream_link(self, link: str) -> Optional[str]: """Optional: Folgt einem Stream-Link und liefert die finale URL.""" return None diff --git a/addon/plugins/__pycache__/__init__.cpython-312.pyc b/addon/plugins/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 26e3918..0000000 Binary files a/addon/plugins/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/addon/plugins/__pycache__/_template_plugin.cpython-312.pyc b/addon/plugins/__pycache__/_template_plugin.cpython-312.pyc deleted file mode 100644 index 4d0c98b..0000000 Binary files a/addon/plugins/__pycache__/_template_plugin.cpython-312.pyc and /dev/null differ diff --git a/addon/plugins/__pycache__/einschalten_plugin.cpython-312.pyc b/addon/plugins/__pycache__/einschalten_plugin.cpython-312.pyc deleted file mode 100644 index b5da04f..0000000 Binary files a/addon/plugins/__pycache__/einschalten_plugin.cpython-312.pyc and /dev/null differ diff --git a/addon/plugins/_template_plugin.py b/addon/plugins/_template_plugin.py index a5244e2..5096443 100644 --- a/addon/plugins/_template_plugin.py +++ b/addon/plugins/_template_plugin.py @@ -9,7 +9,7 @@ Zum Verwenden: from __future__ import annotations from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, List, 
Optional, TypeAlias +from typing import TYPE_CHECKING, Any, Callable, List, Optional try: # pragma: no cover - optional dependency import requests @@ -34,8 +34,8 @@ if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any ADDON_ID = "plugin.video.viewit" @@ -88,9 +88,13 @@ class TemplatePlugin(BasisPlugin): self._session = session return self._session - async def search_titles(self, query: str) -> List[str]: + async def search_titles( + self, + query: str, + progress_callback: Optional[Callable[[str, Optional[int]], Any]] = None, + ) -> List[str]: """TODO: Suche auf der Zielseite implementieren.""" - _ = query + _ = (query, progress_callback) return [] def seasons_for(self, title: str) -> List[str]: diff --git a/addon/plugins/aniworld_plugin.py b/addon/plugins/aniworld_plugin.py index 7271a7a..943e865 100644 --- a/addon/plugins/aniworld_plugin.py +++ b/addon/plugins/aniworld_plugin.py @@ -13,7 +13,8 @@ import hashlib import json import re import time -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple +from urllib.parse import quote try: # pragma: no cover - optional dependency import requests @@ -43,8 +44,8 @@ if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any SETTING_BASE_URL = "aniworld_base_url" @@ -69,6 +70,16 @@ HEADERS = { SESSION_CACHE_TTL_SECONDS = 300 SESSION_CACHE_PREFIX = "viewit.aniworld" SESSION_CACHE_MAX_TITLE_URLS = 800 +ProgressCallback = 
Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return @dataclass @@ -126,7 +137,7 @@ def _latest_episodes_url() -> str: def _search_url(query: str) -> str: - return f"{_get_base_url()}/search?q={query}" + return f"{_get_base_url()}/search?q={quote((query or '').strip())}" def _search_api_url() -> str: @@ -289,37 +300,56 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif _ensure_requests() _log_visit(url) sess = session or get_requests_session("aniworld", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: _log_error(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url(response.url, kind="REDIRECT") - _log_response_html(url, response.text) - if _looks_like_cloudflare_challenge(response.text): - raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") - return BeautifulSoup(response.text, "html.parser") + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url(final_url, kind="REDIRECT") + _log_response_html(url, body) + if _looks_like_cloudflare_challenge(body): + raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. 
nicht aus.") + return BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _get_html_simple(url: str) -> str: _ensure_requests() _log_visit(url) sess = get_requests_session("aniworld", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: _log_error(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url(response.url, kind="REDIRECT") - body = response.text - _log_response_html(url, body) - if _looks_like_cloudflare_challenge(body): - raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") - return body + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url(final_url, kind="REDIRECT") + _log_response_html(url, body) + if _looks_like_cloudflare_challenge(body): + raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") + return body + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _get_soup_simple(url: str) -> BeautifulSoupT: @@ -351,17 +381,27 @@ def _post_json(url: str, *, payload: Dict[str, str], session: Optional[RequestsS _ensure_requests() _log_visit(url) sess = session or get_requests_session("aniworld", headers=HEADERS) - response = sess.post(url, data=payload, headers=HEADERS, timeout=DEFAULT_TIMEOUT) - response.raise_for_status() - if response.url and response.url != url: - _log_url(response.url, kind="REDIRECT") - _log_response_html(url, response.text) - if _looks_like_cloudflare_challenge(response.text): - raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. 
nicht aus.") + response = None try: - return response.json() - except Exception: - return None + response = sess.post(url, data=payload, headers=HEADERS, timeout=DEFAULT_TIMEOUT) + response.raise_for_status() + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url(final_url, kind="REDIRECT") + _log_response_html(url, body) + if _looks_like_cloudflare_challenge(body): + raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") + try: + return response.json() + except Exception: + return None + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _extract_canonical_url(soup: BeautifulSoupT, fallback: str) -> str: @@ -555,10 +595,18 @@ def resolve_redirect(target_url: str) -> Optional[str]: _log_visit(normalized_url) session = get_requests_session("aniworld", headers=HEADERS) _get_soup(_get_base_url(), session=session) - response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True) - if response.url: - _log_url(response.url, kind="RESOLVED") - return response.url if response.url else None + response = None + try: + response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True) + if response.url: + _log_url(response.url, kind="RESOLVED") + return response.url if response.url else None + finally: + if response is not None: + try: + response.close() + except Exception: + pass def fetch_episode_hoster_names(episode_url: str) -> List[str]: @@ -629,11 +677,12 @@ def fetch_episode_stream_link( return resolved -def search_animes(query: str) -> List[SeriesResult]: +def search_animes(query: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]: _ensure_requests() query = (query or "").strip() if not query: return [] + _emit_progress(progress_callback, "AniWorld API-Suche", 15) session = 
get_requests_session("aniworld", headers=HEADERS) try: session.get(_get_base_url(), headers=HEADERS, timeout=DEFAULT_TIMEOUT) @@ -643,7 +692,9 @@ def search_animes(query: str) -> List[SeriesResult]: results: List[SeriesResult] = [] seen: set[str] = set() if isinstance(data, list): - for entry in data: + for idx, entry in enumerate(data, start=1): + if idx == 1 or idx % 50 == 0: + _emit_progress(progress_callback, f"API auswerten {idx}/{len(data)}", 35) if not isinstance(entry, dict): continue title = _strip_html((entry.get("title") or "").strip()) @@ -665,10 +716,16 @@ def search_animes(query: str) -> List[SeriesResult]: seen.add(key) description = (entry.get("description") or "").strip() results.append(SeriesResult(title=title, description=description, url=url)) + _emit_progress(progress_callback, f"API-Treffer: {len(results)}", 85) return results - soup = _get_soup_simple(_search_url(requests.utils.quote(query))) - for anchor in soup.select("a[href^='/anime/stream/'][href]"): + _emit_progress(progress_callback, "HTML-Suche (Fallback)", 55) + soup = _get_soup_simple(_search_url(query)) + anchors = soup.select("a[href^='/anime/stream/'][href]") + total_anchors = max(1, len(anchors)) + for idx, anchor in enumerate(anchors, start=1): + if idx == 1 or idx % 100 == 0: + _emit_progress(progress_callback, f"HTML auswerten {idx}/{total_anchors}", 70) href = (anchor.get("href") or "").strip() if not href or "/staffel-" in href or "/episode-" in href: continue @@ -686,6 +743,7 @@ def search_animes(query: str) -> List[SeriesResult]: continue seen.add(key) results.append(SeriesResult(title=title, description="", url=url)) + _emit_progress(progress_callback, f"HTML-Treffer: {len(results)}", 85) return results @@ -696,6 +754,7 @@ class AniworldPlugin(BasisPlugin): def __init__(self) -> None: self._anime_results: Dict[str, SeriesResult] = {} self._title_url_cache: Dict[str, str] = self._load_title_url_cache() + self._title_meta: Dict[str, tuple[str, str]] = {} 
self._genre_names_cache: Optional[List[str]] = None self._season_cache: Dict[str, List[SeasonInfo]] = {} self._season_links_cache: Dict[str, List[SeasonInfo]] = {} @@ -760,8 +819,135 @@ class AniworldPlugin(BasisPlugin): changed = True if changed and persist: self._save_title_url_cache() + if description: + old_plot, old_poster = self._title_meta.get(title, ("", "")) + self._title_meta[title] = (description.strip() or old_plot, old_poster) return changed + def _store_title_meta(self, title: str, *, plot: str = "", poster: str = "") -> None: + title = (title or "").strip() + if not title: + return + old_plot, old_poster = self._title_meta.get(title, ("", "")) + merged_plot = (plot or old_plot or "").strip() + merged_poster = (poster or old_poster or "").strip() + self._title_meta[title] = (merged_plot, merged_poster) + + @staticmethod + def _is_series_image_url(url: str) -> bool: + value = (url or "").strip().casefold() + if not value: + return False + blocked = ( + "/public/img/facebook", + "/public/img/logo", + "aniworld-logo", + "favicon", + "/public/img/german.svg", + "/public/img/japanese-", + ) + return not any(marker in value for marker in blocked) + + @staticmethod + def _extract_style_url(style_value: str) -> str: + style_value = (style_value or "").strip() + if not style_value: + return "" + match = re.search(r"url\((['\"]?)(.*?)\1\)", style_value, flags=re.IGNORECASE) + if not match: + return "" + return (match.group(2) or "").strip() + + def _extract_series_metadata(self, soup: BeautifulSoupT) -> tuple[str, str, str]: + if not soup: + return "", "", "" + plot = "" + poster = "" + fanart = "" + + root = soup.select_one("#series") or soup + + description_node = root.select_one("p.seri_des") + if description_node is not None: + full_text = (description_node.get("data-full-description") or "").strip() + short_text = (description_node.get_text(" ", strip=True) or "").strip() + plot = full_text or short_text + + if not plot: + for selector in 
("meta[property='og:description']", "meta[name='description']"): + node = soup.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + plot = content + break + if not plot: + for selector in (".series-description", ".seri_des", ".description", "article p"): + node = soup.select_one(selector) + if node is None: + continue + text = (node.get_text(" ", strip=True) or "").strip() + if text: + plot = text + break + + cover = root.select_one("div.seriesCoverBox img[itemprop='image'], div.seriesCoverBox img") + if cover is not None: + for attr in ("data-src", "src"): + value = (cover.get(attr) or "").strip() + if value: + candidate = _absolute_url(value) + if self._is_series_image_url(candidate): + poster = candidate + break + + if not poster: + for selector in ("meta[property='og:image']", "meta[name='twitter:image']"): + node = soup.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + candidate = _absolute_url(content) + if self._is_series_image_url(candidate): + poster = candidate + break + if not poster: + for selector in ("img.seriesCoverBox", ".seriesCoverBox img"): + image = soup.select_one(selector) + if image is None: + continue + value = (image.get("data-src") or image.get("src") or "").strip() + if value: + candidate = _absolute_url(value) + if self._is_series_image_url(candidate): + poster = candidate + break + + backdrop_node = root.select_one("section.title .backdrop, .SeriesSection .backdrop, .backdrop") + if backdrop_node is not None: + raw_style = (backdrop_node.get("style") or "").strip() + style_url = self._extract_style_url(raw_style) + if style_url: + candidate = _absolute_url(style_url) + if self._is_series_image_url(candidate): + fanart = candidate + + if not fanart: + for selector in ("meta[property='og:image']",): + node = soup.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + 
if content: + candidate = _absolute_url(content) + if self._is_series_image_url(candidate): + fanart = candidate + break + + return plot, poster, fanart + @staticmethod def _season_links_cache_name(series_url: str) -> str: digest = hashlib.sha1((series_url or "").encode("utf-8")).hexdigest()[:20] @@ -893,6 +1079,43 @@ class AniworldPlugin(BasisPlugin): return None + def metadata_for(self, title: str) -> tuple[dict[str, str], dict[str, str], list[object] | None]: + title = (title or "").strip() + if not title: + return {}, {}, None + + info: dict[str, str] = {"title": title} + art: dict[str, str] = {} + cached_plot, cached_poster = self._title_meta.get(title, ("", "")) + if cached_plot: + info["plot"] = cached_plot + if cached_poster: + art = {"thumb": cached_poster, "poster": cached_poster} + if "plot" in info and art: + return info, art, None + + series = self._find_series_by_title(title) + if series is None or not series.url: + return info, art, None + if series.description and "plot" not in info: + info["plot"] = series.description + + try: + soup = _get_soup(series.url, session=get_requests_session("aniworld", headers=HEADERS)) + plot, poster, fanart = self._extract_series_metadata(soup) + except Exception: + plot, poster, fanart = "", "", "" + + if plot: + info["plot"] = plot + if poster: + art = {"thumb": poster, "poster": poster} + if fanart: + art["fanart"] = fanart + art["landscape"] = fanart + self._store_title_meta(title, plot=info.get("plot", ""), poster=poster) + return info, art, None + def _ensure_popular(self) -> List[SeriesResult]: if self._popular_cache is not None: return list(self._popular_cache) @@ -1151,7 +1374,7 @@ class AniworldPlugin(BasisPlugin): return self._episode_label_cache.get(cache_key, {}).get(episode_label) return None - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: query = (query or "").strip() if not query: 
self._anime_results.clear() @@ -1163,7 +1386,8 @@ class AniworldPlugin(BasisPlugin): if not self._requests_available: raise RuntimeError("AniworldPlugin kann ohne requests/bs4 nicht suchen.") try: - results = search_animes(query) + _emit_progress(progress_callback, "AniWorld Suche startet", 10) + results = search_animes(query, progress_callback=progress_callback) except Exception as exc: # pragma: no cover self._anime_results.clear() self._season_cache.clear() @@ -1178,6 +1402,7 @@ class AniworldPlugin(BasisPlugin): self._season_cache.clear() self._season_links_cache.clear() self._episode_label_cache.clear() + _emit_progress(progress_callback, f"Treffer aufbereitet: {len(results)}", 95) return [result.title for result in results] def _ensure_seasons(self, title: str) -> List[SeasonInfo]: @@ -1213,6 +1438,18 @@ class AniworldPlugin(BasisPlugin): _log_url(link, kind="FOUND") return link + def episode_url_for(self, title: str, season: str, episode: str) -> str: + cache_key = (title, season) + cached = self._episode_label_cache.get(cache_key) + if cached: + info = cached.get(episode) + if info and info.url: + return info.url + episode_info = self._lookup_episode(title, season, episode) + if episode_info and episode_info.url: + return episode_info.url + return "" + def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]: if not self._requests_available: raise RuntimeError("AniworldPlugin kann ohne requests/bs4 keine Hoster laden.") diff --git a/addon/plugins/dokustreams_plugin.py b/addon/plugins/dokustreams_plugin.py index 047a652..58fffc8 100644 --- a/addon/plugins/dokustreams_plugin.py +++ b/addon/plugins/dokustreams_plugin.py @@ -5,7 +5,7 @@ from __future__ import annotations from dataclasses import dataclass import re from urllib.parse import quote -from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypeAlias +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional try: # pragma: no cover - optional dependency 
import requests @@ -27,8 +27,8 @@ if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any ADDON_ID = "plugin.video.viewit" @@ -44,6 +44,16 @@ SETTING_LOG_URLS = "log_urls_dokustreams" SETTING_DUMP_HTML = "dump_html_dokustreams" SETTING_SHOW_URL_INFO = "show_url_info_dokustreams" SETTING_LOG_ERRORS = "log_errors_dokustreams" +ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return HEADERS = { "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", @@ -213,16 +223,26 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif raise RuntimeError("requests/bs4 sind nicht verfuegbar.") _log_visit(url) sess = session or get_requests_session("dokustreams", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: _log_error_message(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url_event(response.url, kind="REDIRECT") - _log_response_html(url, response.text) - return BeautifulSoup(response.text, "html.parser") + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url_event(final_url, kind="REDIRECT") + _log_response_html(url, body) + return BeautifulSoup(body, "html.parser") + finally: + if 
response is not None: + try: + response.close() + except Exception: + pass class DokuStreamsPlugin(BasisPlugin): @@ -247,14 +267,17 @@ class DokuStreamsPlugin(BasisPlugin): if REQUESTS_IMPORT_ERROR: print(f"DokuStreamsPlugin Importfehler: {REQUESTS_IMPORT_ERROR}") - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: + _emit_progress(progress_callback, "Doku-Streams Suche", 15) hits = self._search_hits(query) + _emit_progress(progress_callback, f"Treffer verarbeiten ({len(hits)})", 70) self._title_to_url = {hit.title: hit.url for hit in hits if hit.title and hit.url} for hit in hits: if hit.title: self._title_meta[hit.title] = (hit.plot, hit.poster) titles = [hit.title for hit in hits if hit.title] titles.sort(key=lambda value: value.casefold()) + _emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95) return titles def _search_hits(self, query: str) -> List[SearchHit]: diff --git a/addon/plugins/einschalten_plugin.py b/addon/plugins/einschalten_plugin.py index 62f35b5..b6aea00 100644 --- a/addon/plugins/einschalten_plugin.py +++ b/addon/plugins/einschalten_plugin.py @@ -11,7 +11,7 @@ from __future__ import annotations import json import re from dataclasses import dataclass -from typing import Any, Dict, List, Optional, Set +from typing import Any, Callable, Dict, List, Optional, Set from urllib.parse import urlencode, urljoin, urlsplit try: # pragma: no cover - optional dependency (Kodi dependency) @@ -43,7 +43,7 @@ SETTING_DUMP_HTML = "dump_html_einschalten" SETTING_SHOW_URL_INFO = "show_url_info_einschalten" SETTING_LOG_ERRORS = "log_errors_einschalten" -DEFAULT_BASE_URL = "" +DEFAULT_BASE_URL = "https://einschalten.in" DEFAULT_INDEX_PATH = "/" DEFAULT_NEW_TITLES_PATH = "/movies/new" DEFAULT_SEARCH_PATH = "/search" @@ -56,6 +56,16 @@ HEADERS = { "Accept-Language": "de-DE,de;q=0.9,en;q=0.8", "Connection": "keep-alive", } +ProgressCallback = 
Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return @dataclass(frozen=True) @@ -526,6 +536,34 @@ class EinschaltenPlugin(BasisPlugin): self._session = requests.Session() return self._session + def _http_get_text(self, url: str, *, timeout: int = 20) -> tuple[str, str]: + _log_url(url, kind="GET") + _notify_url(url) + sess = self._get_session() + response = None + try: + response = sess.get(url, headers=HEADERS, timeout=timeout) + response.raise_for_status() + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + _log_url(final_url, kind="OK") + _log_response_html(final_url, body) + return final_url, body + finally: + if response is not None: + try: + response.close() + except Exception: + pass + + def _http_get_json(self, url: str, *, timeout: int = 20) -> tuple[str, Any]: + final_url, body = self._http_get_text(url, timeout=timeout) + try: + payload = json.loads(body or "{}") + except Exception: + payload = {} + return final_url, payload + def _get_base_url(self) -> str: base = _get_setting_text(SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip() return base.rstrip("/") @@ -646,15 +684,9 @@ class EinschaltenPlugin(BasisPlugin): if not url: return "" try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - self._detail_html_by_id[movie_id] = resp.text or "" - return resp.text or "" + _, body = self._http_get_text(url, timeout=20) + self._detail_html_by_id[movie_id] = body + return body except Exception as exc: _log_error(f"GET {url} failed: {exc}") return 
"" @@ -667,16 +699,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return {} try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - # Some backends may return JSON with a JSON content-type; for debugging we still dump text. - _log_response_html(resp.url or url, resp.text) - data = resp.json() - return dict(data) if isinstance(data, dict) else {} + _, data = self._http_get_json(url, timeout=20) + return data except Exception as exc: _log_error(f"GET {url} failed: {exc}") return {} @@ -741,14 +765,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return [] try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) return _parse_ng_state_movies(payload) except Exception: return [] @@ -759,14 +777,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return [] try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) movies = _parse_ng_state_movies(payload) _log_debug_line(f"parse_ng_state_movies:count={len(movies)}") if movies: @@ -784,14 +796,8 @@ class EinschaltenPlugin(BasisPlugin): if page > 1: url = f"{url}?{urlencode({'page': str(page)})}" try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, 
timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) movies, has_more, current_page = _parse_ng_state_movies_with_pagination(payload) _log_debug_line(f"parse_ng_state_movies_page:page={page} count={len(movies)}") if has_more is not None: @@ -844,14 +850,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return [] try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) results = _parse_ng_state_search_results(payload) return _filter_movies_by_title(query, results) except Exception: @@ -867,13 +867,7 @@ class EinschaltenPlugin(BasisPlugin): api_url = self._api_genres_url() if api_url: try: - _log_url(api_url, kind="GET") - _notify_url(api_url) - sess = self._get_session() - resp = sess.get(api_url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or api_url, kind="OK") - payload = resp.json() + _, payload = self._http_get_json(api_url, timeout=20) if isinstance(payload, list): parsed: Dict[str, int] = {} for item in payload: @@ -900,14 +894,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) parsed = 
_parse_ng_state_genres(payload) if parsed: self._genre_id_by_name.clear() @@ -915,7 +903,7 @@ class EinschaltenPlugin(BasisPlugin): except Exception: return - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: if not REQUESTS_AVAILABLE: return [] query = (query or "").strip() @@ -924,9 +912,12 @@ class EinschaltenPlugin(BasisPlugin): if not self._get_base_url(): return [] + _emit_progress(progress_callback, "Einschalten Suche", 15) movies = self._fetch_search_movies(query) if not movies: + _emit_progress(progress_callback, "Fallback: Index filtern", 45) movies = _filter_movies_by_title(query, self._load_movies()) + _emit_progress(progress_callback, f"Treffer verarbeiten ({len(movies)})", 75) titles: List[str] = [] seen: set[str] = set() for movie in movies: @@ -936,6 +927,7 @@ class EinschaltenPlugin(BasisPlugin): self._id_by_title[movie.title] = movie.id titles.append(movie.title) titles.sort(key=lambda value: value.casefold()) + _emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95) return titles def genres(self) -> List[str]: @@ -971,14 +963,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return [] try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) except Exception: return [] if not isinstance(payload, dict): @@ -1079,3 +1065,7 @@ class EinschaltenPlugin(BasisPlugin): return [] # Backwards compatible: first page only. UI uses paging via `new_titles_page`. return self.new_titles_page(1) + + +# Alias für die automatische Plugin-Erkennung. 
+Plugin = EinschaltenPlugin diff --git a/addon/plugins/filmpalast_plugin.py b/addon/plugins/filmpalast_plugin.py index 82c6509..cbabffd 100644 --- a/addon/plugins/filmpalast_plugin.py +++ b/addon/plugins/filmpalast_plugin.py @@ -11,7 +11,7 @@ from dataclasses import dataclass import re from urllib.parse import quote, urlencode from urllib.parse import urljoin -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple try: # pragma: no cover - optional dependency import requests @@ -33,8 +33,8 @@ if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any ADDON_ID = "plugin.video.viewit" @@ -53,6 +53,16 @@ SETTING_LOG_URLS = "log_urls_filmpalast" SETTING_DUMP_HTML = "dump_html_filmpalast" SETTING_SHOW_URL_INFO = "show_url_info_filmpalast" SETTING_LOG_ERRORS = "log_errors_filmpalast" +ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return HEADERS = { "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", @@ -206,16 +216,26 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif raise RuntimeError("requests/bs4 sind nicht verfuegbar.") _log_visit(url) sess = session or get_requests_session("filmpalast", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() 
except Exception as exc: _log_error_message(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url_event(response.url, kind="REDIRECT") - _log_response_html(url, response.text) - return BeautifulSoup(response.text, "html.parser") + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url_event(final_url, kind="REDIRECT") + _log_response_html(url, body) + return BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass class FilmpalastPlugin(BasisPlugin): @@ -224,6 +244,7 @@ class FilmpalastPlugin(BasisPlugin): def __init__(self) -> None: self._title_to_url: Dict[str, str] = {} + self._title_meta: Dict[str, tuple[str, str]] = {} self._series_entries: Dict[str, Dict[int, Dict[int, EpisodeEntry]]] = {} self._hoster_cache: Dict[str, Dict[str, str]] = {} self._genre_to_url: Dict[str, str] = {} @@ -352,6 +373,7 @@ class FilmpalastPlugin(BasisPlugin): seen_titles: set[str] = set() seen_urls: set[str] = set() for base_url, params in search_requests: + response = None try: request_url = base_url if not params else f"{base_url}?{urlencode(params)}" _log_url_event(request_url, kind="GET") @@ -365,6 +387,12 @@ class FilmpalastPlugin(BasisPlugin): except Exception as exc: _log_error_message(f"search request failed ({base_url}): {exc}") continue + finally: + if response is not None: + try: + response.close() + except Exception: + pass anchors = soup.select("article.liste h2 a[href], article.liste h3 a[href]") if not anchors: @@ -466,9 +494,13 @@ class FilmpalastPlugin(BasisPlugin): titles.sort(key=lambda value: value.casefold()) return titles - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: + _emit_progress(progress_callback, "Filmpalast Suche", 15) hits = 
self._search_hits(query) - return self._apply_hits_to_title_index(hits) + _emit_progress(progress_callback, f"Treffer verarbeiten ({len(hits)})", 70) + titles = self._apply_hits_to_title_index(hits) + _emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95) + return titles def _parse_genres(self, soup: BeautifulSoupT) -> Dict[str, str]: genres: Dict[str, str] = {} @@ -691,6 +723,64 @@ class FilmpalastPlugin(BasisPlugin): return hit.url return "" + def _store_title_meta(self, title: str, *, plot: str = "", poster: str = "") -> None: + title = (title or "").strip() + if not title: + return + old_plot, old_poster = self._title_meta.get(title, ("", "")) + merged_plot = (plot or old_plot or "").strip() + merged_poster = (poster or old_poster or "").strip() + self._title_meta[title] = (merged_plot, merged_poster) + + def _extract_detail_metadata(self, soup: BeautifulSoupT) -> tuple[str, str]: + if not soup: + return "", "" + root = soup.select_one("div#content[role='main']") or soup + detail = root.select_one("article.detail") or root + plot = "" + poster = "" + + # Filmpalast Detailseite: bevorzugt den dedizierten Filmhandlung-Block. + plot_node = detail.select_one( + "li[itemtype='http://schema.org/Movie'] span[itemprop='description']" + ) + if plot_node is not None: + plot = (plot_node.get_text(" ", strip=True) or "").strip() + if not plot: + hidden_plot = detail.select_one("cite span.hidden") + if hidden_plot is not None: + plot = (hidden_plot.get_text(" ", strip=True) or "").strip() + if not plot: + for selector in ("meta[property='og:description']", "meta[name='description']"): + node = root.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + plot = content + break + + # Filmpalast Detailseite: Cover liegt stabil in `img.cover2`. 
+ cover = detail.select_one("img.cover2") + if cover is not None: + value = (cover.get("data-src") or cover.get("src") or "").strip() + if value: + candidate = _absolute_url(value) + lower = candidate.casefold() + if "/themes/" not in lower and "spacer.gif" not in lower and "/files/movies/" in lower: + poster = candidate + if not poster: + thumb_node = detail.select_one("li[itemtype='http://schema.org/Movie'] img[itemprop='image']") + if thumb_node is not None: + value = (thumb_node.get("data-src") or thumb_node.get("src") or "").strip() + if value: + candidate = _absolute_url(value) + lower = candidate.casefold() + if "/themes/" not in lower and "spacer.gif" not in lower and "/files/movies/" in lower: + poster = candidate + + return plot, poster + def remember_series_url(self, title: str, series_url: str) -> None: title = (title or "").strip() series_url = (series_url or "").strip() @@ -711,6 +801,52 @@ class FilmpalastPlugin(BasisPlugin): return _series_hint_value(series_key) return "" + def metadata_for(self, title: str) -> tuple[dict[str, str], dict[str, str], list[object] | None]: + title = (title or "").strip() + if not title: + return {}, {}, None + + info: dict[str, str] = {"title": title} + art: dict[str, str] = {} + cached_plot, cached_poster = self._title_meta.get(title, ("", "")) + if cached_plot: + info["plot"] = cached_plot + if cached_poster: + art = {"thumb": cached_poster, "poster": cached_poster} + if "plot" in info and art: + return info, art, None + + detail_url = self._ensure_title_url(title) + if not detail_url: + series_key = self._series_key_for_title(title) or self._ensure_series_entries_for_title(title) + if series_key: + seasons = self._series_entries.get(series_key, {}) + first_entry: Optional[EpisodeEntry] = None + for season_number in sorted(seasons.keys()): + episodes = seasons.get(season_number, {}) + for episode_number in sorted(episodes.keys()): + first_entry = episodes.get(episode_number) + if first_entry is not None: + break + if 
first_entry is not None: + break + detail_url = first_entry.url if first_entry is not None else "" + if not detail_url: + return info, art, None + + try: + soup = _get_soup(detail_url, session=get_requests_session("filmpalast", headers=HEADERS)) + plot, poster = self._extract_detail_metadata(soup) + except Exception: + plot, poster = "", "" + + if plot: + info["plot"] = plot + if poster: + art = {"thumb": poster, "poster": poster} + self._store_title_meta(title, plot=info.get("plot", ""), poster=poster) + return info, art, None + def is_movie(self, title: str) -> bool: title = (title or "").strip() if not title: @@ -820,11 +956,23 @@ class FilmpalastPlugin(BasisPlugin): def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]: detail_url = self._detail_url_for_selection(title, season, episode) - hosters = self._hosters_for_detail_url(detail_url) - return list(hosters.keys()) + return self.available_hosters_for_url(detail_url) def stream_link_for(self, title: str, season: str, episode: str) -> Optional[str]: detail_url = self._detail_url_for_selection(title, season, episode) + return self.stream_link_for_url(detail_url) + + def episode_url_for(self, title: str, season: str, episode: str) -> str: + detail_url = self._detail_url_for_selection(title, season, episode) + return (detail_url or "").strip() + + def available_hosters_for_url(self, episode_url: str) -> List[str]: + detail_url = (episode_url or "").strip() + hosters = self._hosters_for_detail_url(detail_url) + return list(hosters.keys()) + + def stream_link_for_url(self, episode_url: str) -> Optional[str]: + detail_url = (episode_url or "").strip() if not detail_url: return None hosters = self._hosters_for_detail_url(detail_url) @@ -901,6 +1049,7 @@ class FilmpalastPlugin(BasisPlugin): redirected = link if self._requests_available: + response = None try: session = get_requests_session("filmpalast", headers=HEADERS) response = session.get(link, headers=HEADERS, 
timeout=DEFAULT_TIMEOUT, allow_redirects=True) @@ -908,6 +1057,12 @@ class FilmpalastPlugin(BasisPlugin): redirected = (response.url or link).strip() or link except Exception: redirected = link + finally: + if response is not None: + try: + response.close() + except Exception: + pass # 2) Danach optional die Redirect-URL nochmals auflösen. if callable(resolve_with_resolveurl) and redirected and redirected != link: @@ -922,3 +1077,7 @@ class FilmpalastPlugin(BasisPlugin): _log_url_event(redirected, kind="FINAL") return redirected return None + + +# Alias für die automatische Plugin-Erkennung. +Plugin = FilmpalastPlugin diff --git a/addon/plugins/serienstream_plugin.py b/addon/plugins/serienstream_plugin.py index d2f67f3..cfdb1f0 100644 --- a/addon/plugins/serienstream_plugin.py +++ b/addon/plugins/serienstream_plugin.py @@ -17,7 +17,8 @@ import os import re import time import unicodedata -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple +from urllib.parse import quote try: # pragma: no cover - optional dependency import requests @@ -49,14 +50,15 @@ if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any SETTING_BASE_URL = "serienstream_base_url" DEFAULT_BASE_URL = "https://s.to" DEFAULT_PREFERRED_HOSTERS = ["voe"] DEFAULT_TIMEOUT = 20 +SEARCH_TIMEOUT = 8 ADDON_ID = "plugin.video.viewit" GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" @@ -75,6 +77,19 @@ HEADERS = { SESSION_CACHE_TTL_SECONDS = 300 SESSION_CACHE_PREFIX = "viewit.serienstream" SESSION_CACHE_MAX_TITLE_URLS = 800 +CATALOG_SEARCH_TTL_SECONDS = 600 +CATALOG_SEARCH_CACHE_KEY = "catalog_index" +_CATALOG_INDEX_MEMORY: 
tuple[float, List["SeriesResult"]] = (0.0, []) +ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return @dataclass @@ -111,6 +126,57 @@ class SeasonInfo: episodes: List[EpisodeInfo] +def _extract_series_metadata(soup: BeautifulSoupT) -> Tuple[Dict[str, str], Dict[str, str]]: + info: Dict[str, str] = {} + art: Dict[str, str] = {} + if not soup: + return info, art + + title_tag = soup.select_one("h1") + title = (title_tag.get_text(" ", strip=True) if title_tag else "").strip() + if title: + info["title"] = title + + description = "" + desc_tag = soup.select_one(".series-description .description-text") + if desc_tag: + description = (desc_tag.get_text(" ", strip=True) or "").strip() + if not description: + meta_desc = soup.select_one("meta[property='og:description'], meta[name='description']") + if meta_desc: + description = (meta_desc.get("content") or "").strip() + if description: + info["plot"] = description + + poster = "" + poster_tag = soup.select_one( + ".show-cover-mobile img[data-src], .show-cover-mobile img[src], .col-3 img[data-src], .col-3 img[src]" + ) + if poster_tag: + poster = (poster_tag.get("data-src") or poster_tag.get("src") or "").strip() + if not poster: + for candidate in soup.select("img[data-src], img[src]"): + url = (candidate.get("data-src") or candidate.get("src") or "").strip() + if "/media/images/channel/" in url: + poster = url + break + if poster: + poster = _absolute_url(poster) + art["poster"] = poster + art["thumb"] = poster + + fanart = "" + fanart_tag = soup.select_one("meta[property='og:image']") + if fanart_tag: + fanart = (fanart_tag.get("content") or "").strip() + if fanart: + fanart = _absolute_url(fanart) + art["fanart"] = fanart + art["landscape"] = fanart + + return 
info, art + + def _get_base_url() -> str: base = get_setting_string(ADDON_ID, SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip() if not base: @@ -342,37 +408,56 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif _ensure_requests() _log_visit(url) sess = session or get_requests_session("serienstream", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: _log_error(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url(response.url, kind="REDIRECT") - _log_response_html(url, response.text) - if _looks_like_cloudflare_challenge(response.text): - raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") - return BeautifulSoup(response.text, "html.parser") + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url(final_url, kind="REDIRECT") + _log_response_html(url, body) + if _looks_like_cloudflare_challenge(body): + raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") + return BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _get_html_simple(url: str) -> str: _ensure_requests() _log_visit(url) sess = get_requests_session("serienstream", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: _log_error(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url(response.url, kind="REDIRECT") - body = response.text - _log_response_html(url, body) - if _looks_like_cloudflare_challenge(body): - raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. 
nicht aus.") - return body + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url(final_url, kind="REDIRECT") + _log_response_html(url, body) + if _looks_like_cloudflare_challenge(body): + raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") + return body + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _get_soup_simple(url: str) -> BeautifulSoupT: @@ -400,20 +485,238 @@ def _extract_genre_names_from_html(body: str) -> List[str]: return names -def search_series(query: str) -> List[SeriesResult]: - """Sucht Serien im (/serien)-Katalog (Genre-liste) nach Titel/Alt-Titel.""" +def _strip_tags(value: str) -> str: + return re.sub(r"<[^>]+>", " ", value or "") + + +def _search_series_api(query: str) -> List[SeriesResult]: + query = (query or "").strip() + if not query: + return [] + _ensure_requests() + sess = get_requests_session("serienstream", headers=HEADERS) + terms = [query] + if " " in query: + # Fallback: einzelne Tokens liefern in der API oft bessere Treffer. 
+ terms.extend([token for token in query.split() if token]) + seen_urls: set[str] = set() + for term in terms: + response = None + try: + response = sess.get( + f"{_get_base_url()}/api/search/suggest", + params={"term": term}, + headers=HEADERS, + timeout=SEARCH_TIMEOUT, + ) + response.raise_for_status() + except Exception: + continue + try: + payload = response.json() + except Exception: + continue + finally: + if response is not None: + try: + response.close() + except Exception: + pass + shows = payload.get("shows") if isinstance(payload, dict) else None + if not isinstance(shows, list): + continue + results: List[SeriesResult] = [] + for item in shows: + if not isinstance(item, dict): + continue + title = (item.get("name") or "").strip() + href = (item.get("url") or "").strip() + if not title or not href: + continue + url_abs = _absolute_url(href) + if not url_abs or url_abs in seen_urls: + continue + if "/staffel-" in url_abs or "/episode-" in url_abs: + continue + seen_urls.add(url_abs) + results.append(SeriesResult(title=title, description="", url=url_abs)) + if not results: + continue + filtered = [entry for entry in results if _matches_query(query, title=entry.title)] + if filtered: + return filtered + # Falls nur Token-Suche möglich war, zumindest die Ergebnisse liefern. 
+ if term != query: + return results + return [] + + + def _search_series_server(query: str) -> List[SeriesResult]: + if not query: + return [] + api_results = _search_series_api(query) + if api_results: + return api_results + base = _get_base_url() + search_url = f"{base}/search?q={quote(query)}" + alt_url = f"{base}/suche?q={quote(query)}" + for url in (search_url, alt_url): + try: + body = _get_html_simple(url) + except Exception: + continue + if not body: + continue + soup = BeautifulSoup(body, "html.parser") + root = soup.select_one(".search-results-list") + if root is None: + continue + seen_urls: set[str] = set() + results: List[SeriesResult] = [] + for card in root.select(".cover-card"): + anchor = card.select_one("a[href*='/serie/']") + if not anchor: + continue + href = (anchor.get("href") or "").strip() + url_abs = _absolute_url(href) + if not url_abs or url_abs in seen_urls: + continue + if "/staffel-" in url_abs or "/episode-" in url_abs: + continue + title_tag = card.select_one(".show-title") or card.select_one("h3") or card.select_one("h4") + title = (title_tag.get_text(" ", strip=True) if title_tag else anchor.get_text(" ", strip=True)).strip() + if not title: + continue + seen_urls.add(url_abs) + results.append(SeriesResult(title=title, description="", url=url_abs)) + if results: + return results + return [] + + + def _extract_catalog_index_from_html(body: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]: + items: List[SeriesResult] = [] + if not body: + return items + seen_urls: set[str] = set() + item_re = re.compile( + r"<li[^>]*class=[\"'][^\"']*series-item[^\"']*[\"'][^>]*>(.*?)</li>", + re.IGNORECASE | re.DOTALL, + ) + anchor_re = re.compile(r"<a[^>]+href=[\"']([^\"']+)[\"'][^>]*>(.*?)</a>", re.IGNORECASE | re.DOTALL) + data_search_re = re.compile(r"data-search=[\"']([^\"']*)[\"']", re.IGNORECASE) + for idx, match in enumerate(item_re.finditer(body), start=1): + if idx == 1 or idx % 200 == 0: + _emit_progress(progress_callback, f"Katalog 
parsen {idx}", 62) + block = match.group(0) + inner = match.group(1) or "" + anchor_match = anchor_re.search(inner) + if not anchor_match: + continue + href = (anchor_match.group(1) or "").strip() + url = _absolute_url(href) + if not url or "/serie/" not in url or "/staffel-" in url or "/episode-" in url: + continue + if url in seen_urls: + continue + seen_urls.add(url) + title_raw = anchor_match.group(2) or "" + title = unescape(re.sub(r"\s+", " ", _strip_tags(title_raw))).strip() + if not title: + continue + search_match = data_search_re.search(block) + description = (search_match.group(1) or "").strip() if search_match else "" + items.append(SeriesResult(title=title, description=description, url=url)) + return items + + +def _catalog_index_from_soup(soup: BeautifulSoupT) -> List[SeriesResult]: + items: List[SeriesResult] = [] + if not soup: + return items + seen_urls: set[str] = set() + for item in soup.select("li.series-item"): + anchor = item.find("a", href=True) + if not anchor: + continue + href = (anchor.get("href") or "").strip() + url = _absolute_url(href) + if not url or "/serie/" not in url or "/staffel-" in url or "/episode-" in url: + continue + if url in seen_urls: + continue + seen_urls.add(url) + title = (anchor.get_text(" ", strip=True) or "").strip() + if not title: + continue + description = (item.get("data-search") or "").strip() + items.append(SeriesResult(title=title, description=description, url=url)) + return items + + +def _load_catalog_index_from_cache() -> Optional[List[SeriesResult]]: + global _CATALOG_INDEX_MEMORY + expires_at, cached = _CATALOG_INDEX_MEMORY + if cached and expires_at > time.time(): + return list(cached) + raw = _session_cache_get(CATALOG_SEARCH_CACHE_KEY) + if not isinstance(raw, list): + return None + items: List[SeriesResult] = [] + for entry in raw: + if not isinstance(entry, list) or len(entry) < 2: + continue + title = str(entry[0] or "").strip() + url = str(entry[1] or "").strip() + description = str(entry[2] or 
"") if len(entry) > 2 else "" + if title and url: + items.append(SeriesResult(title=title, description=description, url=url)) + if items: + _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(items)) + return items or None + + +def _store_catalog_index_in_cache(items: List[SeriesResult]) -> None: + global _CATALOG_INDEX_MEMORY + if not items: + return + _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(items)) + payload: List[List[str]] = [] + for entry in items: + if not entry.title or not entry.url: + continue + payload.append([entry.title, entry.url, entry.description]) + _session_cache_set(CATALOG_SEARCH_CACHE_KEY, payload, ttl_seconds=CATALOG_SEARCH_TTL_SECONDS) + + +def search_series(query: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]: + """Sucht Serien im (/serien)-Katalog nach Titel. Nutzt Cache + Ein-Pass-Filter.""" _ensure_requests() if not _normalize_search_text(query): return [] - # Direkter Abruf wie in fetch_serien.py. 
+ _emit_progress(progress_callback, "Server-Suche", 15) + server_results = _search_series_server(query) + if server_results: + _emit_progress(progress_callback, f"Server-Treffer: {len(server_results)}", 35) + return [entry for entry in server_results if entry.title and _matches_query(query, title=entry.title)] + _emit_progress(progress_callback, "Pruefe Such-Cache", 42) + cached = _load_catalog_index_from_cache() + if cached is not None: + _emit_progress(progress_callback, f"Cache-Treffer: {len(cached)}", 52) + return [entry for entry in cached if entry.title and _matches_query(query, title=entry.title)] + + _emit_progress(progress_callback, "Lade Katalogseite", 58) catalog_url = f"{_get_base_url()}/serien?by=genre" - soup = _get_soup_simple(catalog_url) - results: List[SeriesResult] = [] - for series in parse_series_catalog(soup).values(): - for entry in series: - if entry.title and _matches_query(query, title=entry.title): - results.append(entry) - return results + body = _get_html_simple(catalog_url) + items = _extract_catalog_index_from_html(body, progress_callback=progress_callback) + if not items: + _emit_progress(progress_callback, "Fallback-Parser", 70) + soup = BeautifulSoup(body, "html.parser") + items = _catalog_index_from_soup(soup) + if items: + _store_catalog_index_in_cache(items) + _emit_progress(progress_callback, f"Filtere Treffer ({len(items)})", 85) + return [entry for entry in items if entry.title and _matches_query(query, title=entry.title)] def parse_series_catalog(soup: BeautifulSoupT) -> Dict[str, List[SeriesResult]]: @@ -731,15 +1034,23 @@ def resolve_redirect(target_url: str) -> Optional[str]: _get_soup(_get_base_url(), session=session) except Exception: pass - response = session.get( - normalized_url, - headers=HEADERS, - timeout=DEFAULT_TIMEOUT, - allow_redirects=True, - ) - if response.url: - _log_url(response.url, kind="RESOLVED") - return response.url if response.url else None + response = None + try: + response = session.get( + 
normalized_url, + headers=HEADERS, + timeout=DEFAULT_TIMEOUT, + allow_redirects=True, + ) + if response.url: + _log_url(response.url, kind="RESOLVED") + return response.url if response.url else None + finally: + if response is not None: + try: + response.close() + except Exception: + pass def scrape_series_detail( @@ -785,7 +1096,7 @@ class SerienstreamPlugin(BasisPlugin): name = "Serienstream" version = "1.0.0" - POPULAR_GENRE_LABEL = "⭐ Beliebte Serien" + POPULAR_GENRE_LABEL = "Haeufig gesehen" def __init__(self) -> None: self._series_results: Dict[str, SeriesResult] = {} @@ -805,6 +1116,7 @@ class SerienstreamPlugin(BasisPlugin): self._hoster_cache: Dict[Tuple[str, str, str], List[str]] = {} self._latest_cache: Dict[int, List[LatestEpisode]] = {} self._latest_hoster_cache: Dict[str, List[str]] = {} + self._series_metadata_cache: Dict[str, Tuple[Dict[str, str], Dict[str, str]]] = {} self.is_available = True self.unavailable_reason: Optional[str] = None if not self._requests_available: # pragma: no cover - optional dependency @@ -851,12 +1163,30 @@ class SerienstreamPlugin(BasisPlugin): cache_key = title.casefold() if self._title_url_cache.get(cache_key) != url: self._title_url_cache[cache_key] = url - self._save_title_url_cache() + self._save_title_url_cache() + if url: return current = self._series_results.get(title) if current is None: self._series_results[title] = SeriesResult(title=title, description=description, url="") + @staticmethod + def _metadata_cache_key(title: str) -> str: + return (title or "").strip().casefold() + + def _series_for_title(self, title: str) -> Optional[SeriesResult]: + direct = self._series_results.get(title) + if direct and direct.url: + return direct + lookup_key = (title or "").strip().casefold() + for item in self._series_results.values(): + if item.title.casefold().strip() == lookup_key and item.url: + return item + cached_url = self._title_url_cache.get(lookup_key, "") + if cached_url: + return SeriesResult(title=title, 
description="", url=cached_url) + return None + @staticmethod def _season_links_cache_name(series_url: str) -> str: digest = hashlib.sha1((series_url or "").encode("utf-8")).hexdigest()[:20] @@ -1274,7 +1604,28 @@ class SerienstreamPlugin(BasisPlugin): self._season_links_cache[title] = list(session_links) return list(session_links) try: - seasons = scrape_series_detail(series.url, load_episodes=False) + series_soup = _get_soup(series.url, session=get_requests_session("serienstream", headers=HEADERS)) + info_labels, art = _extract_series_metadata(series_soup) + if series.description and "plot" not in info_labels: + info_labels["plot"] = series.description + cache_key = self._metadata_cache_key(title) + if info_labels or art: + self._series_metadata_cache[cache_key] = (info_labels, art) + + base_series_url = _series_root_url(_extract_canonical_url(series_soup, series.url)) + season_links = _extract_season_links(series_soup) + season_count = _extract_number_of_seasons(series_soup) + if season_count and (not season_links or len(season_links) < season_count): + existing = {number for number, _ in season_links} + for number in range(1, season_count + 1): + if number in existing: + continue + season_url = f"{base_series_url}/staffel-{number}" + _log_parsed_url(season_url) + season_links.append((number, season_url)) + season_links.sort(key=lambda item: item[0]) + seasons = [SeasonInfo(number=number, url=url, episodes=[]) for number, url in season_links] + seasons.sort(key=lambda s: s.number) except Exception as exc: # pragma: no cover - defensive logging raise RuntimeError(f"Serienstream-Staffeln konnten nicht geladen werden: {exc}") from exc self._season_links_cache[title] = list(seasons) @@ -1288,6 +1639,41 @@ class SerienstreamPlugin(BasisPlugin): return self._remember_series_result(title, series_url) + def metadata_for(self, title: str) -> Tuple[Dict[str, str], Dict[str, str], Optional[List[Any]]]: + title = (title or "").strip() + if not title or not 
self._requests_available: + return {}, {}, None + + cache_key = self._metadata_cache_key(title) + cached = self._series_metadata_cache.get(cache_key) + if cached is not None: + info, art = cached + return dict(info), dict(art), None + + series = self._series_for_title(title) + if series is None or not series.url: + info = {"title": title} + self._series_metadata_cache[cache_key] = (dict(info), {}) + return info, {}, None + + info: Dict[str, str] = {"title": title} + art: Dict[str, str] = {} + if series.description: + info["plot"] = series.description + + try: + soup = _get_soup(series.url, session=get_requests_session("serienstream", headers=HEADERS)) + parsed_info, parsed_art = _extract_series_metadata(soup) + if parsed_info: + info.update(parsed_info) + if parsed_art: + art.update(parsed_art) + except Exception: + pass + + self._series_metadata_cache[cache_key] = (dict(info), dict(art)) + return info, art, None + def series_url_for_title(self, title: str) -> str: title = (title or "").strip() if not title: @@ -1348,7 +1734,7 @@ class SerienstreamPlugin(BasisPlugin): return self._episode_label_cache.get(cache_key, {}).get(episode_label) return None - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: query = query.strip() if not query: self._series_results.clear() @@ -1362,7 +1748,8 @@ class SerienstreamPlugin(BasisPlugin): try: # Nutzt den Katalog (/serien), der jetzt nach Genres gruppiert ist. # Alternativ gäbe es ein Ajax-Endpoint, aber der ist nicht immer zuverlässig erreichbar. 
- results = search_series(query) + _emit_progress(progress_callback, "Serienstream Suche startet", 10) + results = search_series(query, progress_callback=progress_callback) except Exception as exc: # pragma: no cover - defensive logging self._series_results.clear() self._season_cache.clear() @@ -1375,6 +1762,7 @@ class SerienstreamPlugin(BasisPlugin): self._season_cache.clear() self._season_links_cache.clear() self._episode_label_cache.clear() + _emit_progress(progress_callback, f"Treffer aufbereitet: {len(results)}", 95) return [result.title for result in results] def _ensure_seasons(self, title: str) -> List[SeasonInfo]: @@ -1443,6 +1831,18 @@ class SerienstreamPlugin(BasisPlugin): except Exception as exc: # pragma: no cover - defensive logging raise RuntimeError(f"Stream-Link konnte nicht geladen werden: {exc}") from exc + def episode_url_for(self, title: str, season: str, episode: str) -> str: + cache_key = (title, season) + cached = self._episode_label_cache.get(cache_key) + if cached: + info = cached.get(episode) + if info and info.url: + return info.url + episode_info = self._lookup_episode(title, season, episode) + if episode_info and episode_info.url: + return episode_info.url + return "" + def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]: if not self._requests_available: raise RuntimeError("SerienstreamPlugin kann ohne requests/bs4 keine Hoster laden.") diff --git a/addon/plugins/topstreamfilm_plugin.py b/addon/plugins/topstreamfilm_plugin.py index 97c9e4b..ab71fd6 100644 --- a/addon/plugins/topstreamfilm_plugin.py +++ b/addon/plugins/topstreamfilm_plugin.py @@ -19,8 +19,8 @@ import hashlib import os import re import json -from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypeAlias -from urllib.parse import urlencode, urljoin +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional +from urllib.parse import urljoin try: # pragma: no cover - optional dependency import requests @@ -51,13 +51,13 @@ 
if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any ADDON_ID = "plugin.video.viewit" SETTING_BASE_URL = "topstream_base_url" -DEFAULT_BASE_URL = "https://www.meineseite" +DEFAULT_BASE_URL = "https://topstreamfilm.live" GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info" @@ -78,6 +78,16 @@ HEADERS = { "Accept-Language": "de-DE,de;q=0.9,en;q=0.8", "Connection": "keep-alive", } +ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return @dataclass(frozen=True) @@ -87,6 +97,7 @@ class SearchHit: title: str url: str description: str = "" + poster: str = "" def _normalize_search_text(value: str) -> str: @@ -139,6 +150,7 @@ class TopstreamfilmPlugin(BasisPlugin): self._season_to_episode_numbers: Dict[tuple[str, str], List[int]] = {} self._episode_title_by_number: Dict[tuple[str, int, int], str] = {} self._detail_html_cache: Dict[str, str] = {} + self._title_meta: Dict[str, tuple[str, str]] = {} self._popular_cache: List[str] | None = None self._default_preferred_hosters: List[str] = list(DEFAULT_PREFERRED_HOSTERS) self._preferred_hosters: List[str] = list(self._default_preferred_hosters) @@ -419,6 +431,7 @@ class TopstreamfilmPlugin(BasisPlugin): continue seen.add(hit.title) self._title_to_url[hit.title] = hit.url + self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster) titles.append(hit.title) if titles: self._save_title_url_cache() @@ -477,6 +490,69 @@ class 
TopstreamfilmPlugin(BasisPlugin): except Exception: return "" + def _pick_image_from_node(self, node: Any) -> str: + if node is None: + return "" + image = node.select_one("img") + if image is None: + return "" + for attr in ("data-src", "src"): + value = (image.get(attr) or "").strip() + if value and "lazy_placeholder" not in value.casefold(): + return self._absolute_external_url(value, base=self._get_base_url()) + srcset = (image.get("data-srcset") or image.get("srcset") or "").strip() + if srcset: + first = srcset.split(",")[0].strip().split(" ", 1)[0].strip() + if first: + return self._absolute_external_url(first, base=self._get_base_url()) + return "" + + def _store_title_meta(self, title: str, *, plot: str = "", poster: str = "") -> None: + title = (title or "").strip() + if not title: + return + old_plot, old_poster = self._title_meta.get(title, ("", "")) + merged_plot = (plot or old_plot or "").strip() + merged_poster = (poster or old_poster or "").strip() + self._title_meta[title] = (merged_plot, merged_poster) + + def _extract_detail_metadata(self, soup: BeautifulSoupT) -> tuple[str, str]: + if not soup: + return "", "" + plot = "" + poster = "" + for selector in ("meta[property='og:description']", "meta[name='description']"): + node = soup.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + plot = content + break + if not plot: + candidates: list[str] = [] + for paragraph in soup.select("article p, .TPost p, .Description p, .entry-content p"): + text = (paragraph.get_text(" ", strip=True) or "").strip() + if len(text) >= 60: + candidates.append(text) + if candidates: + plot = max(candidates, key=len) + + for selector in ("meta[property='og:image']", "meta[name='twitter:image']"): + node = soup.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + poster = self._absolute_external_url(content, base=self._get_base_url()) + break + if 
not poster: + for selector in ("article", ".TPost", ".entry-content"): + poster = self._pick_image_from_node(soup.select_one(selector)) + if poster: + break + return plot, poster + def _clear_stream_index_for_title(self, title: str) -> None: for key in list(self._season_to_episode_numbers.keys()): if key[0] == title: @@ -584,15 +660,25 @@ class TopstreamfilmPlugin(BasisPlugin): session = self._get_session() self._log_url(url, kind="VISIT") self._notify_url(url) + response = None try: response = session.get(url, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: self._log_error(f"GET {url} failed: {exc}") raise - self._log_url(response.url, kind="OK") - self._log_response_html(response.url, response.text) - return BeautifulSoup(response.text, "html.parser") + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + self._log_url(final_url, kind="OK") + self._log_response_html(final_url, body) + return BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _get_detail_soup(self, title: str) -> Optional[BeautifulSoupT]: title = (title or "").strip() @@ -701,7 +787,17 @@ class TopstreamfilmPlugin(BasisPlugin): continue if is_movie_hint: self._movie_title_hint.add(title) - hits.append(SearchHit(title=title, url=self._absolute_url(href), description="")) + description_tag = item.select_one(".TPMvCn .Description, .Description, .entry-summary") + description = (description_tag.get_text(" ", strip=True) or "").strip() if description_tag else "" + poster = self._pick_image_from_node(item) + hits.append( + SearchHit( + title=title, + url=self._absolute_url(href), + description=description, + poster=poster, + ) + ) return hits def is_movie(self, title: str) -> bool: @@ -774,6 +870,7 @@ class TopstreamfilmPlugin(BasisPlugin): continue seen.add(hit.title) self._title_to_url[hit.title] = hit.url + 
self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster) titles.append(hit.title) if titles: self._save_title_url_cache() @@ -814,7 +911,7 @@ class TopstreamfilmPlugin(BasisPlugin): # Sonst: Serie via Streams-Accordion parsen (falls vorhanden). self._parse_stream_accordion(soup, title=title) - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: """Sucht Titel ueber eine HTML-Suche. Erwartetes HTML (Snippet): @@ -827,6 +924,7 @@ class TopstreamfilmPlugin(BasisPlugin): query = (query or "").strip() if not query: return [] + _emit_progress(progress_callback, "Topstreamfilm Suche", 15) session = self._get_session() url = self._get_base_url() + "/" @@ -834,6 +932,7 @@ class TopstreamfilmPlugin(BasisPlugin): request_url = f"{url}?{urlencode(params)}" self._log_url(request_url, kind="GET") self._notify_url(request_url) + response = None try: response = session.get( url, @@ -844,15 +943,28 @@ class TopstreamfilmPlugin(BasisPlugin): except Exception as exc: self._log_error(f"GET {request_url} failed: {exc}") raise - self._log_url(response.url, kind="OK") - self._log_response_html(response.url, response.text) + try: + final_url = (response.url or request_url) if response is not None else request_url + body = (response.text or "") if response is not None else "" + self._log_url(final_url, kind="OK") + self._log_response_html(final_url, body) - if BeautifulSoup is None: - return [] - soup = BeautifulSoup(response.text, "html.parser") + if BeautifulSoup is None: + return [] + soup = BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass hits: List[SearchHit] = [] - for item in soup.select("li.TPostMv"): + items = soup.select("li.TPostMv") + total_items = max(1, len(items)) + for idx, item in enumerate(items, start=1): + if idx == 1 or idx % 20 == 0: + _emit_progress(progress_callback, 
f"Treffer pruefen {idx}/{total_items}", 55) anchor = item.select_one("a[href]") if not anchor: continue @@ -870,7 +982,8 @@ class TopstreamfilmPlugin(BasisPlugin): self._movie_title_hint.add(title) description_tag = item.select_one(".TPMvCn .Description") description = description_tag.get_text(" ", strip=True) if description_tag else "" - hit = SearchHit(title=title, url=self._absolute_url(href), description=description) + poster = self._pick_image_from_node(item) + hit = SearchHit(title=title, url=self._absolute_url(href), description=description, poster=poster) if _matches_query(query, title=hit.title, description=hit.description): hits.append(hit) @@ -883,10 +996,41 @@ class TopstreamfilmPlugin(BasisPlugin): continue seen.add(hit.title) self._title_to_url[hit.title] = hit.url + self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster) titles.append(hit.title) self._save_title_url_cache() + _emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95) return titles + def metadata_for(self, title: str) -> tuple[dict[str, str], dict[str, str], list[object] | None]: + title = (title or "").strip() + if not title: + return {}, {}, None + + info: dict[str, str] = {"title": title} + art: dict[str, str] = {} + + cached_plot, cached_poster = self._title_meta.get(title, ("", "")) + if cached_plot: + info["plot"] = cached_plot + if cached_poster: + art = {"thumb": cached_poster, "poster": cached_poster} + + if "plot" in info and art: + return info, art, None + + soup = self._get_detail_soup(title) + if soup is None: + return info, art, None + + plot, poster = self._extract_detail_metadata(soup) + if plot: + info["plot"] = plot + if poster: + art = {"thumb": poster, "poster": poster} + self._store_title_meta(title, plot=plot, poster=poster) + return info, art, None + def genres(self) -> List[str]: if not REQUESTS_AVAILABLE or BeautifulSoup is None: return [] diff --git a/addon/resources/settings.xml b/addon/resources/settings.xml index 
d9f7c76..1092fac 100644 --- a/addon/resources/settings.xml +++ b/addon/resources/settings.xml @@ -1,79 +1,90 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - + + + - + + - + + - + + - + + - + + - - - - + + + + - - - + + + - - - + + + - - - - - - - - - - + + + + + + + + + + + + + + + diff --git a/addon/tmdb.py b/addon/tmdb.py index 830e770..7ae2d15 100644 --- a/addon/tmdb.py +++ b/addon/tmdb.py @@ -14,6 +14,7 @@ except ImportError: # pragma: no cover TMDB_API_BASE = "https://api.themoviedb.org/3" TMDB_IMAGE_BASE = "https://image.tmdb.org/t/p" +MAX_CAST_MEMBERS = 30 _TMDB_THREAD_LOCAL = threading.local() @@ -73,53 +74,17 @@ def _fetch_credits( return [] params = {"api_key": api_key, "language": (language or "de-DE").strip()} url = f"{TMDB_API_BASE}/{kind}/{tmdb_id}/credits?{urlencode(params)}" - if callable(log): - log(f"TMDB GET {url}") - try: - response = requests.get(url, timeout=timeout) - except Exception as exc: # pragma: no cover - if callable(log): - log(f"TMDB ERROR /{kind}/{{id}}/credits request_failed error={exc!r}") - return [] - status = getattr(response, "status_code", None) + status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses) if callable(log): log(f"TMDB RESPONSE /{kind}/{{id}}/credits status={status}") - if status != 200: + if log_responses and payload is None and body_text: + log(f"TMDB RESPONSE_BODY /{kind}/{{id}}/credits body={body_text[:2000]}") + if status != 200 or not isinstance(payload, dict): return [] - try: - payload = response.json() or {} - except Exception: - return [] - if callable(log) and log_responses: - try: - dumped = json.dumps(payload, ensure_ascii=False) - except Exception: - dumped = str(payload) - log(f"TMDB RESPONSE_BODY /{kind}/{{id}}/credits body={dumped[:2000]}") - cast_payload = payload.get("cast") or [] if callable(log): log(f"TMDB CREDITS /{kind}/{{id}}/credits cast={len(cast_payload)}") - 
with_images: List[TmdbCastMember] = [] - without_images: List[TmdbCastMember] = [] - for entry in cast_payload: - name = (entry.get("name") or "").strip() - role = (entry.get("character") or "").strip() - thumb = _image_url(entry.get("profile_path") or "", size="w185") - if not name: - continue - member = TmdbCastMember(name=name, role=role, thumb=thumb) - if thumb: - with_images.append(member) - else: - without_images.append(member) - - # Viele Kodi-Skins zeigen bei fehlendem Thumbnail Platzhalter-Köpfe. - # Bevorzugt daher Cast-Einträge mit Bild; nur wenn gar keine Bilder existieren, - # geben wir Namen ohne Bild zurück. - if with_images: - return with_images[:30] - return without_images[:30] + return _parse_cast_payload(cast_payload) def _parse_cast_payload(cast_payload: object) -> List[TmdbCastMember]: @@ -141,8 +106,8 @@ def _parse_cast_payload(cast_payload: object) -> List[TmdbCastMember]: else: without_images.append(member) if with_images: - return with_images[:30] - return without_images[:30] + return with_images[:MAX_CAST_MEMBERS] + return without_images[:MAX_CAST_MEMBERS] def _tmdb_get_json( @@ -163,23 +128,29 @@ def _tmdb_get_json( if callable(log): log(f"TMDB GET {url}") sess = session or _get_tmdb_session() or requests.Session() + response = None try: response = sess.get(url, timeout=timeout) + status = getattr(response, "status_code", None) + payload: object | None = None + body_text = "" + try: + payload = response.json() + except Exception: + try: + body_text = (response.text or "").strip() + except Exception: + body_text = "" except Exception as exc: # pragma: no cover if callable(log): log(f"TMDB ERROR request_failed url={url} error={exc!r}") return None, None, "" - - status = getattr(response, "status_code", None) - payload: object | None = None - body_text = "" - try: - payload = response.json() - except Exception: - try: - body_text = (response.text or "").strip() - except Exception: - body_text = "" + finally: + if response is not None: + try: 
+ response.close() + except Exception: + pass if callable(log): log(f"TMDB RESPONSE status={status} url={url}") @@ -214,49 +185,17 @@ def fetch_tv_episode_credits( return [] params = {"api_key": api_key, "language": (language or "de-DE").strip()} url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}/episode/{episode_number}/credits?{urlencode(params)}" - if callable(log): - log(f"TMDB GET {url}") - try: - response = requests.get(url, timeout=timeout) - except Exception as exc: # pragma: no cover - if callable(log): - log(f"TMDB ERROR /tv/{{id}}/season/{{n}}/episode/{{e}}/credits request_failed error={exc!r}") - return [] - status = getattr(response, "status_code", None) + status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses) if callable(log): log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}}/episode/{{e}}/credits status={status}") - if status != 200: + if log_responses and payload is None and body_text: + log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}}/episode/{{e}}/credits body={body_text[:2000]}") + if status != 200 or not isinstance(payload, dict): return [] - try: - payload = response.json() or {} - except Exception: - return [] - if callable(log) and log_responses: - try: - dumped = json.dumps(payload, ensure_ascii=False) - except Exception: - dumped = str(payload) - log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}}/episode/{{e}}/credits body={dumped[:2000]}") - cast_payload = payload.get("cast") or [] if callable(log): log(f"TMDB CREDITS /tv/{{id}}/season/{{n}}/episode/{{e}}/credits cast={len(cast_payload)}") - with_images: List[TmdbCastMember] = [] - without_images: List[TmdbCastMember] = [] - for entry in cast_payload: - name = (entry.get("name") or "").strip() - role = (entry.get("character") or "").strip() - thumb = _image_url(entry.get("profile_path") or "", size="w185") - if not name: - continue - member = TmdbCastMember(name=name, role=role, thumb=thumb) - if thumb: - with_images.append(member) - 
else: - without_images.append(member) - if with_images: - return with_images[:30] - return without_images[:30] + return _parse_cast_payload(cast_payload) def lookup_tv_show( @@ -546,27 +485,13 @@ def lookup_tv_season_summary( params = {"api_key": api_key, "language": (language or "de-DE").strip()} url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}?{urlencode(params)}" - if callable(log): - log(f"TMDB GET {url}") - try: - response = requests.get(url, timeout=timeout) - except Exception: - return None - status = getattr(response, "status_code", None) + status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses) if callable(log): log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}} status={status}") - if status != 200: + if log_responses and payload is None and body_text: + log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}} body={body_text[:2000]}") + if status != 200 or not isinstance(payload, dict): return None - try: - payload = response.json() or {} - except Exception: - return None - if callable(log) and log_responses: - try: - dumped = json.dumps(payload, ensure_ascii=False) - except Exception: - dumped = str(payload) - log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}} body={dumped[:2000]}") plot = (payload.get("overview") or "").strip() poster_path = (payload.get("poster_path") or "").strip() @@ -594,27 +519,9 @@ def lookup_tv_season( return None params = {"api_key": api_key, "language": (language or "de-DE").strip()} url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}?{urlencode(params)}" - if callable(log): - log(f"TMDB GET {url}") - try: - response = requests.get(url, timeout=timeout) - except Exception as exc: # pragma: no cover - if callable(log): - log(f"TMDB ERROR /tv/{{id}}/season/{{n}} request_failed error={exc!r}") - return None - - status = getattr(response, "status_code", None) - payload = None - body_text = "" - try: - payload = response.json() or {} - except Exception: - try: - body_text = 
(response.text or "").strip() - except Exception: - body_text = "" - - episodes = (payload or {}).get("episodes") or [] + status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses) + episodes = (payload or {}).get("episodes") if isinstance(payload, dict) else [] + episodes = episodes or [] if callable(log): log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}} status={status} episodes={len(episodes)}") if log_responses: diff --git a/docs/DEFAULT_ROUTER.md b/docs/DEFAULT_ROUTER.md index 61a2aed..4503d58 100644 --- a/docs/DEFAULT_ROUTER.md +++ b/docs/DEFAULT_ROUTER.md @@ -1,54 +1,49 @@ -# ViewIT – Hauptlogik (`addon/default.py`) +# ViewIT Hauptlogik (`addon/default.py`) -Dieses Dokument beschreibt den Einstiegspunkt des Addons und die zentrale Steuerlogik. +Diese Datei ist der Router des Addons. +Sie verbindet Kodi UI, Plugin Calls und Playback. -## Aufgabe der Datei -`addon/default.py` ist der Router des Addons. Er: -- lädt die Plugin‑Module dynamisch, -- stellt die Kodi‑Navigation bereit, -- übersetzt UI‑Aktionen in Plugin‑Aufrufe, -- startet die Wiedergabe und verwaltet Playstate/Resume. +## Kernaufgabe +- Plugins laden +- Menues bauen +- Aktionen auf Plugin Methoden mappen +- Playback starten +- Playstate speichern -## Ablauf (high level) -1. **Plugin‑Discovery**: Lädt alle `addon/plugins/*.py` (ohne `_`‑Prefix) und instanziiert Klassen, die von `BasisPlugin` erben. -2. **Navigation**: Baut Kodi‑Listen (Serien/Staffeln/Episoden) auf Basis der Plugin‑Antworten. -3. **Playback**: Holt Stream‑Links aus dem Plugin und startet die Wiedergabe. -4. **Playstate**: Speichert Resume‑Daten lokal (`playstate.json`) und setzt `playcount`/Resume‑Infos. +## Ablauf +1. Plugin Discovery fuer `addon/plugins/*.py` ohne `_` Prefix. +2. Navigation fuer Titel, Staffeln und Episoden. +3. Playback: Link holen, optional aufloesen, abspielen. +4. Playstate: watched und resume in `playstate.json` schreiben. 
-## Routing & Aktionen -Die Datei arbeitet mit URL‑Parametern (Kodi‑Plugin‑Standard). Typische Aktionen: -- `search` → Suche über ein Plugin -- `seasons` → Staffeln für einen Titel -- `episodes` → Episoden für eine Staffel -- `play` → Stream‑Link auflösen und abspielen +## Routing +Der Router liest Query Parameter aus `sys.argv[2]`. +Typische Aktionen: +- `search` +- `seasons` +- `episodes` +- `play_episode` +- `play_movie` +- `play_episode_url` -Die genaue Aktion wird aus den Query‑Parametern gelesen und an das entsprechende Plugin delegiert. +## Playstate +- Speicherort: Addon Profilordner, Datei `playstate.json` +- Key: Plugin + Titel + Staffel + Episode +- Werte: watched, playcount, resume_position, resume_total -## Playstate (Resume/Watched) -- **Speicherort**: `playstate.json` im Addon‑Profilordner. -- **Key**: Kombination aus Plugin‑Name, Titel, Staffel, Episode. -- **Verwendung**: - - `playcount` wird gesetzt, wenn „gesehen“ markiert ist. - - `resume_position`/`resume_total` werden gesetzt, wenn vorhanden. +## Wichtige Helper +- Plugin Loader und Discovery +- UI Builder fuer ListItems +- Playstate Load/Save/Merge +- TMDB Merge mit Source Fallback -## Wichtige Hilfsfunktionen -- **Plugin‑Loader**: findet & instanziiert Plugins. -- **UI‑Helper**: setzt Content‑Type, baut Verzeichnisseinträge. -- **Playstate‑Helper**: `_load_playstate`, `_save_playstate`, `_apply_playstate_to_info`. +## Fehlerverhalten +- Importfehler pro Plugin werden isoliert behandelt. +- Fehler in einem Plugin sollen das Addon nicht stoppen. +- User bekommt kurze Fehlermeldungen in Kodi. -## Fehlerbehandlung -- Plugin‑Importfehler werden isoliert behandelt, damit das Addon nicht komplett ausfällt. -- Netzwerk‑Fehler werden in Plugins abgefangen, `default.py` sollte nur saubere Fehlermeldungen weitergeben. - -## Debugging -- Globale Debug‑Settings werden über `addon/resources/settings.xml` gesteuert. -- Plugins loggen URLs/HTML optional (siehe jeweilige Plugin‑Doku). 
- -## Änderungen & Erweiterungen -Für neue Aktionen: -1. Neue Aktion im Router registrieren. -2. UI‑Einträge passend anlegen. -3. Entsprechende Plugin‑Methode definieren oder erweitern. - -## Hinweis zur Erstellung -Teile dieser Dokumentation wurden KI‑gestützt erstellt und bei Bedarf manuell angepasst. +## Erweiterung +Fuer neue Aktion im Router: +1. Action im `run()` Handler registrieren. +2. ListItem mit passenden Parametern bauen. +3. Zielmethode im Plugin bereitstellen. diff --git a/docs/PLUGIN_DEVELOPMENT.md b/docs/PLUGIN_DEVELOPMENT.md index 84e8c96..b167134 100644 --- a/docs/PLUGIN_DEVELOPMENT.md +++ b/docs/PLUGIN_DEVELOPMENT.md @@ -1,109 +1,85 @@ -# ViewIT – Entwicklerdoku Plugins (`addon/plugins/*_plugin.py`) +# ViewIT Plugin Entwicklung (`addon/plugins/*_plugin.py`) -Diese Doku beschreibt, wie Plugins im ViewIT‑Addon aufgebaut sind und wie neue Provider‑Integrationen entwickelt werden. +Diese Datei zeigt, wie Plugins im Projekt aufgebaut sind und wie sie mit dem Router zusammenarbeiten. ## Grundlagen -- Jedes Plugin ist eine einzelne Datei unter `addon/plugins/`. -- Dateinamen **ohne** `_`‑Prefix werden automatisch geladen. -- Jede Datei enthält eine Klasse, die von `BasisPlugin` erbt. +- Ein Plugin ist eine Python Datei in `addon/plugins/`. +- Dateien mit `_` Prefix werden nicht geladen. +- Plugin Klasse erbt von `BasisPlugin`. +- Optional: `Plugin = ` als klarer Einstiegspunkt. -## Pflicht‑Methoden (BasisPlugin) -Jedes Plugin muss diese Methoden implementieren: +## Pflichtmethoden +Jedes Plugin implementiert: - `async search_titles(query: str) -> list[str]` - `seasons_for(title: str) -> list[str]` - `episodes_for(title: str, season: str) -> list[str]` -## Vertrag Plugin ↔ Hauptlogik (`default.py`) -Die Hauptlogik ruft Plugin-Methoden auf und verarbeitet ausschließlich deren Rückgaben. 
+## Wichtige optionale Methoden +- `stream_link_for(...)` +- `resolve_stream_link(...)` +- `metadata_for(...)` +- `available_hosters_for(...)` +- `series_url_for_title(...)` +- `remember_series_url(...)` +- `episode_url_for(...)` +- `available_hosters_for_url(...)` +- `stream_link_for_url(...)` -Wesentliche Rückgaben an die Hauptlogik: -- `search_titles(...)` → Liste von Titel-Strings für die Trefferliste -- `seasons_for(...)` → Liste von Staffel-Labels -- `episodes_for(...)` → Liste von Episoden-Labels -- `stream_link_for(...)` → Hoster-/Player-Link (nicht zwingend finale Media-URL) -- `resolve_stream_link(...)` → finale/spielbare URL nach Redirect/Resolver -- Optional `available_hosters_for(...)` → auswählbare Hoster-Namen im Dialog -- Optional `series_url_for_title(...)` → stabile Detail-URL pro Titel für Folgeaufrufe -- Optional `remember_series_url(...)` → Übernahme einer bereits bekannten Detail-URL +## Film Provider Standard +Wenn keine echten Staffeln existieren: +- `seasons_for(title)` gibt `['Film']` +- `episodes_for(title, 'Film')` gibt `['Stream']` -Standard für Film-Provider (ohne echte Staffeln): -- `seasons_for(title)` gibt `["Film"]` zurück -- `episodes_for(title, "Film")` gibt `["Stream"]` zurück +## Capabilities +Ein Plugin kann Features melden ueber `capabilities()`. 
+Bekannte Werte: +- `popular_series` +- `genres` +- `latest_episodes` +- `new_titles` +- `alpha` +- `series_catalog` -## Optionale Features (Capabilities) -Über `capabilities()` kann das Plugin zusätzliche Funktionen anbieten: -- `popular_series` → `popular_series()` -- `genres` → `genres()` + `titles_for_genre(genre)` -- `latest_episodes` → `latest_episodes(page=1)` +## Suche +Aktuelle Regeln fuer Suchtreffer: +- Match auf Titel +- Wortbasiert +- Keine Teilwort Treffer im selben Wort +- Beschreibungen nicht fuer Match nutzen -## Empfohlene Struktur -- Konstanten für URLs/Endpoints (BASE_URL, Pfade, Templates) -- `requests` + `bs4` optional (fehlt beides, Plugin sollte sauber deaktivieren) -- Helper‑Funktionen für Parsing und Normalisierung -- Caches für Such‑, Staffel‑ und Episoden‑Daten +## Settings +Pro Plugin meist `*_base_url`. +Beispiele: +- `serienstream_base_url` +- `aniworld_base_url` +- `einschalten_base_url` +- `topstream_base_url` +- `filmpalast_base_url` +- `doku_streams_base_url` -## Suche (aktuelle Policy) -- **Nur Titel‑Matches** -- **Wortbasierter Match** nach Normalisierung (Lowercase + Nicht‑Alnum → Leerzeichen) -- Keine Teilwort-Treffer innerhalb eines Wortes (Beispiel: `hund` matcht nicht `thunder`) -- Keine Beschreibung/Plot/Meta für Matches +## Playback Flow +1. Episode oder Film auswaehlen. +2. Optional Hosterliste anzeigen. +3. `stream_link_for` oder `stream_link_for_url` aufrufen. +4. `resolve_stream_link` aufrufen. +5. Finale URL an Kodi geben. -## Namensgebung -- Plugin‑Klassenname: `XxxPlugin` -- Anzeigename (Property `name`): **mit Großbuchstaben beginnen** (z. B. `Serienstream`, `Einschalten`) - -## Settings pro Plugin -Standard: `*_base_url` (Domain / BASE_URL) -- Beispiele: - - `serienstream_base_url` - - `aniworld_base_url` - - `einschalten_base_url` - - `topstream_base_url` - - `filmpalast_base_url` - -## Playback -- `stream_link_for(...)` implementieren (liefert bevorzugten Hoster-Link). 
-- `available_hosters_for(...)` bereitstellen, wenn die Seite mehrere Hoster anbietet. -- `resolve_stream_link(...)` nach einheitlichem Flow umsetzen: - 1. Redirects auflösen (falls vorhanden) - 2. ResolveURL (`resolveurl_backend.resolve`) versuchen - 3. Bei Fehlschlag auf den besten verfügbaren Link zurückfallen -- Optional `set_preferred_hosters(...)` unterstützen, damit die Hoster-Auswahl aus der Hauptlogik direkt greift. - -## Standard‑Flow (empfohlen) -1. **Suche**: nur Titel liefern und Titel→Detail-URL mappen. -2. **Navigation**: `series_url_for_title`/`remember_series_url` unterstützen, damit URLs zwischen Aufrufen stabil bleiben. -3. **Auswahl Hoster**: Hoster-Namen aus der Detailseite extrahieren und anbieten. -4. **Playback**: Hoster-Link liefern, danach konsistent über `resolve_stream_link` finalisieren. -5. **Fallbacks**: bei Layout-Unterschieden defensiv parsen und Logging aktivierbar halten. - -## Debugging -Global gesteuert über Settings: -- `debug_log_urls` -- `debug_dump_html` -- `debug_show_url_info` - -Plugins sollten die Helper aus `addon/plugin_helpers.py` nutzen: +## Logging +Nutze Helper aus `addon/plugin_helpers.py`: - `log_url(...)` - `dump_response_html(...)` - `notify_url(...)` -## Template -`addon/plugins/_template_plugin.py` dient als Startpunkt für neue Provider. 
+## Build und Checks +- ZIP: `./scripts/build_kodi_zip.sh` +- Addon Ordner: `./scripts/build_install_addon.sh` +- Manifest: `python3 scripts/generate_plugin_manifest.py` +- Snapshot Checks: `python3 qa/run_plugin_snapshots.py` -## Build & Test -- ZIP bauen: `./scripts/build_kodi_zip.sh` -- Addon‑Ordner: `./scripts/build_install_addon.sh` - -## Beispiel‑Checkliste -- [ ] `name` korrekt gesetzt -- [ ] `*_base_url` in Settings vorhanden -- [ ] Suche matcht nur Titel und wortbasiert -- [ ] `stream_link_for` + `resolve_stream_link` folgen dem Standard-Flow -- [ ] Optional: `available_hosters_for` + `set_preferred_hosters` vorhanden -- [ ] Optional: `series_url_for_title` + `remember_series_url` vorhanden -- [ ] Fehlerbehandlung und Timeouts vorhanden -- [ ] Optional: Caches für Performance - -## Hinweis zur Erstellung -Teile dieser Dokumentation wurden KI‑gestützt erstellt und bei Bedarf manuell angepasst. +## Kurze Checkliste +- `name` gesetzt und korrekt +- `*_base_url` in Settings vorhanden +- Suche liefert nur passende Titel +- Playback Methoden vorhanden +- Fehler und Timeouts behandelt +- Cache nur da, wo er Zeit spart diff --git a/docs/PLUGIN_MANIFEST.json b/docs/PLUGIN_MANIFEST.json new file mode 100644 index 0000000..07f73d4 --- /dev/null +++ b/docs/PLUGIN_MANIFEST.json @@ -0,0 +1,104 @@ +{ + "schema_version": 1, + "plugins": [ + { + "file": "addon/plugins/aniworld_plugin.py", + "module": "aniworld_plugin", + "name": "Aniworld", + "class": "AniworldPlugin", + "version": "1.0.0", + "capabilities": [ + "genres", + "latest_episodes", + "popular_series" + ], + "prefer_source_metadata": false, + "base_url_setting": "aniworld_base_url", + "available": true, + "unavailable_reason": null, + "error": null + }, + { + "file": "addon/plugins/dokustreams_plugin.py", + "module": "dokustreams_plugin", + "name": "Doku-Streams", + "class": "DokuStreamsPlugin", + "version": "1.0.0", + "capabilities": [ + "genres", + "popular_series" + ], + "prefer_source_metadata": true, + 
"base_url_setting": "doku_streams_base_url", + "available": true, + "unavailable_reason": null, + "error": null + }, + { + "file": "addon/plugins/einschalten_plugin.py", + "module": "einschalten_plugin", + "name": "Einschalten", + "class": "EinschaltenPlugin", + "version": "1.0.0", + "capabilities": [ + "genres", + "new_titles" + ], + "prefer_source_metadata": false, + "base_url_setting": "einschalten_base_url", + "available": true, + "unavailable_reason": null, + "error": null + }, + { + "file": "addon/plugins/filmpalast_plugin.py", + "module": "filmpalast_plugin", + "name": "Filmpalast", + "class": "FilmpalastPlugin", + "version": "1.0.0", + "capabilities": [ + "alpha", + "genres", + "series_catalog" + ], + "prefer_source_metadata": false, + "base_url_setting": "filmpalast_base_url", + "available": true, + "unavailable_reason": null, + "error": null + }, + { + "file": "addon/plugins/serienstream_plugin.py", + "module": "serienstream_plugin", + "name": "Serienstream", + "class": "SerienstreamPlugin", + "version": "1.0.0", + "capabilities": [ + "genres", + "latest_episodes", + "popular_series" + ], + "prefer_source_metadata": false, + "base_url_setting": "serienstream_base_url", + "available": true, + "unavailable_reason": null, + "error": null + }, + { + "file": "addon/plugins/topstreamfilm_plugin.py", + "module": "topstreamfilm_plugin", + "name": "Topstreamfilm", + "class": "TopstreamfilmPlugin", + "version": "1.0.0", + "capabilities": [ + "genres", + "popular_series" + ], + "prefer_source_metadata": false, + "base_url_setting": "topstream_base_url", + "available": true, + "unavailable_reason": null, + "error": null + } + ] +} diff --git a/docs/PLUGIN_SYSTEM.md b/docs/PLUGIN_SYSTEM.md index d4ac5b7..37394ac 100644 --- a/docs/PLUGIN_SYSTEM.md +++ b/docs/PLUGIN_SYSTEM.md @@ -1,96 +1,71 @@ -## ViewIt Plugin-System +# ViewIT Plugin System -Dieses Dokument beschreibt, wie das Plugin-System von **ViewIt** funktioniert und wie die Community neue Integrationen hinzufügen 
kann. +Dieses Dokument beschreibt Laden, Vertrag und Betrieb der Plugins. -### Überblick +## Ueberblick +Der Router laedt Provider Integrationen aus `addon/plugins/*.py`. +Aktive Plugins werden instanziiert und im UI genutzt. -ViewIt lädt Provider-Integrationen dynamisch aus `addon/plugins/*.py`. Jede Datei enthält eine Klasse, die von `BasisPlugin` erbt. Beim Start werden alle Plugins instanziiert und nur aktiv genutzt, wenn sie verfügbar sind. +Relevante Dateien: +- `addon/default.py` +- `addon/plugin_interface.py` +- `docs/DEFAULT_ROUTER.md` +- `docs/PLUGIN_DEVELOPMENT.md` -Weitere Details: -- `docs/DEFAULT_ROUTER.md` (Hauptlogik in `addon/default.py`) -- `docs/PLUGIN_DEVELOPMENT.md` (Entwicklerdoku für Plugins) +## Aktuelle Plugins +- `serienstream_plugin.py` +- `topstreamfilm_plugin.py` +- `einschalten_plugin.py` +- `aniworld_plugin.py` +- `filmpalast_plugin.py` +- `dokustreams_plugin.py` +- `_template_plugin.py` (Vorlage) -### Aktuelle Plugins +## Discovery Ablauf +In `addon/default.py`: +1. Finde `*.py` in `addon/plugins/` +2. Ueberspringe Dateien mit `_` Prefix +3. Importiere Modul +4. Nutze `Plugin = `, falls vorhanden +5. Sonst instanziiere `BasisPlugin` Subklassen deterministisch +6. Ueberspringe Plugins mit `is_available = False` -- `serienstream_plugin.py` – Serienstream (s.to) -- `topstreamfilm_plugin.py` – Topstreamfilm -- `einschalten_plugin.py` – Einschalten -- `aniworld_plugin.py` – Aniworld -- `filmpalast_plugin.py` – Filmpalast -- `_template_plugin.py` – Vorlage für neue Plugins +## Basis Interface +`BasisPlugin` definiert den Kern: +- `search_titles` +- `seasons_for` +- `episodes_for` -### Plugin-Discovery (Ladeprozess) +Weitere Methoden sind optional und werden nur genutzt, wenn vorhanden. -Der Loader in `addon/default.py`: +## Capabilities +Plugins koennen Features aktiv melden. +Typische Werte: +- `popular_series` +- `genres` +- `latest_episodes` +- `new_titles` +- `alpha` +- `series_catalog` -1. Sucht alle `*.py` in `addon/plugins/` -2. 
Überspringt Dateien, die mit `_` beginnen -3. Lädt Module dynamisch -4. Instanziert Klassen, die von `BasisPlugin` erben -5. Ignoriert Plugins mit `is_available = False` +Das UI zeigt nur Menues fuer aktiv gemeldete Features. -Damit bleiben fehlerhafte Plugins isoliert und blockieren nicht das gesamte Add-on. +## Metadaten Quelle +`prefer_source_metadata = True` bedeutet: +- Quelle zuerst +- TMDB nur Fallback -### BasisPlugin – verpflichtende Methoden +## Stabilitaet +- Keine Netz Calls im Import Block. +- Fehler im Plugin muessen lokal behandelt werden. +- Ein defektes Plugin darf andere Plugins nicht blockieren. -Definiert in `addon/plugin_interface.py`: +## Build +Kodi ZIP bauen: -- `async search_titles(query: str) -> list[str]` -- `seasons_for(title: str) -> list[str]` -- `episodes_for(title: str, season: str) -> list[str]` - -### Optionale Features (Capabilities) - -Plugins können zusätzliche Features anbieten: - -- `capabilities() -> set[str]` - - `popular_series`: liefert beliebte Serien - - `genres`: Genre-Liste verfügbar - - `latest_episodes`: neue Episoden verfügbar -- `popular_series() -> list[str]` -- `genres() -> list[str]` -- `titles_for_genre(genre: str) -> list[str]` -- `latest_episodes(page: int = 1) -> list[LatestEpisode]` (wenn angeboten) - -ViewIt zeigt im UI nur die Features an, die ein Plugin tatsächlich liefert. - -### Plugin-Struktur (empfohlen) - -Eine Integration sollte typischerweise bieten: - -- Konstante `BASE_URL` -- `search_titles()` mit Provider-Suche -- `seasons_for()` und `episodes_for()` mit HTML-Parsing -- `stream_link_for()` optional für direkte Playback-Links -- Optional: `available_hosters_for()` oder Provider-spezifische Helfer - -Als Startpunkt dient `addon/plugins/_template_plugin.py`. - -### Community-Erweiterungen (Workflow) - -1. Fork/Branch erstellen -2. Neue Datei unter `addon/plugins/` hinzufügen (z. B. `meinprovider_plugin.py`) -3. Klasse erstellen, die `BasisPlugin` implementiert -4. 
In Kodi testen (ZIP bauen, installieren) -5. PR öffnen - -### Qualitätsrichtlinien - -- Keine Netzwerkzugriffe im Import-Top-Level -- Netzwerkzugriffe nur in Methoden (z. B. `search_titles`) -- Fehler sauber abfangen und verständliche Fehlermeldungen liefern -- Kein globaler Zustand, der across instances überrascht -- Provider-spezifische Parser in Helper-Funktionen kapseln - -### Debugging & Logs - -Hilfreiche Logs werden nach `userdata/addon_data/plugin.video.viewit/logs/` geschrieben. -Provider sollten URL-Logging optional halten (Settings). - -### ZIP-Build - -``` +```bash ./scripts/build_kodi_zip.sh ``` -Das ZIP liegt anschließend unter `dist/plugin.video.viewit-.zip`. +Ergebnis: +`dist/plugin.video.viewit-.zip` diff --git a/docs/RELEASE.md b/docs/RELEASE.md new file mode 100644 index 0000000..fbb8de5 --- /dev/null +++ b/docs/RELEASE.md @@ -0,0 +1,44 @@ +# Release Flow (Main + Nightly) + +This project uses two release channels: + +- `nightly`: integration and test channel +- `main`: stable channel + +## Rules + +- Feature work goes to `nightly` only. +- Promote from `nightly` to `main` with `--squash` only. +- `main` version has no suffix (`0.1.60`). +- `nightly` version uses `-nightly` and is always at least one patch higher than `main` (`0.1.61-nightly`). +- Keep changelogs split: + - `CHANGELOG-NIGHTLY.md` + - `CHANGELOG.md` + +## Nightly publish + +1) Finish changes on `nightly`. +2) Bump addon version in `addon/addon.xml` to `X.Y.Z-nightly`. +3) Build and publish nightly repo artifacts. +4) Push `nightly`. + +## Promote nightly to main + +```bash +git checkout main +git pull origin main +git merge --squash nightly +git commit -m "release: X.Y.Z" +``` + +Then: + +1) Set `addon/addon.xml` version to `X.Y.Z` (without `-nightly`). +2) Build and publish main repo artifacts. +3) Push `main`. +4) Optional tag: `vX.Y.Z`. 
+ +## Local ZIPs (separated) + +- Main ZIP output: `dist/local_zips/main/` +- Nightly ZIP output: `dist/local_zips/nightly/` diff --git a/qa/plugin_snapshots.json b/qa/plugin_snapshots.json new file mode 100644 index 0000000..71d5c4d --- /dev/null +++ b/qa/plugin_snapshots.json @@ -0,0 +1,73 @@ +{ + "snapshots": { + "Serienstream::search_titles::trek": [ + "Star Trek: Lower Decks", + "Star Trek: Prodigy", + "Star Trek: The Animated Series", + "Inside Star Trek", + "Raumschiff Enterprise - Star Trek: The Original Series", + "Star Trek: Deep Space Nine", + "Star Trek: Discovery", + "Star Trek: Enterprise", + "Star Trek: Picard", + "Star Trek: Raumschiff Voyager", + "Star Trek: Short Treks", + "Star Trek: Starfleet Academy", + "Star Trek: Strange New Worlds", + "Star Trek: The Next Generation" + ], + "Aniworld::search_titles::naruto": [ + "Naruto", + "Naruto Shippuden", + "Boruto: Naruto Next Generations", + "Naruto Spin-Off: Rock Lee & His Ninja Pals" + ], + "Topstreamfilm::search_titles::matrix": [ + "Darkdrive – Verschollen in der Matrix", + "Matrix Reloaded", + "Armitage III: Poly Matrix", + "Matrix Resurrections", + "Matrix", + "Matrix Revolutions", + "Matrix Fighters" + ], + "Einschalten::new_titles_page::1": [ + "Miracle: Das Eishockeywunder von 1980", + "No Escape - Grizzly Night", + "Kidnapped: Der Fall Elizabeth Smart", + "The Internship", + "The Rip", + "Die Toten vom Bodensee – Schicksalsrad", + "People We Meet on Vacation", + "Anaconda", + "Even If This Love Disappears Tonight", + "Die Stunde der Mutigen", + "10DANCE", + "SpongeBob Schwammkopf: Piraten Ahoi!", + "Ella McCay", + "Merv", + "Elmo and Mark Rober's Merry Giftmas", + "Als mein Vater Weihnachten rettete 2", + "Die Fraggles: Der erste Schnee", + "Gregs Tagebuch 3: Jetzt reicht's!", + "Not Without Hope", + "Five Nights at Freddy's 2" + ], + "Filmpalast::search_titles::trek": [ + "Star Trek", + "Star Trek - Der Film", + "Star Trek 2 - Der Zorn des Khan", + "Star Trek 9 Der Aufstand", + "Star Trek: 
Nemesis", + "Star Trek: Section 31", + "Star Trek: Starfleet Academy", + "Star Trek: Strange New Worlds" + ], + "Doku-Streams::search_titles::japan": [ + "Deutsche im Knast - Japan und die Disziplin", + "Die Meerfrauen von Japan", + "Japan - Land der Moderne und Tradition", + "Japan im Zweiten Weltkrieg - Der Fall des Kaiserreichs" + ] + } +} diff --git a/qa/run_plugin_snapshots.py b/qa/run_plugin_snapshots.py new file mode 100755 index 0000000..2fdd89d --- /dev/null +++ b/qa/run_plugin_snapshots.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python3 +"""Run live snapshot checks for plugins. + +Use --update to refresh stored snapshots. +""" +from __future__ import annotations + +import argparse +import asyncio +import importlib.util +import inspect +import json +import sys +from pathlib import Path +from typing import Any + +ROOT_DIR = Path(__file__).resolve().parents[1] +PLUGIN_DIR = ROOT_DIR / "addon" / "plugins" +SNAPSHOT_PATH = ROOT_DIR / "qa" / "plugin_snapshots.json" + +sys.path.insert(0, str(ROOT_DIR / "addon")) + +try: + from plugin_interface import BasisPlugin # type: ignore +except Exception as exc: # pragma: no cover + raise SystemExit(f"Failed to import BasisPlugin: {exc}") + +CONFIG = [ + {"plugin": "Serienstream", "method": "search_titles", "args": ["trek"], "max_items": 20}, + {"plugin": "Aniworld", "method": "search_titles", "args": ["naruto"], "max_items": 20}, + {"plugin": "Topstreamfilm", "method": "search_titles", "args": ["matrix"], "max_items": 20}, + {"plugin": "Einschalten", "method": "new_titles_page", "args": [1], "max_items": 20}, + {"plugin": "Filmpalast", "method": "search_titles", "args": ["trek"], "max_items": 20}, + {"plugin": "Doku-Streams", "method": "search_titles", "args": ["japan"], "max_items": 20}, +] + + +def _import_module(path: Path): + spec = importlib.util.spec_from_file_location(path.stem, path) + if spec is None or spec.loader is None: + raise ImportError(f"Missing spec for {path}") + module = 
importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +def _discover_plugins() -> dict[str, BasisPlugin]: + plugins: dict[str, BasisPlugin] = {} + for file_path in sorted(PLUGIN_DIR.glob("*.py")): + if file_path.name.startswith("_"): + continue + module = _import_module(file_path) + preferred = getattr(module, "Plugin", None) + if inspect.isclass(preferred) and issubclass(preferred, BasisPlugin) and preferred is not BasisPlugin: + classes = [preferred] + else: + classes = [ + obj + for obj in module.__dict__.values() + if inspect.isclass(obj) and issubclass(obj, BasisPlugin) and obj is not BasisPlugin + ] + classes.sort(key=lambda cls: cls.__name__.casefold()) + for cls in classes: + instance = cls() + name = str(getattr(instance, "name", "") or "").strip() + if name and name not in plugins: + plugins[name] = instance + return plugins + + +def _normalize_titles(value: Any, max_items: int) -> list[str]: + if not value: + return [] + titles = [str(item).strip() for item in list(value) if item and str(item).strip()] + seen = set() + normalized: list[str] = [] + for title in titles: + key = title.casefold() + if key in seen: + continue + seen.add(key) + normalized.append(title) + if len(normalized) >= max_items: + break + return normalized + + +def _snapshot_key(entry: dict[str, Any]) -> str: + args = entry.get("args", []) + return f"{entry['plugin']}::{entry['method']}::{','.join(str(a) for a in args)}" + + +def _call_method(plugin: BasisPlugin, method_name: str, args: list[Any]): + method = getattr(plugin, method_name, None) + if not callable(method): + raise RuntimeError(f"Method missing: {method_name}") + result = method(*args) + if asyncio.iscoroutine(result): + return asyncio.run(result) + return result + + +def main() -> int: + parser = argparse.ArgumentParser() + parser.add_argument("--update", action="store_true") + args = parser.parse_args() + + snapshots: dict[str, Any] = {} + if 
SNAPSHOT_PATH.exists(): + snapshots = json.loads(SNAPSHOT_PATH.read_text(encoding="utf-8")) + data = snapshots.get("snapshots", {}) if isinstance(snapshots, dict) else {} + if args.update: + data = {} + + plugins = _discover_plugins() + errors = [] + + for entry in CONFIG: + plugin_name = entry["plugin"] + plugin = plugins.get(plugin_name) + if plugin is None: + errors.append(f"Plugin missing: {plugin_name}") + continue + key = _snapshot_key(entry) + try: + result = _call_method(plugin, entry["method"], entry.get("args", [])) + normalized = _normalize_titles(result, entry.get("max_items", 20)) + except Exception as exc: + errors.append(f"Snapshot error: {key} ({exc})") + if args.update: + data[key] = {"error": str(exc)} + continue + if args.update: + data[key] = normalized + else: + expected = data.get(key) + if expected != normalized: + errors.append(f"Snapshot mismatch: {key}\nExpected: {expected}\nActual: {normalized}") + + if args.update: + SNAPSHOT_PATH.parent.mkdir(parents=True, exist_ok=True) + SNAPSHOT_PATH.write_text(json.dumps({"snapshots": data}, indent=2, ensure_ascii=False) + "\n", encoding="utf-8") + + if errors: + for err in errors: + print(err) + return 1 + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/__pycache__/test_einschalten_api.cpython-312.pyc b/scripts/__pycache__/test_einschalten_api.cpython-312.pyc deleted file mode 100644 index 46a3480..0000000 Binary files a/scripts/__pycache__/test_einschalten_api.cpython-312.pyc and /dev/null differ diff --git a/scripts/__pycache__/test_tmdb.cpython-312.pyc b/scripts/__pycache__/test_tmdb.cpython-312.pyc deleted file mode 100644 index 37da5a3..0000000 Binary files a/scripts/__pycache__/test_tmdb.cpython-312.pyc and /dev/null differ diff --git a/scripts/build_kodi_zip.sh b/scripts/build_kodi_zip.sh index 4ae5971..96277de 100755 --- a/scripts/build_kodi_zip.sh +++ b/scripts/build_kodi_zip.sh @@ -37,6 +37,6 @@ ZIP_PATH="${INSTALL_DIR}/${ZIP_NAME}" 
ADDON_DIR="$("${ROOT_DIR}/scripts/build_install_addon.sh" >/dev/null; echo "${INSTALL_DIR}/${ADDON_ID}")" rm -f "${ZIP_PATH}" -(cd "${INSTALL_DIR}" && zip -r "${ZIP_NAME}" "$(basename "${ADDON_DIR}")" >/dev/null) +python3 "${ROOT_DIR}/scripts/zip_deterministic.py" "${ZIP_PATH}" "${ADDON_DIR}" >/dev/null echo "${ZIP_PATH}" diff --git a/scripts/build_local_kodi_repo.sh b/scripts/build_local_kodi_repo.sh index ddedb92..861970d 100755 --- a/scripts/build_local_kodi_repo.sh +++ b/scripts/build_local_kodi_repo.sh @@ -21,8 +21,20 @@ fi mkdir -p "${REPO_DIR}" +read -r ADDON_ID ADDON_VERSION < <(python3 - "${PLUGIN_ADDON_XML}" <<'PY' +import sys +import xml.etree.ElementTree as ET + +root = ET.parse(sys.argv[1]).getroot() +print(root.attrib.get("id", "plugin.video.viewit"), root.attrib.get("version", "0.0.0")) +PY +) + PLUGIN_ZIP="$("${ROOT_DIR}/scripts/build_kodi_zip.sh")" -cp -f "${PLUGIN_ZIP}" "${REPO_DIR}/" +PLUGIN_ZIP_NAME="$(basename "${PLUGIN_ZIP}")" +PLUGIN_ADDON_DIR_IN_REPO="${REPO_DIR}/${ADDON_ID}" +mkdir -p "${PLUGIN_ADDON_DIR_IN_REPO}" +cp -f "${PLUGIN_ZIP}" "${PLUGIN_ADDON_DIR_IN_REPO}/${PLUGIN_ZIP_NAME}" read -r REPO_ADDON_ID REPO_ADDON_VERSION < <(python3 - "${REPO_ADDON_XML}" <<'PY' import sys @@ -73,7 +85,10 @@ PY REPO_ZIP_NAME="${REPO_ADDON_ID}-${REPO_ADDON_VERSION}.zip" REPO_ZIP_PATH="${REPO_DIR}/${REPO_ZIP_NAME}" rm -f "${REPO_ZIP_PATH}" -(cd "${TMP_DIR}" && zip -r "${REPO_ZIP_PATH}" "${REPO_ADDON_ID}" >/dev/null) +python3 "${ROOT_DIR}/scripts/zip_deterministic.py" "${REPO_ZIP_PATH}" "${TMP_REPO_ADDON_DIR}" >/dev/null +REPO_ADDON_DIR_IN_REPO="${REPO_DIR}/${REPO_ADDON_ID}" +mkdir -p "${REPO_ADDON_DIR_IN_REPO}" +cp -f "${REPO_ZIP_PATH}" "${REPO_ADDON_DIR_IN_REPO}/${REPO_ZIP_NAME}" python3 - "${PLUGIN_ADDON_XML}" "${TMP_REPO_ADDON_DIR}/addon.xml" "${REPO_DIR}/addons.xml" <<'PY' import sys @@ -107,4 +122,5 @@ echo "Repo built:" echo " ${REPO_DIR}/addons.xml" echo " ${REPO_DIR}/addons.xml.md5" echo " ${REPO_ZIP_PATH}" -echo " ${REPO_DIR}/$(basename 
"${PLUGIN_ZIP}")" +echo " ${PLUGIN_ADDON_DIR_IN_REPO}/${PLUGIN_ZIP_NAME}" +echo " ${REPO_ADDON_DIR_IN_REPO}/${REPO_ZIP_NAME}" diff --git a/scripts/generate_plugin_manifest.py b/scripts/generate_plugin_manifest.py new file mode 100755 index 0000000..fe8d3b9 --- /dev/null +++ b/scripts/generate_plugin_manifest.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 +"""Generate a JSON manifest for addon plugins.""" +from __future__ import annotations + +import importlib.util +import inspect +import json +import sys +from pathlib import Path + +ROOT_DIR = Path(__file__).resolve().parents[1] +PLUGIN_DIR = ROOT_DIR / "addon" / "plugins" +OUTPUT_PATH = ROOT_DIR / "docs" / "PLUGIN_MANIFEST.json" + +sys.path.insert(0, str(ROOT_DIR / "addon")) + +try: + from plugin_interface import BasisPlugin # type: ignore +except Exception as exc: # pragma: no cover + raise SystemExit(f"Failed to import BasisPlugin: {exc}") + + +def _import_module(path: Path): + spec = importlib.util.spec_from_file_location(path.stem, path) + if spec is None or spec.loader is None: + raise ImportError(f"Missing spec for {path}") + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +def _collect_plugins(): + plugins = [] + for file_path in sorted(PLUGIN_DIR.glob("*.py")): + if file_path.name.startswith("_"): + continue + entry = { + "file": str(file_path.relative_to(ROOT_DIR)), + "module": file_path.stem, + "name": None, + "class": None, + "version": None, + "capabilities": [], + "prefer_source_metadata": False, + "base_url_setting": None, + "available": None, + "unavailable_reason": None, + "error": None, + } + try: + module = _import_module(file_path) + preferred = getattr(module, "Plugin", None) + if inspect.isclass(preferred) and issubclass(preferred, BasisPlugin) and preferred is not BasisPlugin: + classes = [preferred] + else: + classes = [ + obj + for obj in module.__dict__.values() + if inspect.isclass(obj) and 
issubclass(obj, BasisPlugin) and obj is not BasisPlugin + ] + classes.sort(key=lambda cls: cls.__name__.casefold()) + + if not classes: + entry["error"] = "No plugin classes found" + plugins.append(entry) + continue + + cls = classes[0] + instance = cls() + entry["class"] = cls.__name__ + entry["name"] = str(getattr(instance, "name", "") or "") or None + entry["version"] = str(getattr(instance, "version", "0.0.0") or "0.0.0") + entry["prefer_source_metadata"] = bool(getattr(instance, "prefer_source_metadata", False)) + entry["available"] = bool(getattr(instance, "is_available", True)) + entry["unavailable_reason"] = getattr(instance, "unavailable_reason", None) + try: + caps = instance.capabilities() # type: ignore[call-arg] + entry["capabilities"] = sorted([str(c) for c in caps]) if caps else [] + except Exception: + entry["capabilities"] = [] + + entry["base_url_setting"] = getattr(module, "SETTING_BASE_URL", None) + except Exception as exc: # pragma: no cover + entry["error"] = str(exc) + plugins.append(entry) + + plugins.sort(key=lambda item: (item.get("name") or item["module"]).casefold()) + return plugins + + +def main() -> int: + if not PLUGIN_DIR.exists(): + raise SystemExit("Plugin directory missing") + manifest = { + "schema_version": 1, + "plugins": _collect_plugins(), + } + OUTPUT_PATH.parent.mkdir(parents=True, exist_ok=True) + OUTPUT_PATH.write_text(json.dumps(manifest, indent=2, ensure_ascii=False) + "\n", encoding="utf-8") + print(str(OUTPUT_PATH)) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/zip_deterministic.py b/scripts/zip_deterministic.py new file mode 100755 index 0000000..f3cea34 --- /dev/null +++ b/scripts/zip_deterministic.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python3 +"""Create deterministic zip archives. + +Usage: + zip_deterministic.py + +The archive will include the root directory itself and all files under it. 
+""" +from __future__ import annotations + +import os +import sys +import time +import zipfile +from pathlib import Path + + +def _timestamp() -> tuple[int, int, int, int, int, int]: + epoch = os.environ.get("SOURCE_DATE_EPOCH") + if epoch: + try: + value = int(epoch) + return time.gmtime(value)[:6] + except Exception: + pass + return (2000, 1, 1, 0, 0, 0) + + +def _iter_files(root: Path): + for dirpath, dirnames, filenames in os.walk(root): + dirnames[:] = sorted([d for d in dirnames if d != "__pycache__"]) + for filename in sorted(filenames): + if filename.endswith(".pyc"): + continue + yield Path(dirpath) / filename + + +def _add_file(zf: zipfile.ZipFile, file_path: Path, arcname: str) -> None: + info = zipfile.ZipInfo(arcname, date_time=_timestamp()) + info.compress_type = zipfile.ZIP_DEFLATED + info.external_attr = (0o644 & 0xFFFF) << 16 + with file_path.open("rb") as handle: + data = handle.read() + zf.writestr(info, data, compress_type=zipfile.ZIP_DEFLATED) + + +def main() -> int: + if len(sys.argv) != 3: + print("Usage: zip_deterministic.py ") + return 2 + + zip_path = Path(sys.argv[1]).resolve() + root = Path(sys.argv[2]).resolve() + if not root.exists() or not root.is_dir(): + print(f"Missing root dir: {root}") + return 2 + + base = root.parent + zip_path.parent.mkdir(parents=True, exist_ok=True) + if zip_path.exists(): + zip_path.unlink() + + with zipfile.ZipFile(zip_path, "w") as zf: + for file_path in sorted(_iter_files(root)): + arcname = str(file_path.relative_to(base)).replace(os.sep, "/") + _add_file(zf, file_path, arcname) + + print(str(zip_path)) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main())