Merge nightly into main

This commit is contained in:
2026-02-23 17:56:15 +01:00
13 changed files with 1113 additions and 517 deletions

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version='1.0' encoding='utf-8'?>
<addon id="plugin.video.viewit" name="ViewIt" version="0.1.56" provider-name="ViewIt"> <addon id="plugin.video.viewit" name="ViewIt" version="0.1.57" provider-name="ViewIt">
<requires> <requires>
<import addon="xbmc.python" version="3.0.0" /> <import addon="xbmc.python" version="3.0.0" />
<import addon="script.module.requests" /> <import addon="script.module.requests" />

View File

@@ -8,6 +8,7 @@ ruft Plugin-Implementierungen auf und startet die Wiedergabe.
from __future__ import annotations from __future__ import annotations
import asyncio import asyncio
import atexit
from contextlib import contextmanager from contextlib import contextmanager
from datetime import datetime from datetime import datetime
import importlib.util import importlib.util
@@ -102,6 +103,13 @@ except ImportError: # pragma: no cover - allow importing outside Kodi (e.g. lin
xbmcplugin = _XbmcPluginStub() xbmcplugin = _XbmcPluginStub()
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from http_session_pool import close_all_sessions
from metadata_utils import (
collect_plugin_metadata as _collect_plugin_metadata,
merge_metadata as _merge_metadata,
metadata_policy as _metadata_policy_impl,
needs_tmdb as _needs_tmdb,
)
from tmdb import TmdbCastMember, fetch_tv_episode_credits, lookup_movie, lookup_tv_season, lookup_tv_season_summary, lookup_tv_show from tmdb import TmdbCastMember, fetch_tv_episode_credits, lookup_movie, lookup_tv_season, lookup_tv_season_summary, lookup_tv_show
PLUGIN_DIR = Path(__file__).with_name("plugins") PLUGIN_DIR = Path(__file__).with_name("plugins")
@@ -116,8 +124,22 @@ _TMDB_LOG_PATH: str | None = None
_GENRE_TITLES_CACHE: dict[tuple[str, str], list[str]] = {} _GENRE_TITLES_CACHE: dict[tuple[str, str], list[str]] = {}
_ADDON_INSTANCE = None _ADDON_INSTANCE = None
_PLAYSTATE_CACHE: dict[str, dict[str, object]] | None = None _PLAYSTATE_CACHE: dict[str, dict[str, object]] | None = None
# Reentrant locks: _PLAYSTATE_LOCK guards the playstate cache read/write,
# _TMDB_LOCK guards all TMDB cache dicts (see _tmdb_cache_get/_tmdb_cache_set).
_PLAYSTATE_LOCK = threading.RLock()
_TMDB_LOCK = threading.RLock()
WATCHED_THRESHOLD = 0.9 WATCHED_THRESHOLD = 0.9
atexit.register(close_all_sessions)
def _tmdb_cache_get(cache: dict, key, default=None):
    """Thread-safe read from a TMDB cache dict (guarded by _TMDB_LOCK)."""
    with _TMDB_LOCK:
        value = cache.get(key, default)
    return value
def _tmdb_cache_set(cache: dict, key, value) -> None:
    """Thread-safe write into a TMDB cache dict (guarded by _TMDB_LOCK)."""
    with _TMDB_LOCK:
        cache[key] = value
def _tmdb_prefetch_concurrency() -> int: def _tmdb_prefetch_concurrency() -> int:
"""Max number of concurrent TMDB lookups when prefetching metadata for lists.""" """Max number of concurrent TMDB lookups when prefetching metadata for lists."""
@@ -155,12 +177,19 @@ def _busy_close() -> None:
@contextmanager @contextmanager
def _busy_dialog(): def _busy_dialog(message: str = "Bitte warten...", *, heading: str = "Bitte warten"):
_busy_open() """Progress-Dialog statt Spinner, mit kurzem Status-Text."""
with _progress_dialog(heading, message) as progress:
progress(10, message)
def _update(step_message: str, percent: int | None = None) -> bool:
pct = 50 if percent is None else max(5, min(95, int(percent)))
return progress(pct, step_message or message)
try: try:
yield yield _update
finally: finally:
_busy_close() progress(100, "Fertig")
@contextmanager @contextmanager
@@ -202,6 +231,33 @@ def _progress_dialog(heading: str, message: str = ""):
pass pass
def _method_accepts_kwarg(method: object, kwarg_name: str) -> bool:
if not callable(method):
return False
try:
signature = inspect.signature(method)
except Exception:
return False
for param in signature.parameters.values():
if param.kind == inspect.Parameter.VAR_KEYWORD:
return True
if param.name == kwarg_name and param.kind in (
inspect.Parameter.POSITIONAL_OR_KEYWORD,
inspect.Parameter.KEYWORD_ONLY,
):
return True
return False
def _call_plugin_search(plugin: BasisPlugin, query: str, *, progress_callback=None):
method = getattr(plugin, "search_titles", None)
if not callable(method):
raise RuntimeError("Plugin hat keine gueltige search_titles Methode.")
if progress_callback is not None and _method_accepts_kwarg(method, "progress_callback"):
return method(query, progress_callback=progress_callback)
return method(query)
def _get_handle() -> int: def _get_handle() -> int:
return int(sys.argv[1]) if len(sys.argv) > 1 else -1 return int(sys.argv[1]) if len(sys.argv) > 1 else -1
@@ -242,6 +298,7 @@ def _playstate_path() -> str:
def _load_playstate() -> dict[str, dict[str, object]]: def _load_playstate() -> dict[str, dict[str, object]]:
global _PLAYSTATE_CACHE global _PLAYSTATE_CACHE
with _PLAYSTATE_LOCK:
if _PLAYSTATE_CACHE is not None: if _PLAYSTATE_CACHE is not None:
return _PLAYSTATE_CACHE return _PLAYSTATE_CACHE
path = _playstate_path() path = _playstate_path()
@@ -269,6 +326,7 @@ def _load_playstate() -> dict[str, dict[str, object]]:
def _save_playstate(state: dict[str, dict[str, object]]) -> None: def _save_playstate(state: dict[str, dict[str, object]]) -> None:
global _PLAYSTATE_CACHE global _PLAYSTATE_CACHE
with _PLAYSTATE_LOCK:
_PLAYSTATE_CACHE = state _PLAYSTATE_CACHE = state
path = _playstate_path() path = _playstate_path()
try: try:
@@ -452,40 +510,18 @@ def _get_setting_int(setting_id: str, *, default: int = 0) -> int:
return default return default
# Values of the per-plugin "*_metadata_source" setting (see _metadata_setting_id).
METADATA_MODE_AUTO = 0
METADATA_MODE_SOURCE = 1
METADATA_MODE_TMDB = 2
METADATA_MODE_MIX = 3
def _metadata_setting_id(plugin_name: str) -> str:
safe = re.sub(r"[^a-z0-9]+", "_", (plugin_name or "").strip().casefold()).strip("_")
return f"{safe}_metadata_source" if safe else "metadata_source"
def _plugin_supports_metadata(plugin: BasisPlugin) -> bool:
    """Return True when the plugin class overrides BasisPlugin.metadata_for."""
    try:
        return plugin.__class__.metadata_for is not BasisPlugin.metadata_for
    except Exception:
        return False
def _metadata_policy( def _metadata_policy(
plugin_name: str, plugin_name: str,
plugin: BasisPlugin, plugin: BasisPlugin,
*, *,
allow_tmdb: bool, allow_tmdb: bool,
) -> tuple[bool, bool, bool]: ) -> tuple[bool, bool, bool]:
mode = _get_setting_int(_metadata_setting_id(plugin_name), default=METADATA_MODE_AUTO) return _metadata_policy_impl(
supports_source = _plugin_supports_metadata(plugin) plugin_name,
if mode == METADATA_MODE_SOURCE: plugin,
return supports_source, False, True allow_tmdb=allow_tmdb,
if mode == METADATA_MODE_TMDB: get_setting_int=_get_setting_int,
return False, allow_tmdb, False )
if mode == METADATA_MODE_MIX:
return supports_source, allow_tmdb, True
prefer_source = bool(getattr(plugin, "prefer_source_metadata", False))
return supports_source, allow_tmdb, prefer_source
def _tmdb_list_enabled() -> bool: def _tmdb_list_enabled() -> bool:
@@ -715,11 +751,11 @@ def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], li
show_cast = _get_setting_bool("tmdb_show_cast", default=False) show_cast = _get_setting_bool("tmdb_show_cast", default=False)
flags = f"p{int(show_plot)}a{int(show_art)}f{int(show_fanart)}r{int(show_rating)}v{int(show_votes)}c{int(show_cast)}" flags = f"p{int(show_plot)}a{int(show_art)}f{int(show_fanart)}r{int(show_rating)}v{int(show_votes)}c{int(show_cast)}"
cache_key = f"{language}|{flags}|{title_key}" cache_key = f"{language}|{flags}|{title_key}"
cached = _TMDB_CACHE.get(cache_key) cached = _tmdb_cache_get(_TMDB_CACHE, cache_key)
if cached is not None: if cached is not None:
info, art = cached info, art = cached
# Cast wird nicht in _TMDB_CACHE gehalten (weil es ListItem.setCast betrifft), daher separat cachen: # Cast wird nicht in _TMDB_CACHE gehalten (weil es ListItem.setCast betrifft), daher separat cachen:
cast_cached = _TMDB_CAST_CACHE.get(cache_key, []) cast_cached = _tmdb_cache_get(_TMDB_CAST_CACHE, cache_key, [])
return info, art, list(cast_cached) return info, art, list(cast_cached)
info_labels: dict[str, str] = {"title": title} info_labels: dict[str, str] = {"title": title}
@@ -777,7 +813,7 @@ def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], li
if meta: if meta:
# Nur TV-IDs cachen (für Staffel-/Episoden-Lookups); Movie-IDs würden dort fehlschlagen. # Nur TV-IDs cachen (für Staffel-/Episoden-Lookups); Movie-IDs würden dort fehlschlagen.
if is_tv: if is_tv:
_TMDB_ID_CACHE[title_key] = int(getattr(meta, "tmdb_id", 0) or 0) _tmdb_cache_set(_TMDB_ID_CACHE, title_key, int(getattr(meta, "tmdb_id", 0) or 0))
info_labels.setdefault("mediatype", "tvshow") info_labels.setdefault("mediatype", "tvshow")
else: else:
info_labels.setdefault("mediatype", "movie") info_labels.setdefault("mediatype", "movie")
@@ -805,8 +841,8 @@ def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], li
elif log_requests or log_responses: elif log_requests or log_responses:
_tmdb_file_log(f"TMDB MISS title={title!r}") _tmdb_file_log(f"TMDB MISS title={title!r}")
_TMDB_CACHE[cache_key] = (info_labels, art) _tmdb_cache_set(_TMDB_CACHE, cache_key, (info_labels, art))
_TMDB_CAST_CACHE[cache_key] = list(cast) _tmdb_cache_set(_TMDB_CAST_CACHE, cache_key, list(cast))
return info_labels, art, list(cast) return info_labels, art, list(cast)
@@ -852,10 +888,10 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label
if not _tmdb_enabled(): if not _tmdb_enabled():
return {"title": episode_label}, {} return {"title": episode_label}, {}
title_key = (title or "").strip().casefold() title_key = (title or "").strip().casefold()
tmdb_id = _TMDB_ID_CACHE.get(title_key) tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key)
if not tmdb_id: if not tmdb_id:
_tmdb_labels_and_art(title) _tmdb_labels_and_art(title)
tmdb_id = _TMDB_ID_CACHE.get(title_key) tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key)
if not tmdb_id: if not tmdb_id:
return {"title": episode_label}, {} return {"title": episode_label}, {}
@@ -869,7 +905,7 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label
show_art = _get_setting_bool("tmdb_show_art", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True)
flags = f"p{int(show_plot)}a{int(show_art)}" flags = f"p{int(show_plot)}a{int(show_art)}"
season_key = (tmdb_id, season_number, language, flags) season_key = (tmdb_id, season_number, language, flags)
cached_season = _TMDB_SEASON_CACHE.get(season_key) cached_season = _tmdb_cache_get(_TMDB_SEASON_CACHE, season_key)
if cached_season is None: if cached_season is None:
api_key = _get_setting_string("tmdb_api_key").strip() api_key = _get_setting_string("tmdb_api_key").strip()
if not api_key: if not api_key:
@@ -902,7 +938,7 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label
if show_art and ep.thumb: if show_art and ep.thumb:
art = {"thumb": ep.thumb} art = {"thumb": ep.thumb}
mapped[ep_no] = (info, art) mapped[ep_no] = (info, art)
_TMDB_SEASON_CACHE[season_key] = mapped _tmdb_cache_set(_TMDB_SEASON_CACHE, season_key, mapped)
cached_season = mapped cached_season = mapped
return cached_season.get(episode_number, ({"title": episode_label}, {})) return cached_season.get(episode_number, ({"title": episode_label}, {}))
@@ -916,10 +952,10 @@ def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) ->
return [] return []
title_key = (title or "").strip().casefold() title_key = (title or "").strip().casefold()
tmdb_id = _TMDB_ID_CACHE.get(title_key) tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key)
if not tmdb_id: if not tmdb_id:
_tmdb_labels_and_art(title) _tmdb_labels_and_art(title)
tmdb_id = _TMDB_ID_CACHE.get(title_key) tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key)
if not tmdb_id: if not tmdb_id:
return [] return []
@@ -930,13 +966,13 @@ def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) ->
language = _get_setting_string("tmdb_language").strip() or "de-DE" language = _get_setting_string("tmdb_language").strip() or "de-DE"
cache_key = (tmdb_id, season_number, episode_number, language) cache_key = (tmdb_id, season_number, episode_number, language)
cached = _TMDB_EPISODE_CAST_CACHE.get(cache_key) cached = _tmdb_cache_get(_TMDB_EPISODE_CAST_CACHE, cache_key)
if cached is not None: if cached is not None:
return list(cached) return list(cached)
api_key = _get_setting_string("tmdb_api_key").strip() api_key = _get_setting_string("tmdb_api_key").strip()
if not api_key: if not api_key:
_TMDB_EPISODE_CAST_CACHE[cache_key] = [] _tmdb_cache_set(_TMDB_EPISODE_CAST_CACHE, cache_key, [])
return [] return []
log_requests = _get_setting_bool("tmdb_log_requests", default=False) log_requests = _get_setting_bool("tmdb_log_requests", default=False)
@@ -958,7 +994,7 @@ def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) ->
f"TMDB ERROR episode_credits_failed tmdb_id={tmdb_id} season={season_number} episode={episode_number} error={exc!r}" f"TMDB ERROR episode_credits_failed tmdb_id={tmdb_id} season={season_number} episode={episode_number} error={exc!r}"
) )
cast = [] cast = []
_TMDB_EPISODE_CAST_CACHE[cache_key] = list(cast) _tmdb_cache_set(_TMDB_EPISODE_CAST_CACHE, cache_key, list(cast))
return list(cast) return list(cast)
@@ -1079,52 +1115,6 @@ def _settings_key_for_plugin(name: str) -> str:
return f"update_version_{safe}" if safe else "update_version_unknown" return f"update_version_{safe}" if safe else "update_version_unknown"
def _collect_plugin_metadata(plugin: BasisPlugin, titles: list[str]) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]]:
getter = getattr(plugin, "metadata_for", None)
if not callable(getter):
return {}
collected: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]] = {}
for title in titles:
try:
labels, art, cast = getter(title)
except Exception:
continue
if isinstance(labels, dict) or isinstance(art, dict) or cast:
label_map = {str(k): str(v) for k, v in dict(labels or {}).items() if v}
art_map = {str(k): str(v) for k, v in dict(art or {}).items() if v}
collected[title] = (label_map, art_map, cast if isinstance(cast, list) else None)
return collected
def _needs_tmdb(labels: dict[str, str], art: dict[str, str], *, want_plot: bool, want_art: bool) -> bool:
if want_plot and not labels.get("plot"):
return True
if want_art and not (art.get("thumb") or art.get("poster") or art.get("fanart") or art.get("landscape")):
return True
return False
def _merge_metadata(
title: str,
tmdb_labels: dict[str, str] | None,
tmdb_art: dict[str, str] | None,
tmdb_cast: list[TmdbCastMember] | None,
plugin_meta: tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None] | None,
) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]:
labels = dict(tmdb_labels or {})
art = dict(tmdb_art or {})
cast = tmdb_cast
if plugin_meta is not None:
meta_labels, meta_art, meta_cast = plugin_meta
labels.update({k: str(v) for k, v in dict(meta_labels or {}).items() if v})
art.update({k: str(v) for k, v in dict(meta_art or {}).items() if v})
if meta_cast is not None:
cast = meta_cast
if "title" not in labels:
labels["title"] = title
return labels, art, cast
def _sync_update_version_settings() -> None: def _sync_update_version_settings() -> None:
addon = _get_addon() addon = _get_addon()
addon_version = "0.0.0" addon_version = "0.0.0"
@@ -1236,7 +1226,11 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None:
try: try:
with _progress_dialog("Suche laeuft", f"{plugin_name} (1/1) startet...") as progress: with _progress_dialog("Suche laeuft", f"{plugin_name} (1/1) startet...") as progress:
canceled = progress(5, f"{plugin_name} (1/1) Suche...") canceled = progress(5, f"{plugin_name} (1/1) Suche...")
search_coro = plugin.search_titles(query) plugin_progress = lambda msg="", pct=None: progress( # noqa: E731 - kompakte Callback-Bruecke
max(5, min(95, int(pct))) if pct is not None else 20,
f"{plugin_name} (1/1) {str(msg or 'Suche...').strip()}",
)
search_coro = _call_plugin_search(plugin, query, progress_callback=plugin_progress)
try: try:
results = _run_async(search_coro) results = _run_async(search_coro)
except Exception: except Exception:
@@ -1246,7 +1240,7 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None:
except Exception: except Exception:
pass pass
raise raise
results = [str(t).strip() for t in (results or []) if t and str(t).strip()] results = _clean_search_titles([str(t).strip() for t in (results or []) if t and str(t).strip()])
results.sort(key=lambda value: value.casefold()) results.sort(key=lambda value: value.casefold())
use_source, show_tmdb, prefer_source = _metadata_policy( use_source, show_tmdb, prefer_source = _metadata_policy(
@@ -1438,6 +1432,33 @@ def _series_url_params(plugin: BasisPlugin, title: str) -> dict[str, str]:
return {"series_url": series_url} if series_url else {} return {"series_url": series_url} if series_url else {}
def _clean_search_titles(values: list[str]) -> list[str]:
"""Filtert offensichtliche Platzhalter und dedupliziert Treffer."""
blocked = {
"stream",
"streams",
"film",
"movie",
"play",
"details",
"details/play",
}
cleaned: list[str] = []
seen: set[str] = set()
for raw in values:
title = (raw or "").strip()
if not title:
continue
key = title.casefold()
if key in blocked:
continue
if key in seen:
continue
seen.add(key)
cleaned.append(title)
return cleaned
def _show_search() -> None: def _show_search() -> None:
_log("Suche gestartet.") _log("Suche gestartet.")
dialog = xbmcgui.Dialog() dialog = xbmcgui.Dialog()
@@ -1470,7 +1491,11 @@ def _show_search_results(query: str) -> None:
canceled = progress(range_start, f"{plugin_name} ({plugin_index}/{total_plugins}) Suche...") canceled = progress(range_start, f"{plugin_name} ({plugin_index}/{total_plugins}) Suche...")
if canceled: if canceled:
break break
search_coro = plugin.search_titles(query) plugin_progress = lambda msg="", pct=None: progress( # noqa: E731 - kompakte Callback-Bruecke
max(range_start, min(range_end, int(pct))) if pct is not None else range_start + 20,
f"{plugin_name} ({plugin_index}/{total_plugins}) {str(msg or 'Suche...').strip()}",
)
search_coro = _call_plugin_search(plugin, query, progress_callback=plugin_progress)
try: try:
results = _run_async(search_coro) results = _run_async(search_coro)
except Exception as exc: except Exception as exc:
@@ -1481,7 +1506,7 @@ def _show_search_results(query: str) -> None:
pass pass
_log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) _log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING)
continue continue
results = [str(t).strip() for t in (results or []) if t and str(t).strip()] results = _clean_search_titles([str(t).strip() for t in (results or []) if t and str(t).strip()])
_log(f"Treffer ({plugin_name}): {len(results)}", xbmc.LOGDEBUG) _log(f"Treffer ({plugin_name}): {len(results)}", xbmc.LOGDEBUG)
use_source, show_tmdb, prefer_source = _metadata_policy( use_source, show_tmdb, prefer_source = _metadata_policy(
plugin_name, plugin, allow_tmdb=_tmdb_enabled() plugin_name, plugin, allow_tmdb=_tmdb_enabled()
@@ -1565,6 +1590,73 @@ def _show_search_results(query: str) -> None:
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)
def _movie_seed_for_title(plugin: BasisPlugin, title: str, seasons: list[str]) -> tuple[str, str] | None:
"""Ermittelt ein Film-Seed (Season/Episode), um direkt Provider anzeigen zu können."""
if not seasons or len(seasons) != 1:
return None
season = str(seasons[0] or "").strip()
if not season:
return None
try:
episodes = [str(value or "").strip() for value in (plugin.episodes_for(title, season) or [])]
except Exception:
return None
episodes = [value for value in episodes if value]
if len(episodes) != 1:
return None
episode = episodes[0]
season_key = season.casefold()
episode_key = episode.casefold()
title_key = (title or "").strip().casefold()
generic_seasons = {"film", "movie", "stream"}
generic_episodes = {"stream", "film", "play", title_key}
if season_key in generic_seasons and episode_key in generic_episodes:
return (season, episode)
return None
def _show_movie_streams(
    plugin_name: str,
    title: str,
    season: str,
    episode: str,
    *,
    series_url: str = "",
) -> None:
    """Render a single playable directory entry for a movie-style title.

    Hoster selection stays in the playback dialog (unchanged behaviour).
    Shows a notification and closes the directory when the source plugin
    is not available.
    """
    handle = _get_handle()
    plugin = _discover_plugins().get(plugin_name)
    if plugin is None:
        xbmcgui.Dialog().notification("Streams", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000)
        xbmcplugin.endOfDirectory(handle)
        return
    if series_url:
        # Best effort: let the plugin remember the series URL for later lookups.
        remember = getattr(plugin, "remember_series_url", None)
        if callable(remember):
            try:
                remember(title, series_url)
            except Exception:
                pass
    xbmcplugin.setPluginCategory(handle, f"{title} - Streams")
    _set_content(handle, "videos")
    params = {"plugin": plugin_name, "title": title, "season": season, "episode": episode}
    if series_url:
        params["series_url"] = series_url
    _add_directory_item(
        handle,
        title,
        "play_episode",
        dict(params),
        is_folder=False,
        info_labels={"title": title, "mediatype": "movie"},
    )
    xbmcplugin.endOfDirectory(handle)
def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None:
handle = _get_handle() handle = _get_handle()
_log(f"Staffeln laden: {plugin_name} / {title}") _log(f"Staffeln laden: {plugin_name} / {title}")
@@ -1581,60 +1673,6 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None:
except Exception: except Exception:
pass pass
# Einschalten liefert Filme. Für Playback soll nach dem Öffnen des Titels direkt ein
# einzelnes abspielbares Item angezeigt werden: <Titel> -> (<Titel> abspielbar).
# Wichtig: ohne zusätzliche Netzwerkanfragen (sonst bleibt Kodi ggf. im Busy-Spinner hängen).
if (plugin_name or "").casefold() == "einschalten" and _get_setting_bool("einschalten_enable_playback", default=False):
xbmcplugin.setPluginCategory(handle, title)
_set_content(handle, "movies")
playstate = _title_playstate(plugin_name, title)
info_labels: dict[str, object] = {"title": title, "mediatype": "movie"}
info_labels = _apply_playstate_to_info(info_labels, playstate)
display_label = _label_with_playstate(title, playstate)
movie_params = {"plugin": plugin_name, "title": title}
if series_url:
movie_params["series_url"] = series_url
_add_directory_item(
handle,
display_label,
"play_movie",
movie_params,
is_folder=False,
info_labels=info_labels,
)
xbmcplugin.endOfDirectory(handle)
return
# Optional: Plugins können schnell (ohne Detail-Request) sagen, ob ein Titel ein Film ist.
# Dann zeigen wir direkt ein einzelnes abspielbares Item: <Titel> -> (<Titel>).
is_movie = getattr(plugin, "is_movie", None)
if callable(is_movie):
try:
if bool(is_movie(title)):
xbmcplugin.setPluginCategory(handle, title)
_set_content(handle, "movies")
playstate = _title_playstate(plugin_name, title)
info_labels: dict[str, object] = {"title": title, "mediatype": "movie"}
info_labels = _apply_playstate_to_info(info_labels, playstate)
display_label = _label_with_playstate(title, playstate)
movie_params = {"plugin": plugin_name, "title": title}
if series_url:
movie_params["series_url"] = series_url
else:
movie_params.update(_series_url_params(plugin, title))
_add_directory_item(
handle,
display_label,
"play_movie",
movie_params,
is_folder=False,
info_labels=info_labels,
)
xbmcplugin.endOfDirectory(handle)
return
except Exception:
pass
use_source, show_tmdb, _prefer_source = _metadata_policy( use_source, show_tmdb, _prefer_source = _metadata_policy(
plugin_name, plugin, allow_tmdb=_tmdb_enabled() plugin_name, plugin, allow_tmdb=_tmdb_enabled()
) )
@@ -1644,7 +1682,7 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None:
meta_getter = getattr(plugin, "metadata_for", None) meta_getter = getattr(plugin, "metadata_for", None)
if use_source and callable(meta_getter): if use_source and callable(meta_getter):
try: try:
with _busy_dialog(): with _busy_dialog("Metadaten werden geladen..."):
meta_labels, meta_art, meta_cast = meta_getter(title) meta_labels, meta_art, meta_cast = meta_getter(title)
if isinstance(meta_labels, dict): if isinstance(meta_labels, dict):
title_info_labels = {str(k): str(v) for k, v in meta_labels.items() if v} title_info_labels = {str(k): str(v) for k, v in meta_labels.items() if v}
@@ -1664,6 +1702,26 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None:
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)
return return
movie_seed = _movie_seed_for_title(plugin, title, seasons)
if movie_seed is not None:
# Dieser Action-Pfad wurde als Verzeichnis aufgerufen. Ohne endOfDirectory()
# bleibt Kodi im Busy-Zustand, auch wenn wir direkt in die Wiedergabe springen.
try:
xbmcplugin.endOfDirectory(handle, succeeded=False)
except Exception:
try:
xbmcplugin.endOfDirectory(handle)
except Exception:
pass
_play_episode(
plugin_name,
title,
movie_seed[0],
movie_seed[1],
series_url=series_url,
)
return
count = len(seasons) count = len(seasons)
suffix = "Staffel" if count == 1 else "Staffeln" suffix = "Staffel" if count == 1 else "Staffeln"
xbmcplugin.setPluginCategory(handle, f"{title} ({count} {suffix})") xbmcplugin.setPluginCategory(handle, f"{title} ({count} {suffix})")
@@ -1684,8 +1742,8 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None:
art: dict[str, str] | None = None art: dict[str, str] | None = None
season_number = _extract_first_int(season) season_number = _extract_first_int(season)
if api_key and season_number is not None: if api_key and season_number is not None:
cache_key = (_TMDB_ID_CACHE.get((title or "").strip().casefold(), 0), season_number, language, flags) cache_key = (_tmdb_cache_get(_TMDB_ID_CACHE, (title or "").strip().casefold(), 0), season_number, language, flags)
cached = _TMDB_SEASON_SUMMARY_CACHE.get(cache_key) cached = _tmdb_cache_get(_TMDB_SEASON_SUMMARY_CACHE, cache_key)
if cached is None and cache_key[0]: if cached is None and cache_key[0]:
try: try:
meta = lookup_tv_season_summary( meta = lookup_tv_season_summary(
@@ -1708,7 +1766,7 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None:
if show_art and meta.poster: if show_art and meta.poster:
art_map = {"thumb": meta.poster, "poster": meta.poster} art_map = {"thumb": meta.poster, "poster": meta.poster}
cached = (labels, art_map) cached = (labels, art_map)
_TMDB_SEASON_SUMMARY_CACHE[cache_key] = cached _tmdb_cache_set(_TMDB_SEASON_SUMMARY_CACHE, cache_key, cached)
if cached is not None: if cached is not None:
info_labels, art = cached info_labels, art = cached
merged_labels = dict(info_labels or {}) merged_labels = dict(info_labels or {})
@@ -1774,7 +1832,7 @@ def _show_episodes(plugin_name: str, title: str, season: str, series_url: str =
meta_getter = getattr(plugin, "metadata_for", None) meta_getter = getattr(plugin, "metadata_for", None)
if callable(meta_getter): if callable(meta_getter):
try: try:
with _busy_dialog(): with _busy_dialog("Episoden-Metadaten werden geladen..."):
meta_labels, meta_art, meta_cast = meta_getter(title) meta_labels, meta_art, meta_cast = meta_getter(title)
if isinstance(meta_labels, dict): if isinstance(meta_labels, dict):
show_info = {str(k): str(v) for k, v in meta_labels.items() if v} show_info = {str(k): str(v) for k, v in meta_labels.items() if v}
@@ -1787,7 +1845,7 @@ def _show_episodes(plugin_name: str, title: str, season: str, series_url: str =
show_fanart = (show_art or {}).get("fanart") if isinstance(show_art, dict) else "" show_fanart = (show_art or {}).get("fanart") if isinstance(show_art, dict) else ""
show_poster = (show_art or {}).get("poster") if isinstance(show_art, dict) else "" show_poster = (show_art or {}).get("poster") if isinstance(show_art, dict) else ""
with _busy_dialog(): with _busy_dialog("Episoden werden aufbereitet..."):
for episode in episodes: for episode in episodes:
if show_tmdb: if show_tmdb:
info_labels, art = _tmdb_episode_labels_and_art( info_labels, art = _tmdb_episode_labels_and_art(
@@ -2018,7 +2076,7 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) -
if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art):
tmdb_titles.append(title) tmdb_titles.append(title)
if show_tmdb and tmdb_titles: if show_tmdb and tmdb_titles:
with _busy_dialog(): with _busy_dialog("Genre-Liste wird geladen..."):
tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles)
for title in titles: for title in titles:
tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, [])
@@ -2134,7 +2192,7 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None
if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art):
tmdb_titles.append(title) tmdb_titles.append(title)
if show_tmdb and tmdb_titles: if show_tmdb and tmdb_titles:
with _busy_dialog(): with _busy_dialog("Genre-Seite wird geladen..."):
tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles)
for title in titles: for title in titles:
tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, [])
@@ -2285,7 +2343,7 @@ def _show_alpha_titles_page(plugin_name: str, letter: str, page: int = 1) -> Non
if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art):
tmdb_titles.append(title) tmdb_titles.append(title)
if show_tmdb and tmdb_titles: if show_tmdb and tmdb_titles:
with _busy_dialog(): with _busy_dialog("A-Z Liste wird geladen..."):
tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles)
for title in titles: for title in titles:
tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, [])
@@ -2397,7 +2455,7 @@ def _show_series_catalog(plugin_name: str, page: int = 1) -> None:
if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art):
tmdb_titles.append(title) tmdb_titles.append(title)
if show_tmdb and tmdb_titles: if show_tmdb and tmdb_titles:
with _busy_dialog(): with _busy_dialog("A-Z Seite wird geladen..."):
tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles)
for title in titles: for title in titles:
tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, [])
@@ -2632,7 +2690,7 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None:
if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art):
tmdb_titles.append(title) tmdb_titles.append(title)
if show_tmdb and tmdb_titles: if show_tmdb and tmdb_titles:
with _busy_dialog(): with _busy_dialog("Beliebte Titel werden geladen..."):
tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles)
for title in page_items: for title in page_items:
tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, [])
@@ -2780,7 +2838,7 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None:
if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art):
tmdb_titles.append(title) tmdb_titles.append(title)
if show_tmdb and tmdb_titles: if show_tmdb and tmdb_titles:
with _busy_dialog(): with _busy_dialog("Neue Titel werden geladen..."):
tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles)
for title in page_items: for title in page_items:
tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, [])
@@ -2846,7 +2904,7 @@ def _show_latest_episodes(plugin_name: str, page: int = 1) -> None:
_set_content(handle, "episodes") _set_content(handle, "episodes")
try: try:
with _busy_dialog(): with _busy_dialog("Neueste Episoden werden geladen..."):
entries = list(getter(page) or []) entries = list(getter(page) or [])
except Exception as exc: except Exception as exc:
_log(f"Neueste Folgen fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) _log(f"Neueste Folgen fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING)
@@ -2951,7 +3009,7 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page
if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art):
tmdb_titles.append(title) tmdb_titles.append(title)
if show_tmdb and tmdb_titles: if show_tmdb and tmdb_titles:
with _busy_dialog(): with _busy_dialog("Genre-Gruppe wird geladen..."):
tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles)
for title in page_items: for title in page_items:
tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, [])
@@ -3039,7 +3097,7 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page
if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art):
tmdb_titles.append(title) tmdb_titles.append(title)
if show_tmdb and tmdb_titles: if show_tmdb and tmdb_titles:
with _busy_dialog(): with _busy_dialog("Genre-Serien werden geladen..."):
tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles)
for title in page_items: for title in page_items:
tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, [])
@@ -3237,12 +3295,29 @@ def _track_playback_and_update_state(key: str) -> None:
pass pass
def _track_playback_and_update_state_async(key: str) -> None:
"""Startet Playstate-Tracking im Hintergrund, damit die UI nicht blockiert."""
key = (key or "").strip()
if not key:
return
def _worker() -> None:
try:
_track_playback_and_update_state(key)
except Exception:
pass
worker = threading.Thread(target=_worker, name="viewit-playstate-tracker", daemon=True)
worker.start()
def _play_episode( def _play_episode(
plugin_name: str, plugin_name: str,
title: str, title: str,
season: str, season: str,
episode: str, episode: str,
*, *,
forced_hoster: str = "",
episode_url: str = "", episode_url: str = "",
series_url: str = "", series_url: str = "",
resolve_handle: int | None = None, resolve_handle: int | None = None,
@@ -3281,16 +3356,22 @@ def _play_episode(
hoster_getter = getattr(plugin, "available_hosters_for", None) hoster_getter = getattr(plugin, "available_hosters_for", None)
if callable(hoster_getter): if callable(hoster_getter):
try: try:
with _busy_dialog(): with _busy_dialog("Hoster werden geladen..."):
available_hosters = list(hoster_getter(title, season, episode) or []) available_hosters = list(hoster_getter(title, season, episode) or [])
except Exception as exc: except Exception as exc:
_log(f"Hoster laden fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) _log(f"Hoster laden fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING)
selected_hoster: str | None = None selected_hoster: str | None = None
forced_hoster = (forced_hoster or "").strip()
if available_hosters: if available_hosters:
if len(available_hosters) == 1: if forced_hoster:
for hoster in available_hosters:
if hoster.casefold() == forced_hoster.casefold():
selected_hoster = hoster
break
if selected_hoster is None and len(available_hosters) == 1:
selected_hoster = available_hosters[0] selected_hoster = available_hosters[0]
else: elif selected_hoster is None:
selected_index = xbmcgui.Dialog().select("Hoster waehlen", available_hosters) selected_index = xbmcgui.Dialog().select("Hoster waehlen", available_hosters)
if selected_index is None or selected_index < 0: if selected_index is None or selected_index < 0:
_log("Play abgebrochen (kein Hoster gewählt).", xbmc.LOGDEBUG) _log("Play abgebrochen (kein Hoster gewählt).", xbmc.LOGDEBUG)
@@ -3335,7 +3416,7 @@ def _play_episode(
cast=cast, cast=cast,
resolve_handle=resolve_handle, resolve_handle=resolve_handle,
) )
_track_playback_and_update_state( _track_playback_and_update_state_async(
_playstate_key(plugin_name=plugin_name, title=title, season=season, episode=episode) _playstate_key(plugin_name=plugin_name, title=title, season=season, episode=episode)
) )
@@ -3365,7 +3446,7 @@ def _play_episode_url(
hoster_getter = getattr(plugin, "available_hosters_for_url", None) hoster_getter = getattr(plugin, "available_hosters_for_url", None)
if callable(hoster_getter): if callable(hoster_getter):
try: try:
with _busy_dialog(): with _busy_dialog("Hoster werden geladen..."):
available_hosters = list(hoster_getter(episode_url) or []) available_hosters = list(hoster_getter(episode_url) or [])
except Exception as exc: except Exception as exc:
_log(f"Hoster laden fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) _log(f"Hoster laden fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING)
@@ -3423,7 +3504,7 @@ def _play_episode_url(
cast=cast, cast=cast,
resolve_handle=resolve_handle, resolve_handle=resolve_handle,
) )
_track_playback_and_update_state( _track_playback_and_update_state_async(
_playstate_key(plugin_name=plugin_name, title=title, season=season_label, episode=episode_label) _playstate_key(plugin_name=plugin_name, title=title, season=season_label, episode=episode_label)
) )
@@ -3523,6 +3604,7 @@ def run() -> None:
params.get("title", ""), params.get("title", ""),
params.get("season", ""), params.get("season", ""),
params.get("episode", ""), params.get("episode", ""),
forced_hoster=params.get("hoster", ""),
episode_url=params.get("url", ""), episode_url=params.get("url", ""),
series_url=params.get("series_url", ""), series_url=params.get("series_url", ""),
resolve_handle=_get_handle(), resolve_handle=_get_handle(),

View File

@@ -32,3 +32,12 @@ def get_requests_session(key: str, *, headers: Optional[dict[str, str]] = None):
pass pass
return session return session
def close_all_sessions() -> None:
    """Close every pooled session, then empty the pool."""
    for pooled_session in list(_SESSIONS.values()):
        # A failing close() must not stop the remaining sessions from
        # being closed, nor prevent the pool from being cleared.
        try:
            pooled_session.close()
        except Exception:
            pass
    _SESSIONS.clear()

93
addon/metadata_utils.py Normal file
View File

@@ -0,0 +1,93 @@
from __future__ import annotations
import re
from plugin_interface import BasisPlugin
from tmdb import TmdbCastMember
METADATA_MODE_AUTO = 0
METADATA_MODE_SOURCE = 1
METADATA_MODE_TMDB = 2
METADATA_MODE_MIX = 3
def metadata_setting_id(plugin_name: str) -> str:
    """Derive the settings key for a plugin's metadata-source option.

    The plugin name is lowercased and every run of characters outside
    ``[a-z0-9]`` collapses into a single underscore; when nothing usable
    remains, the generic ``"metadata_source"`` key is returned.
    """
    normalized = (plugin_name or "").strip().casefold()
    slug = re.sub(r"[^a-z0-9]+", "_", normalized).strip("_")
    if not slug:
        return "metadata_source"
    return f"{slug}_metadata_source"
def plugin_supports_metadata(plugin: BasisPlugin) -> bool:
    """Return True when the plugin class overrides ``BasisPlugin.metadata_for``."""
    try:
        own_impl = plugin.__class__.metadata_for
        base_impl = BasisPlugin.metadata_for
        return own_impl is not base_impl
    except Exception:
        # Missing attribute or unresolved base class: treat as unsupported.
        return False
def metadata_policy(
    plugin_name: str,
    plugin: BasisPlugin,
    *,
    allow_tmdb: bool,
    get_setting_int=None,
) -> tuple[bool, bool, bool]:
    """Decide where metadata for *plugin_name* should come from.

    Returns ``(use_source, use_tmdb, prefer_source)``:

    * ``use_source`` - whether the plugin's own ``metadata_for`` is consulted,
    * ``use_tmdb`` - whether TMDb lookups are allowed,
    * ``prefer_source`` - whether plugin data wins over TMDb data on merge.

    Without a ``get_setting_int`` callable the automatic policy applies.
    """
    if not callable(get_setting_int):
        # No settings backend available: fall back to the automatic policy.
        return (
            plugin_supports_metadata(plugin),
            allow_tmdb,
            bool(getattr(plugin, "prefer_source_metadata", False)),
        )
    mode = get_setting_int(metadata_setting_id(plugin_name), default=METADATA_MODE_AUTO)
    supports_source = plugin_supports_metadata(plugin)
    if mode == METADATA_MODE_SOURCE:
        return supports_source, False, True
    if mode == METADATA_MODE_TMDB:
        return False, allow_tmdb, False
    if mode == METADATA_MODE_MIX:
        return supports_source, allow_tmdb, True
    # METADATA_MODE_AUTO (or an unknown value): honour the plugin's preference.
    return supports_source, allow_tmdb, bool(getattr(plugin, "prefer_source_metadata", False))
def collect_plugin_metadata(
    plugin: BasisPlugin,
    titles: list[str],
) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]]:
    """Ask *plugin* for per-title metadata and normalize the results.

    Titles whose lookup raises are skipped. Label and artwork values are
    stringified and empty values dropped; a non-list cast becomes ``None``.
    """
    getter = getattr(plugin, "metadata_for", None)
    if not callable(getter):
        return {}
    gathered: dict[str, tuple[dict[str, str], dict[str, str], list | None]] = {}
    for name in titles:
        try:
            raw_labels, raw_art, raw_cast = getter(name)
        except Exception:
            # A single failing title must not abort the whole batch.
            continue
        if not (isinstance(raw_labels, dict) or isinstance(raw_art, dict) or raw_cast):
            continue
        labels = {str(key): str(value) for key, value in dict(raw_labels or {}).items() if value}
        artwork = {str(key): str(value) for key, value in dict(raw_art or {}).items() if value}
        gathered[name] = (labels, artwork, raw_cast if isinstance(raw_cast, list) else None)
    return gathered
def needs_tmdb(labels: dict[str, str], art: dict[str, str], *, want_plot: bool, want_art: bool) -> bool:
    """Return True when a TMDb lookup is still required to fill the item.

    A lookup is needed when a plot is wanted but missing, or artwork is
    wanted and none of the known artwork slots carries a value.
    """
    missing_plot = want_plot and not labels.get("plot")
    has_any_art = any(art.get(slot) for slot in ("thumb", "poster", "fanart", "landscape"))
    missing_art = want_art and not has_any_art
    return bool(missing_plot or missing_art)
def merge_metadata(
    title: str,
    tmdb_labels: dict[str, str] | None,
    tmdb_art: dict[str, str] | None,
    tmdb_cast: list[TmdbCastMember] | None,
    plugin_meta: tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None] | None,
) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]:
    """Overlay plugin-provided metadata on top of TMDb data.

    Plugin values win on key collisions; empty plugin values are ignored.
    The plugin cast replaces the TMDb cast only when it is not ``None``.
    A ``title`` label is guaranteed in the result.
    """
    merged_labels = dict(tmdb_labels or {})
    merged_art = dict(tmdb_art or {})
    merged_cast = tmdb_cast
    if plugin_meta is not None:
        source_labels, source_art, source_cast = plugin_meta
        for key, value in dict(source_labels or {}).items():
            if value:
                merged_labels[key] = str(value)
        for key, value in dict(source_art or {}).items():
            if value:
                merged_art[key] = str(value)
        if source_cast is not None:
            merged_cast = source_cast
    merged_labels.setdefault("title", title)
    return merged_labels, merged_art, merged_cast

View File

@@ -4,7 +4,7 @@
from __future__ import annotations from __future__ import annotations
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Set, Tuple from typing import Any, Callable, Dict, List, Optional, Set, Tuple
class BasisPlugin(ABC): class BasisPlugin(ABC):
@@ -15,7 +15,11 @@ class BasisPlugin(ABC):
prefer_source_metadata: bool = False prefer_source_metadata: bool = False
@abstractmethod @abstractmethod
async def search_titles(self, query: str) -> List[str]: async def search_titles(
self,
query: str,
progress_callback: Optional[Callable[[str, Optional[int]], Any]] = None,
) -> List[str]:
"""Liefert eine Liste aller Treffer fuer die Suche.""" """Liefert eine Liste aller Treffer fuer die Suche."""
@abstractmethod @abstractmethod

View File

@@ -9,7 +9,7 @@ Zum Verwenden:
from __future__ import annotations from __future__ import annotations
from dataclasses import dataclass from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, List, Optional from typing import TYPE_CHECKING, Any, Callable, List, Optional
try: # pragma: no cover - optional dependency try: # pragma: no cover - optional dependency
import requests import requests
@@ -88,9 +88,13 @@ class TemplatePlugin(BasisPlugin):
self._session = session self._session = session
return self._session return self._session
async def search_titles(self, query: str) -> List[str]: async def search_titles(
self,
query: str,
progress_callback: Optional[Callable[[str, Optional[int]], Any]] = None,
) -> List[str]:
"""TODO: Suche auf der Zielseite implementieren.""" """TODO: Suche auf der Zielseite implementieren."""
_ = query _ = (query, progress_callback)
return [] return []
def seasons_for(self, title: str) -> List[str]: def seasons_for(self, title: str) -> List[str]:

View File

@@ -13,7 +13,8 @@ import hashlib
import json import json
import re import re
import time import time
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple
from urllib.parse import quote
try: # pragma: no cover - optional dependency try: # pragma: no cover - optional dependency
import requests import requests
@@ -69,6 +70,16 @@ HEADERS = {
SESSION_CACHE_TTL_SECONDS = 300 SESSION_CACHE_TTL_SECONDS = 300
SESSION_CACHE_PREFIX = "viewit.aniworld" SESSION_CACHE_PREFIX = "viewit.aniworld"
SESSION_CACHE_MAX_TITLE_URLS = 800 SESSION_CACHE_MAX_TITLE_URLS = 800
ProgressCallback = Optional[Callable[[str, Optional[int]], Any]]
def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None:
if not callable(callback):
return
try:
callback(str(message or ""), None if percent is None else int(percent))
except Exception:
return
@dataclass @dataclass
@@ -126,7 +137,7 @@ def _latest_episodes_url() -> str:
def _search_url(query: str) -> str: def _search_url(query: str) -> str:
return f"{_get_base_url()}/search?q={query}" return f"{_get_base_url()}/search?q={quote((query or '').strip())}"
def _search_api_url() -> str: def _search_api_url() -> str:
@@ -289,37 +300,56 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = session or get_requests_session("aniworld", headers=HEADERS) sess = session or get_requests_session("aniworld", headers=HEADERS)
response = None
try: try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc: except Exception as exc:
_log_error(f"GET {url} failed: {exc}") _log_error(f"GET {url} failed: {exc}")
raise raise
if response.url and response.url != url: try:
_log_url(response.url, kind="REDIRECT") final_url = (response.url or url) if response is not None else url
_log_response_html(url, response.text) body = (response.text or "") if response is not None else ""
if _looks_like_cloudflare_challenge(response.text): if final_url != url:
_log_url(final_url, kind="REDIRECT")
_log_response_html(url, body)
if _looks_like_cloudflare_challenge(body):
raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.")
return BeautifulSoup(response.text, "html.parser") return BeautifulSoup(body, "html.parser")
finally:
if response is not None:
try:
response.close()
except Exception:
pass
def _get_html_simple(url: str) -> str: def _get_html_simple(url: str) -> str:
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = get_requests_session("aniworld", headers=HEADERS) sess = get_requests_session("aniworld", headers=HEADERS)
response = None
try: try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc: except Exception as exc:
_log_error(f"GET {url} failed: {exc}") _log_error(f"GET {url} failed: {exc}")
raise raise
if response.url and response.url != url: try:
_log_url(response.url, kind="REDIRECT") final_url = (response.url or url) if response is not None else url
body = response.text body = (response.text or "") if response is not None else ""
if final_url != url:
_log_url(final_url, kind="REDIRECT")
_log_response_html(url, body) _log_response_html(url, body)
if _looks_like_cloudflare_challenge(body): if _looks_like_cloudflare_challenge(body):
raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.")
return body return body
finally:
if response is not None:
try:
response.close()
except Exception:
pass
def _get_soup_simple(url: str) -> BeautifulSoupT: def _get_soup_simple(url: str) -> BeautifulSoupT:
@@ -351,17 +381,27 @@ def _post_json(url: str, *, payload: Dict[str, str], session: Optional[RequestsS
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = session or get_requests_session("aniworld", headers=HEADERS) sess = session or get_requests_session("aniworld", headers=HEADERS)
response = None
try:
response = sess.post(url, data=payload, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.post(url, data=payload, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
if response.url and response.url != url: final_url = (response.url or url) if response is not None else url
_log_url(response.url, kind="REDIRECT") body = (response.text or "") if response is not None else ""
_log_response_html(url, response.text) if final_url != url:
if _looks_like_cloudflare_challenge(response.text): _log_url(final_url, kind="REDIRECT")
_log_response_html(url, body)
if _looks_like_cloudflare_challenge(body):
raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.")
try: try:
return response.json() return response.json()
except Exception: except Exception:
return None return None
finally:
if response is not None:
try:
response.close()
except Exception:
pass
def _extract_canonical_url(soup: BeautifulSoupT, fallback: str) -> str: def _extract_canonical_url(soup: BeautifulSoupT, fallback: str) -> str:
@@ -555,10 +595,18 @@ def resolve_redirect(target_url: str) -> Optional[str]:
_log_visit(normalized_url) _log_visit(normalized_url)
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(_get_base_url(), session=session) _get_soup(_get_base_url(), session=session)
response = None
try:
response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True) response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True)
if response.url: if response.url:
_log_url(response.url, kind="RESOLVED") _log_url(response.url, kind="RESOLVED")
return response.url if response.url else None return response.url if response.url else None
finally:
if response is not None:
try:
response.close()
except Exception:
pass
def fetch_episode_hoster_names(episode_url: str) -> List[str]: def fetch_episode_hoster_names(episode_url: str) -> List[str]:
@@ -629,11 +677,12 @@ def fetch_episode_stream_link(
return resolved return resolved
def search_animes(query: str) -> List[SeriesResult]: def search_animes(query: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]:
_ensure_requests() _ensure_requests()
query = (query or "").strip() query = (query or "").strip()
if not query: if not query:
return [] return []
_emit_progress(progress_callback, "AniWorld API-Suche", 15)
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
try: try:
session.get(_get_base_url(), headers=HEADERS, timeout=DEFAULT_TIMEOUT) session.get(_get_base_url(), headers=HEADERS, timeout=DEFAULT_TIMEOUT)
@@ -643,7 +692,9 @@ def search_animes(query: str) -> List[SeriesResult]:
results: List[SeriesResult] = [] results: List[SeriesResult] = []
seen: set[str] = set() seen: set[str] = set()
if isinstance(data, list): if isinstance(data, list):
for entry in data: for idx, entry in enumerate(data, start=1):
if idx == 1 or idx % 50 == 0:
_emit_progress(progress_callback, f"API auswerten {idx}/{len(data)}", 35)
if not isinstance(entry, dict): if not isinstance(entry, dict):
continue continue
title = _strip_html((entry.get("title") or "").strip()) title = _strip_html((entry.get("title") or "").strip())
@@ -665,10 +716,16 @@ def search_animes(query: str) -> List[SeriesResult]:
seen.add(key) seen.add(key)
description = (entry.get("description") or "").strip() description = (entry.get("description") or "").strip()
results.append(SeriesResult(title=title, description=description, url=url)) results.append(SeriesResult(title=title, description=description, url=url))
_emit_progress(progress_callback, f"API-Treffer: {len(results)}", 85)
return results return results
soup = _get_soup_simple(_search_url(requests.utils.quote(query))) _emit_progress(progress_callback, "HTML-Suche (Fallback)", 55)
for anchor in soup.select("a[href^='/anime/stream/'][href]"): soup = _get_soup_simple(_search_url(query))
anchors = soup.select("a[href^='/anime/stream/'][href]")
total_anchors = max(1, len(anchors))
for idx, anchor in enumerate(anchors, start=1):
if idx == 1 or idx % 100 == 0:
_emit_progress(progress_callback, f"HTML auswerten {idx}/{total_anchors}", 70)
href = (anchor.get("href") or "").strip() href = (anchor.get("href") or "").strip()
if not href or "/staffel-" in href or "/episode-" in href: if not href or "/staffel-" in href or "/episode-" in href:
continue continue
@@ -686,6 +743,7 @@ def search_animes(query: str) -> List[SeriesResult]:
continue continue
seen.add(key) seen.add(key)
results.append(SeriesResult(title=title, description="", url=url)) results.append(SeriesResult(title=title, description="", url=url))
_emit_progress(progress_callback, f"HTML-Treffer: {len(results)}", 85)
return results return results
@@ -696,6 +754,7 @@ class AniworldPlugin(BasisPlugin):
def __init__(self) -> None: def __init__(self) -> None:
self._anime_results: Dict[str, SeriesResult] = {} self._anime_results: Dict[str, SeriesResult] = {}
self._title_url_cache: Dict[str, str] = self._load_title_url_cache() self._title_url_cache: Dict[str, str] = self._load_title_url_cache()
self._title_meta: Dict[str, tuple[str, str]] = {}
self._genre_names_cache: Optional[List[str]] = None self._genre_names_cache: Optional[List[str]] = None
self._season_cache: Dict[str, List[SeasonInfo]] = {} self._season_cache: Dict[str, List[SeasonInfo]] = {}
self._season_links_cache: Dict[str, List[SeasonInfo]] = {} self._season_links_cache: Dict[str, List[SeasonInfo]] = {}
@@ -760,8 +819,64 @@ class AniworldPlugin(BasisPlugin):
changed = True changed = True
if changed and persist: if changed and persist:
self._save_title_url_cache() self._save_title_url_cache()
if description:
old_plot, old_poster = self._title_meta.get(title, ("", ""))
self._title_meta[title] = (description.strip() or old_plot, old_poster)
return changed return changed
def _store_title_meta(self, title: str, *, plot: str = "", poster: str = "") -> None:
    """Cache plot/poster for *title*, keeping previously stored values.

    New non-empty values win; otherwise the cached value survives.
    """
    normalized_title = (title or "").strip()
    if not normalized_title:
        return
    previous_plot, previous_poster = self._title_meta.get(normalized_title, ("", ""))
    self._title_meta[normalized_title] = (
        (plot or previous_plot or "").strip(),
        (poster or previous_poster or "").strip(),
    )
def _extract_series_metadata(self, soup: BeautifulSoupT) -> tuple[str, str]:
    """Extract ``(plot, poster)`` from a series page; empty strings when absent.

    Meta tags are preferred for both fields; page elements serve as fallback.
    """
    if not soup:
        return "", ""

    def _first_meta_content(selectors: tuple[str, ...]) -> str:
        # Return the first non-empty content attribute among the selectors.
        for css in selectors:
            node = soup.select_one(css)
            if node is None:
                continue
            content = (node.get("content") or "").strip()
            if content:
                return content
        return ""

    plot = _first_meta_content(("meta[property='og:description']", "meta[name='description']"))
    if not plot:
        for css in (".series-description", ".seri_des", ".description", "article p"):
            node = soup.select_one(css)
            if node is None:
                continue
            text = (node.get_text(" ", strip=True) or "").strip()
            if text:
                plot = text
                break

    raw_image = _first_meta_content(("meta[property='og:image']", "meta[name='twitter:image']"))
    poster = _absolute_url(raw_image) if raw_image else ""
    if not poster:
        for css in ("img.seriesCoverBox", ".seriesCoverBox img", "img[alt][src]"):
            image = soup.select_one(css)
            if image is None:
                continue
            candidate = (image.get("data-src") or image.get("src") or "").strip()
            if candidate:
                poster = _absolute_url(candidate)
                break
    return plot, poster
@staticmethod @staticmethod
def _season_links_cache_name(series_url: str) -> str: def _season_links_cache_name(series_url: str) -> str:
digest = hashlib.sha1((series_url or "").encode("utf-8")).hexdigest()[:20] digest = hashlib.sha1((series_url or "").encode("utf-8")).hexdigest()[:20]
@@ -893,6 +1008,40 @@ class AniworldPlugin(BasisPlugin):
return None return None
def metadata_for(self, title: str) -> tuple[dict[str, str], dict[str, str], list[object] | None]:
    """Return source metadata ``(info labels, artwork, cast)`` for *title*.

    Resolution order: the in-memory ``_title_meta`` cache first, then the
    stored search result's description, then a fetch of the series page.
    Cast is never provided by this source, so the third element is always
    ``None``. Network or parse failures degrade to whatever is known.
    """
    title = (title or "").strip()
    if not title:
        return {}, {}, None
    info: dict[str, str] = {"title": title}
    art: dict[str, str] = {}
    cached_plot, cached_poster = self._title_meta.get(title, ("", ""))
    if cached_plot:
        info["plot"] = cached_plot
    if cached_poster:
        art = {"thumb": cached_poster, "poster": cached_poster}
    # Fully cached (plot and artwork known): skip any network access.
    if "plot" in info and art:
        return info, art, None
    series = self._find_series_by_title(title)
    if series is None or not series.url:
        return info, art, None
    # Search results may already carry a description; use it as interim plot.
    if series.description and "plot" not in info:
        info["plot"] = series.description
    try:
        soup = _get_soup(series.url, session=get_requests_session("aniworld", headers=HEADERS))
        plot, poster = self._extract_series_metadata(soup)
    except Exception:
        # Fetch/parse failure: fall back to cache/search-result data only.
        plot, poster = "", ""
    if plot:
        info["plot"] = plot
    if poster:
        art = {"thumb": poster, "poster": poster}
    # Persist whatever was learned so the next call can return early.
    self._store_title_meta(title, plot=info.get("plot", ""), poster=poster)
    return info, art, None
def _ensure_popular(self) -> List[SeriesResult]: def _ensure_popular(self) -> List[SeriesResult]:
if self._popular_cache is not None: if self._popular_cache is not None:
return list(self._popular_cache) return list(self._popular_cache)
@@ -1151,7 +1300,7 @@ class AniworldPlugin(BasisPlugin):
return self._episode_label_cache.get(cache_key, {}).get(episode_label) return self._episode_label_cache.get(cache_key, {}).get(episode_label)
return None return None
async def search_titles(self, query: str) -> List[str]: async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]:
query = (query or "").strip() query = (query or "").strip()
if not query: if not query:
self._anime_results.clear() self._anime_results.clear()
@@ -1163,7 +1312,8 @@ class AniworldPlugin(BasisPlugin):
if not self._requests_available: if not self._requests_available:
raise RuntimeError("AniworldPlugin kann ohne requests/bs4 nicht suchen.") raise RuntimeError("AniworldPlugin kann ohne requests/bs4 nicht suchen.")
try: try:
results = search_animes(query) _emit_progress(progress_callback, "AniWorld Suche startet", 10)
results = search_animes(query, progress_callback=progress_callback)
except Exception as exc: # pragma: no cover except Exception as exc: # pragma: no cover
self._anime_results.clear() self._anime_results.clear()
self._season_cache.clear() self._season_cache.clear()
@@ -1178,6 +1328,7 @@ class AniworldPlugin(BasisPlugin):
self._season_cache.clear() self._season_cache.clear()
self._season_links_cache.clear() self._season_links_cache.clear()
self._episode_label_cache.clear() self._episode_label_cache.clear()
_emit_progress(progress_callback, f"Treffer aufbereitet: {len(results)}", 95)
return [result.title for result in results] return [result.title for result in results]
def _ensure_seasons(self, title: str) -> List[SeasonInfo]: def _ensure_seasons(self, title: str) -> List[SeasonInfo]:

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from dataclasses import dataclass from dataclasses import dataclass
import re import re
from urllib.parse import quote from urllib.parse import quote
from typing import TYPE_CHECKING, Any, Dict, List, Optional from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
try: # pragma: no cover - optional dependency try: # pragma: no cover - optional dependency
import requests import requests
@@ -44,6 +44,16 @@ SETTING_LOG_URLS = "log_urls_dokustreams"
SETTING_DUMP_HTML = "dump_html_dokustreams" SETTING_DUMP_HTML = "dump_html_dokustreams"
SETTING_SHOW_URL_INFO = "show_url_info_dokustreams" SETTING_SHOW_URL_INFO = "show_url_info_dokustreams"
SETTING_LOG_ERRORS = "log_errors_dokustreams" SETTING_LOG_ERRORS = "log_errors_dokustreams"
ProgressCallback = Optional[Callable[[str, Optional[int]], Any]]
def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None:
if not callable(callback):
return
try:
callback(str(message or ""), None if percent is None else int(percent))
except Exception:
return
HEADERS = { HEADERS = {
"User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)", "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
@@ -213,16 +223,26 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
raise RuntimeError("requests/bs4 sind nicht verfuegbar.") raise RuntimeError("requests/bs4 sind nicht verfuegbar.")
_log_visit(url) _log_visit(url)
sess = session or get_requests_session("dokustreams", headers=HEADERS) sess = session or get_requests_session("dokustreams", headers=HEADERS)
response = None
try: try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc: except Exception as exc:
_log_error_message(f"GET {url} failed: {exc}") _log_error_message(f"GET {url} failed: {exc}")
raise raise
if response.url and response.url != url: try:
_log_url_event(response.url, kind="REDIRECT") final_url = (response.url or url) if response is not None else url
_log_response_html(url, response.text) body = (response.text or "") if response is not None else ""
return BeautifulSoup(response.text, "html.parser") if final_url != url:
_log_url_event(final_url, kind="REDIRECT")
_log_response_html(url, body)
return BeautifulSoup(body, "html.parser")
finally:
if response is not None:
try:
response.close()
except Exception:
pass
class DokuStreamsPlugin(BasisPlugin): class DokuStreamsPlugin(BasisPlugin):
@@ -247,14 +267,17 @@ class DokuStreamsPlugin(BasisPlugin):
if REQUESTS_IMPORT_ERROR: if REQUESTS_IMPORT_ERROR:
print(f"DokuStreamsPlugin Importfehler: {REQUESTS_IMPORT_ERROR}") print(f"DokuStreamsPlugin Importfehler: {REQUESTS_IMPORT_ERROR}")
async def search_titles(self, query: str) -> List[str]: async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]:
_emit_progress(progress_callback, "Doku-Streams Suche", 15)
hits = self._search_hits(query) hits = self._search_hits(query)
_emit_progress(progress_callback, f"Treffer verarbeiten ({len(hits)})", 70)
self._title_to_url = {hit.title: hit.url for hit in hits if hit.title and hit.url} self._title_to_url = {hit.title: hit.url for hit in hits if hit.title and hit.url}
for hit in hits: for hit in hits:
if hit.title: if hit.title:
self._title_meta[hit.title] = (hit.plot, hit.poster) self._title_meta[hit.title] = (hit.plot, hit.poster)
titles = [hit.title for hit in hits if hit.title] titles = [hit.title for hit in hits if hit.title]
titles.sort(key=lambda value: value.casefold()) titles.sort(key=lambda value: value.casefold())
_emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95)
return titles return titles
def _search_hits(self, query: str) -> List[SearchHit]: def _search_hits(self, query: str) -> List[SearchHit]:

View File

@@ -11,7 +11,7 @@ from __future__ import annotations
import json import json
import re import re
from dataclasses import dataclass from dataclasses import dataclass
from typing import Any, Dict, List, Optional, Set from typing import Any, Callable, Dict, List, Optional, Set
from urllib.parse import urlencode, urljoin, urlsplit from urllib.parse import urlencode, urljoin, urlsplit
try: # pragma: no cover - optional dependency (Kodi dependency) try: # pragma: no cover - optional dependency (Kodi dependency)
@@ -56,6 +56,16 @@ HEADERS = {
"Accept-Language": "de-DE,de;q=0.9,en;q=0.8", "Accept-Language": "de-DE,de;q=0.9,en;q=0.8",
"Connection": "keep-alive", "Connection": "keep-alive",
} }
ProgressCallback = Optional[Callable[[str, Optional[int]], Any]]
def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None:
if not callable(callback):
return
try:
callback(str(message or ""), None if percent is None else int(percent))
except Exception:
return
@dataclass(frozen=True) @dataclass(frozen=True)
@@ -526,6 +536,34 @@ class EinschaltenPlugin(BasisPlugin):
self._session = requests.Session() self._session = requests.Session()
return self._session return self._session
def _http_get_text(self, url: str, *, timeout: int = 20) -> tuple[str, str]:
_log_url(url, kind="GET")
_notify_url(url)
sess = self._get_session()
response = None
try:
response = sess.get(url, headers=HEADERS, timeout=timeout)
response.raise_for_status()
final_url = (response.url or url) if response is not None else url
body = (response.text or "") if response is not None else ""
_log_url(final_url, kind="OK")
_log_response_html(final_url, body)
return final_url, body
finally:
if response is not None:
try:
response.close()
except Exception:
pass
def _http_get_json(self, url: str, *, timeout: int = 20) -> tuple[str, Any]:
final_url, body = self._http_get_text(url, timeout=timeout)
try:
payload = json.loads(body or "{}")
except Exception:
payload = {}
return final_url, payload
def _get_base_url(self) -> str: def _get_base_url(self) -> str:
base = _get_setting_text(SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip() base = _get_setting_text(SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip()
return base.rstrip("/") return base.rstrip("/")
@@ -646,15 +684,9 @@ class EinschaltenPlugin(BasisPlugin):
if not url: if not url:
return "" return ""
try: try:
_log_url(url, kind="GET") _, body = self._http_get_text(url, timeout=20)
_notify_url(url) self._detail_html_by_id[movie_id] = body
sess = self._get_session() return body
resp = sess.get(url, headers=HEADERS, timeout=20)
resp.raise_for_status()
_log_url(resp.url or url, kind="OK")
_log_response_html(resp.url or url, resp.text)
self._detail_html_by_id[movie_id] = resp.text or ""
return resp.text or ""
except Exception as exc: except Exception as exc:
_log_error(f"GET {url} failed: {exc}") _log_error(f"GET {url} failed: {exc}")
return "" return ""
@@ -667,16 +699,8 @@ class EinschaltenPlugin(BasisPlugin):
if not url: if not url:
return {} return {}
try: try:
_log_url(url, kind="GET") _, data = self._http_get_json(url, timeout=20)
_notify_url(url) return data
sess = self._get_session()
resp = sess.get(url, headers=HEADERS, timeout=20)
resp.raise_for_status()
_log_url(resp.url or url, kind="OK")
# Some backends may return JSON with a JSON content-type; for debugging we still dump text.
_log_response_html(resp.url or url, resp.text)
data = resp.json()
return dict(data) if isinstance(data, dict) else {}
except Exception as exc: except Exception as exc:
_log_error(f"GET {url} failed: {exc}") _log_error(f"GET {url} failed: {exc}")
return {} return {}
@@ -741,14 +765,8 @@ class EinschaltenPlugin(BasisPlugin):
if not url: if not url:
return [] return []
try: try:
_log_url(url, kind="GET") _, body = self._http_get_text(url, timeout=20)
_notify_url(url) payload = _extract_ng_state_payload(body)
sess = self._get_session()
resp = sess.get(url, headers=HEADERS, timeout=20)
resp.raise_for_status()
_log_url(resp.url or url, kind="OK")
_log_response_html(resp.url or url, resp.text)
payload = _extract_ng_state_payload(resp.text)
return _parse_ng_state_movies(payload) return _parse_ng_state_movies(payload)
except Exception: except Exception:
return [] return []
@@ -759,14 +777,8 @@ class EinschaltenPlugin(BasisPlugin):
if not url: if not url:
return [] return []
try: try:
_log_url(url, kind="GET") _, body = self._http_get_text(url, timeout=20)
_notify_url(url) payload = _extract_ng_state_payload(body)
sess = self._get_session()
resp = sess.get(url, headers=HEADERS, timeout=20)
resp.raise_for_status()
_log_url(resp.url or url, kind="OK")
_log_response_html(resp.url or url, resp.text)
payload = _extract_ng_state_payload(resp.text)
movies = _parse_ng_state_movies(payload) movies = _parse_ng_state_movies(payload)
_log_debug_line(f"parse_ng_state_movies:count={len(movies)}") _log_debug_line(f"parse_ng_state_movies:count={len(movies)}")
if movies: if movies:
@@ -784,14 +796,8 @@ class EinschaltenPlugin(BasisPlugin):
if page > 1: if page > 1:
url = f"{url}?{urlencode({'page': str(page)})}" url = f"{url}?{urlencode({'page': str(page)})}"
try: try:
_log_url(url, kind="GET") _, body = self._http_get_text(url, timeout=20)
_notify_url(url) payload = _extract_ng_state_payload(body)
sess = self._get_session()
resp = sess.get(url, headers=HEADERS, timeout=20)
resp.raise_for_status()
_log_url(resp.url or url, kind="OK")
_log_response_html(resp.url or url, resp.text)
payload = _extract_ng_state_payload(resp.text)
movies, has_more, current_page = _parse_ng_state_movies_with_pagination(payload) movies, has_more, current_page = _parse_ng_state_movies_with_pagination(payload)
_log_debug_line(f"parse_ng_state_movies_page:page={page} count={len(movies)}") _log_debug_line(f"parse_ng_state_movies_page:page={page} count={len(movies)}")
if has_more is not None: if has_more is not None:
@@ -844,14 +850,8 @@ class EinschaltenPlugin(BasisPlugin):
if not url: if not url:
return [] return []
try: try:
_log_url(url, kind="GET") _, body = self._http_get_text(url, timeout=20)
_notify_url(url) payload = _extract_ng_state_payload(body)
sess = self._get_session()
resp = sess.get(url, headers=HEADERS, timeout=20)
resp.raise_for_status()
_log_url(resp.url or url, kind="OK")
_log_response_html(resp.url or url, resp.text)
payload = _extract_ng_state_payload(resp.text)
results = _parse_ng_state_search_results(payload) results = _parse_ng_state_search_results(payload)
return _filter_movies_by_title(query, results) return _filter_movies_by_title(query, results)
except Exception: except Exception:
@@ -867,13 +867,7 @@ class EinschaltenPlugin(BasisPlugin):
api_url = self._api_genres_url() api_url = self._api_genres_url()
if api_url: if api_url:
try: try:
_log_url(api_url, kind="GET") _, payload = self._http_get_json(api_url, timeout=20)
_notify_url(api_url)
sess = self._get_session()
resp = sess.get(api_url, headers=HEADERS, timeout=20)
resp.raise_for_status()
_log_url(resp.url or api_url, kind="OK")
payload = resp.json()
if isinstance(payload, list): if isinstance(payload, list):
parsed: Dict[str, int] = {} parsed: Dict[str, int] = {}
for item in payload: for item in payload:
@@ -900,14 +894,8 @@ class EinschaltenPlugin(BasisPlugin):
if not url: if not url:
return return
try: try:
_log_url(url, kind="GET") _, body = self._http_get_text(url, timeout=20)
_notify_url(url) payload = _extract_ng_state_payload(body)
sess = self._get_session()
resp = sess.get(url, headers=HEADERS, timeout=20)
resp.raise_for_status()
_log_url(resp.url or url, kind="OK")
_log_response_html(resp.url or url, resp.text)
payload = _extract_ng_state_payload(resp.text)
parsed = _parse_ng_state_genres(payload) parsed = _parse_ng_state_genres(payload)
if parsed: if parsed:
self._genre_id_by_name.clear() self._genre_id_by_name.clear()
@@ -915,7 +903,7 @@ class EinschaltenPlugin(BasisPlugin):
except Exception: except Exception:
return return
async def search_titles(self, query: str) -> List[str]: async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]:
if not REQUESTS_AVAILABLE: if not REQUESTS_AVAILABLE:
return [] return []
query = (query or "").strip() query = (query or "").strip()
@@ -924,9 +912,12 @@ class EinschaltenPlugin(BasisPlugin):
if not self._get_base_url(): if not self._get_base_url():
return [] return []
_emit_progress(progress_callback, "Einschalten Suche", 15)
movies = self._fetch_search_movies(query) movies = self._fetch_search_movies(query)
if not movies: if not movies:
_emit_progress(progress_callback, "Fallback: Index filtern", 45)
movies = _filter_movies_by_title(query, self._load_movies()) movies = _filter_movies_by_title(query, self._load_movies())
_emit_progress(progress_callback, f"Treffer verarbeiten ({len(movies)})", 75)
titles: List[str] = [] titles: List[str] = []
seen: set[str] = set() seen: set[str] = set()
for movie in movies: for movie in movies:
@@ -936,6 +927,7 @@ class EinschaltenPlugin(BasisPlugin):
self._id_by_title[movie.title] = movie.id self._id_by_title[movie.title] = movie.id
titles.append(movie.title) titles.append(movie.title)
titles.sort(key=lambda value: value.casefold()) titles.sort(key=lambda value: value.casefold())
_emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95)
return titles return titles
def genres(self) -> List[str]: def genres(self) -> List[str]:
@@ -971,14 +963,8 @@ class EinschaltenPlugin(BasisPlugin):
if not url: if not url:
return [] return []
try: try:
_log_url(url, kind="GET") _, body = self._http_get_text(url, timeout=20)
_notify_url(url) payload = _extract_ng_state_payload(body)
sess = self._get_session()
resp = sess.get(url, headers=HEADERS, timeout=20)
resp.raise_for_status()
_log_url(resp.url or url, kind="OK")
_log_response_html(resp.url or url, resp.text)
payload = _extract_ng_state_payload(resp.text)
except Exception: except Exception:
return [] return []
if not isinstance(payload, dict): if not isinstance(payload, dict):

View File

@@ -11,7 +11,7 @@ from dataclasses import dataclass
import re import re
from urllib.parse import quote, urlencode from urllib.parse import quote, urlencode
from urllib.parse import urljoin from urllib.parse import urljoin
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple
try: # pragma: no cover - optional dependency try: # pragma: no cover - optional dependency
import requests import requests
@@ -53,6 +53,16 @@ SETTING_LOG_URLS = "log_urls_filmpalast"
SETTING_DUMP_HTML = "dump_html_filmpalast" SETTING_DUMP_HTML = "dump_html_filmpalast"
SETTING_SHOW_URL_INFO = "show_url_info_filmpalast" SETTING_SHOW_URL_INFO = "show_url_info_filmpalast"
SETTING_LOG_ERRORS = "log_errors_filmpalast" SETTING_LOG_ERRORS = "log_errors_filmpalast"
ProgressCallback = Optional[Callable[[str, Optional[int]], Any]]
def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None:
if not callable(callback):
return
try:
callback(str(message or ""), None if percent is None else int(percent))
except Exception:
return
HEADERS = { HEADERS = {
"User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)", "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
@@ -206,16 +216,26 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
raise RuntimeError("requests/bs4 sind nicht verfuegbar.") raise RuntimeError("requests/bs4 sind nicht verfuegbar.")
_log_visit(url) _log_visit(url)
sess = session or get_requests_session("filmpalast", headers=HEADERS) sess = session or get_requests_session("filmpalast", headers=HEADERS)
response = None
try: try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc: except Exception as exc:
_log_error_message(f"GET {url} failed: {exc}") _log_error_message(f"GET {url} failed: {exc}")
raise raise
if response.url and response.url != url: try:
_log_url_event(response.url, kind="REDIRECT") final_url = (response.url or url) if response is not None else url
_log_response_html(url, response.text) body = (response.text or "") if response is not None else ""
return BeautifulSoup(response.text, "html.parser") if final_url != url:
_log_url_event(final_url, kind="REDIRECT")
_log_response_html(url, body)
return BeautifulSoup(body, "html.parser")
finally:
if response is not None:
try:
response.close()
except Exception:
pass
class FilmpalastPlugin(BasisPlugin): class FilmpalastPlugin(BasisPlugin):
@@ -224,6 +244,7 @@ class FilmpalastPlugin(BasisPlugin):
def __init__(self) -> None: def __init__(self) -> None:
self._title_to_url: Dict[str, str] = {} self._title_to_url: Dict[str, str] = {}
self._title_meta: Dict[str, tuple[str, str]] = {}
self._series_entries: Dict[str, Dict[int, Dict[int, EpisodeEntry]]] = {} self._series_entries: Dict[str, Dict[int, Dict[int, EpisodeEntry]]] = {}
self._hoster_cache: Dict[str, Dict[str, str]] = {} self._hoster_cache: Dict[str, Dict[str, str]] = {}
self._genre_to_url: Dict[str, str] = {} self._genre_to_url: Dict[str, str] = {}
@@ -352,6 +373,7 @@ class FilmpalastPlugin(BasisPlugin):
seen_titles: set[str] = set() seen_titles: set[str] = set()
seen_urls: set[str] = set() seen_urls: set[str] = set()
for base_url, params in search_requests: for base_url, params in search_requests:
response = None
try: try:
request_url = base_url if not params else f"{base_url}?{urlencode(params)}" request_url = base_url if not params else f"{base_url}?{urlencode(params)}"
_log_url_event(request_url, kind="GET") _log_url_event(request_url, kind="GET")
@@ -365,6 +387,12 @@ class FilmpalastPlugin(BasisPlugin):
except Exception as exc: except Exception as exc:
_log_error_message(f"search request failed ({base_url}): {exc}") _log_error_message(f"search request failed ({base_url}): {exc}")
continue continue
finally:
if response is not None:
try:
response.close()
except Exception:
pass
anchors = soup.select("article.liste h2 a[href], article.liste h3 a[href]") anchors = soup.select("article.liste h2 a[href], article.liste h3 a[href]")
if not anchors: if not anchors:
@@ -466,9 +494,13 @@ class FilmpalastPlugin(BasisPlugin):
titles.sort(key=lambda value: value.casefold()) titles.sort(key=lambda value: value.casefold())
return titles return titles
async def search_titles(self, query: str) -> List[str]: async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]:
_emit_progress(progress_callback, "Filmpalast Suche", 15)
hits = self._search_hits(query) hits = self._search_hits(query)
return self._apply_hits_to_title_index(hits) _emit_progress(progress_callback, f"Treffer verarbeiten ({len(hits)})", 70)
titles = self._apply_hits_to_title_index(hits)
_emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95)
return titles
def _parse_genres(self, soup: BeautifulSoupT) -> Dict[str, str]: def _parse_genres(self, soup: BeautifulSoupT) -> Dict[str, str]:
genres: Dict[str, str] = {} genres: Dict[str, str] = {}
@@ -691,6 +723,59 @@ class FilmpalastPlugin(BasisPlugin):
return hit.url return hit.url
return "" return ""
def _store_title_meta(self, title: str, *, plot: str = "", poster: str = "") -> None:
title = (title or "").strip()
if not title:
return
old_plot, old_poster = self._title_meta.get(title, ("", ""))
merged_plot = (plot or old_plot or "").strip()
merged_poster = (poster or old_poster or "").strip()
self._title_meta[title] = (merged_plot, merged_poster)
def _extract_detail_metadata(self, soup: BeautifulSoupT) -> tuple[str, str]:
if not soup:
return "", ""
plot = ""
poster = ""
for selector in ("meta[property='og:description']", "meta[name='description']"):
node = soup.select_one(selector)
if node is None:
continue
content = (node.get("content") or "").strip()
if content:
plot = content
break
if not plot:
for selector in (".toggle-content .coverDetails", ".entry-content p", "article p"):
node = soup.select_one(selector)
if node is None:
continue
text = (node.get_text(" ", strip=True) or "").strip()
if text and len(text) > 40:
plot = text
break
for selector in ("meta[property='og:image']", "meta[name='twitter:image']"):
node = soup.select_one(selector)
if node is None:
continue
content = (node.get("content") or "").strip()
if content:
poster = _absolute_url(content)
break
if not poster:
for selector in ("img.cover", "article img", ".entry-content img"):
image = soup.select_one(selector)
if image is None:
continue
value = (image.get("data-src") or image.get("src") or "").strip()
if value:
poster = _absolute_url(value)
break
return plot, poster
def remember_series_url(self, title: str, series_url: str) -> None: def remember_series_url(self, title: str, series_url: str) -> None:
title = (title or "").strip() title = (title or "").strip()
series_url = (series_url or "").strip() series_url = (series_url or "").strip()
@@ -711,6 +796,52 @@ class FilmpalastPlugin(BasisPlugin):
return _series_hint_value(series_key) return _series_hint_value(series_key)
return "" return ""
def metadata_for(self, title: str) -> tuple[dict[str, str], dict[str, str], list[object] | None]:
title = (title or "").strip()
if not title:
return {}, {}, None
info: dict[str, str] = {"title": title}
art: dict[str, str] = {}
cached_plot, cached_poster = self._title_meta.get(title, ("", ""))
if cached_plot:
info["plot"] = cached_plot
if cached_poster:
art = {"thumb": cached_poster, "poster": cached_poster}
if "plot" in info and art:
return info, art, None
detail_url = self._ensure_title_url(title)
if not detail_url:
series_key = self._series_key_for_title(title) or self._ensure_series_entries_for_title(title)
if series_key:
seasons = self._series_entries.get(series_key, {})
first_entry: Optional[EpisodeEntry] = None
for season_number in sorted(seasons.keys()):
episodes = seasons.get(season_number, {})
for episode_number in sorted(episodes.keys()):
first_entry = episodes.get(episode_number)
if first_entry is not None:
break
if first_entry is not None:
break
detail_url = first_entry.url if first_entry is not None else ""
if not detail_url:
return info, art, None
try:
soup = _get_soup(detail_url, session=get_requests_session("filmpalast", headers=HEADERS))
plot, poster = self._extract_detail_metadata(soup)
except Exception:
plot, poster = "", ""
if plot:
info["plot"] = plot
if poster:
art = {"thumb": poster, "poster": poster}
self._store_title_meta(title, plot=info.get("plot", ""), poster=poster)
return info, art, None
def is_movie(self, title: str) -> bool: def is_movie(self, title: str) -> bool:
title = (title or "").strip() title = (title or "").strip()
if not title: if not title:
@@ -913,6 +1044,7 @@ class FilmpalastPlugin(BasisPlugin):
redirected = link redirected = link
if self._requests_available: if self._requests_available:
response = None
try: try:
session = get_requests_session("filmpalast", headers=HEADERS) session = get_requests_session("filmpalast", headers=HEADERS)
response = session.get(link, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True) response = session.get(link, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True)
@@ -920,6 +1052,12 @@ class FilmpalastPlugin(BasisPlugin):
redirected = (response.url or link).strip() or link redirected = (response.url or link).strip() or link
except Exception: except Exception:
redirected = link redirected = link
finally:
if response is not None:
try:
response.close()
except Exception:
pass
# 2) Danach optional die Redirect-URL nochmals auflösen. # 2) Danach optional die Redirect-URL nochmals auflösen.
if callable(resolve_with_resolveurl) and redirected and redirected != link: if callable(resolve_with_resolveurl) and redirected and redirected != link:

View File

@@ -17,7 +17,7 @@ import os
import re import re
import time import time
import unicodedata import unicodedata
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple
from urllib.parse import quote from urllib.parse import quote
try: # pragma: no cover - optional dependency try: # pragma: no cover - optional dependency
@@ -80,6 +80,16 @@ SESSION_CACHE_MAX_TITLE_URLS = 800
CATALOG_SEARCH_TTL_SECONDS = 600 CATALOG_SEARCH_TTL_SECONDS = 600
CATALOG_SEARCH_CACHE_KEY = "catalog_index" CATALOG_SEARCH_CACHE_KEY = "catalog_index"
_CATALOG_INDEX_MEMORY: tuple[float, List["SeriesResult"]] = (0.0, []) _CATALOG_INDEX_MEMORY: tuple[float, List["SeriesResult"]] = (0.0, [])
ProgressCallback = Optional[Callable[[str, Optional[int]], Any]]
def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None:
if not callable(callback):
return
try:
callback(str(message or ""), None if percent is None else int(percent))
except Exception:
return
@dataclass @dataclass
@@ -398,37 +408,56 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = session or get_requests_session("serienstream", headers=HEADERS) sess = session or get_requests_session("serienstream", headers=HEADERS)
response = None
try: try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc: except Exception as exc:
_log_error(f"GET {url} failed: {exc}") _log_error(f"GET {url} failed: {exc}")
raise raise
if response.url and response.url != url: try:
_log_url(response.url, kind="REDIRECT") final_url = (response.url or url) if response is not None else url
_log_response_html(url, response.text) body = (response.text or "") if response is not None else ""
if _looks_like_cloudflare_challenge(response.text): if final_url != url:
_log_url(final_url, kind="REDIRECT")
_log_response_html(url, body)
if _looks_like_cloudflare_challenge(body):
raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.")
return BeautifulSoup(response.text, "html.parser") return BeautifulSoup(body, "html.parser")
finally:
if response is not None:
try:
response.close()
except Exception:
pass
def _get_html_simple(url: str) -> str: def _get_html_simple(url: str) -> str:
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = get_requests_session("serienstream", headers=HEADERS) sess = get_requests_session("serienstream", headers=HEADERS)
response = None
try: try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc: except Exception as exc:
_log_error(f"GET {url} failed: {exc}") _log_error(f"GET {url} failed: {exc}")
raise raise
if response.url and response.url != url: try:
_log_url(response.url, kind="REDIRECT") final_url = (response.url or url) if response is not None else url
body = response.text body = (response.text or "") if response is not None else ""
if final_url != url:
_log_url(final_url, kind="REDIRECT")
_log_response_html(url, body) _log_response_html(url, body)
if _looks_like_cloudflare_challenge(body): if _looks_like_cloudflare_challenge(body):
raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.")
return body return body
finally:
if response is not None:
try:
response.close()
except Exception:
pass
def _get_soup_simple(url: str) -> BeautifulSoupT: def _get_soup_simple(url: str) -> BeautifulSoupT:
@@ -472,6 +501,7 @@ def _search_series_api(query: str) -> List[SeriesResult]:
terms.extend([token for token in query.split() if token]) terms.extend([token for token in query.split() if token])
seen_urls: set[str] = set() seen_urls: set[str] = set()
for term in terms: for term in terms:
response = None
try: try:
response = sess.get( response = sess.get(
f"{_get_base_url()}/api/search/suggest", f"{_get_base_url()}/api/search/suggest",
@@ -486,6 +516,12 @@ def _search_series_api(query: str) -> List[SeriesResult]:
payload = response.json() payload = response.json()
except Exception: except Exception:
continue continue
finally:
if response is not None:
try:
response.close()
except Exception:
pass
shows = payload.get("shows") if isinstance(payload, dict) else None shows = payload.get("shows") if isinstance(payload, dict) else None
if not isinstance(shows, list): if not isinstance(shows, list):
continue continue
@@ -558,7 +594,7 @@ def _search_series_server(query: str) -> List[SeriesResult]:
return [] return []
def _extract_catalog_index_from_html(body: str) -> List[SeriesResult]: def _extract_catalog_index_from_html(body: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]:
items: List[SeriesResult] = [] items: List[SeriesResult] = []
if not body: if not body:
return items return items
@@ -569,7 +605,9 @@ def _extract_catalog_index_from_html(body: str) -> List[SeriesResult]:
) )
anchor_re = re.compile(r"<a[^>]+href=[\"']([^\"']+)[\"'][^>]*>(.*?)</a>", re.IGNORECASE | re.DOTALL) anchor_re = re.compile(r"<a[^>]+href=[\"']([^\"']+)[\"'][^>]*>(.*?)</a>", re.IGNORECASE | re.DOTALL)
data_search_re = re.compile(r"data-search=[\"']([^\"']*)[\"']", re.IGNORECASE) data_search_re = re.compile(r"data-search=[\"']([^\"']*)[\"']", re.IGNORECASE)
for match in item_re.finditer(body): for idx, match in enumerate(item_re.finditer(body), start=1):
if idx == 1 or idx % 200 == 0:
_emit_progress(progress_callback, f"Katalog parsen {idx}", 62)
block = match.group(0) block = match.group(0)
inner = match.group(1) or "" inner = match.group(1) or ""
anchor_match = anchor_re.search(inner) anchor_match = anchor_re.search(inner)
@@ -651,26 +689,33 @@ def _store_catalog_index_in_cache(items: List[SeriesResult]) -> None:
_session_cache_set(CATALOG_SEARCH_CACHE_KEY, payload, ttl_seconds=CATALOG_SEARCH_TTL_SECONDS) _session_cache_set(CATALOG_SEARCH_CACHE_KEY, payload, ttl_seconds=CATALOG_SEARCH_TTL_SECONDS)
def search_series(query: str) -> List[SeriesResult]: def search_series(query: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]:
"""Sucht Serien im (/serien)-Katalog nach Titel. Nutzt Cache + Ein-Pass-Filter.""" """Sucht Serien im (/serien)-Katalog nach Titel. Nutzt Cache + Ein-Pass-Filter."""
_ensure_requests() _ensure_requests()
if not _normalize_search_text(query): if not _normalize_search_text(query):
return [] return []
_emit_progress(progress_callback, "Server-Suche", 15)
server_results = _search_series_server(query) server_results = _search_series_server(query)
if server_results: if server_results:
_emit_progress(progress_callback, f"Server-Treffer: {len(server_results)}", 35)
return [entry for entry in server_results if entry.title and _matches_query(query, title=entry.title)] return [entry for entry in server_results if entry.title and _matches_query(query, title=entry.title)]
_emit_progress(progress_callback, "Pruefe Such-Cache", 42)
cached = _load_catalog_index_from_cache() cached = _load_catalog_index_from_cache()
if cached is not None: if cached is not None:
_emit_progress(progress_callback, f"Cache-Treffer: {len(cached)}", 52)
return [entry for entry in cached if entry.title and _matches_query(query, title=entry.title)] return [entry for entry in cached if entry.title and _matches_query(query, title=entry.title)]
_emit_progress(progress_callback, "Lade Katalogseite", 58)
catalog_url = f"{_get_base_url()}/serien?by=genre" catalog_url = f"{_get_base_url()}/serien?by=genre"
body = _get_html_simple(catalog_url) body = _get_html_simple(catalog_url)
items = _extract_catalog_index_from_html(body) items = _extract_catalog_index_from_html(body, progress_callback=progress_callback)
if not items: if not items:
_emit_progress(progress_callback, "Fallback-Parser", 70)
soup = BeautifulSoup(body, "html.parser") soup = BeautifulSoup(body, "html.parser")
items = _catalog_index_from_soup(soup) items = _catalog_index_from_soup(soup)
if items: if items:
_store_catalog_index_in_cache(items) _store_catalog_index_in_cache(items)
_emit_progress(progress_callback, f"Filtere Treffer ({len(items)})", 85)
return [entry for entry in items if entry.title and _matches_query(query, title=entry.title)] return [entry for entry in items if entry.title and _matches_query(query, title=entry.title)]
@@ -989,6 +1034,8 @@ def resolve_redirect(target_url: str) -> Optional[str]:
_get_soup(_get_base_url(), session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
response = None
try:
response = session.get( response = session.get(
normalized_url, normalized_url,
headers=HEADERS, headers=HEADERS,
@@ -998,6 +1045,12 @@ def resolve_redirect(target_url: str) -> Optional[str]:
if response.url: if response.url:
_log_url(response.url, kind="RESOLVED") _log_url(response.url, kind="RESOLVED")
return response.url if response.url else None return response.url if response.url else None
finally:
if response is not None:
try:
response.close()
except Exception:
pass
def scrape_series_detail( def scrape_series_detail(
@@ -1681,7 +1734,7 @@ class SerienstreamPlugin(BasisPlugin):
return self._episode_label_cache.get(cache_key, {}).get(episode_label) return self._episode_label_cache.get(cache_key, {}).get(episode_label)
return None return None
async def search_titles(self, query: str) -> List[str]: async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]:
query = query.strip() query = query.strip()
if not query: if not query:
self._series_results.clear() self._series_results.clear()
@@ -1695,7 +1748,8 @@ class SerienstreamPlugin(BasisPlugin):
try: try:
# Nutzt den Katalog (/serien), der jetzt nach Genres gruppiert ist. # Nutzt den Katalog (/serien), der jetzt nach Genres gruppiert ist.
# Alternativ gäbe es ein Ajax-Endpoint, aber der ist nicht immer zuverlässig erreichbar. # Alternativ gäbe es ein Ajax-Endpoint, aber der ist nicht immer zuverlässig erreichbar.
results = search_series(query) _emit_progress(progress_callback, "Serienstream Suche startet", 10)
results = search_series(query, progress_callback=progress_callback)
except Exception as exc: # pragma: no cover - defensive logging except Exception as exc: # pragma: no cover - defensive logging
self._series_results.clear() self._series_results.clear()
self._season_cache.clear() self._season_cache.clear()
@@ -1708,6 +1762,7 @@ class SerienstreamPlugin(BasisPlugin):
self._season_cache.clear() self._season_cache.clear()
self._season_links_cache.clear() self._season_links_cache.clear()
self._episode_label_cache.clear() self._episode_label_cache.clear()
_emit_progress(progress_callback, f"Treffer aufbereitet: {len(results)}", 95)
return [result.title for result in results] return [result.title for result in results]
def _ensure_seasons(self, title: str) -> List[SeasonInfo]: def _ensure_seasons(self, title: str) -> List[SeasonInfo]:

View File

@@ -19,7 +19,7 @@ import hashlib
import os import os
import re import re
import json import json
from typing import TYPE_CHECKING, Any, Dict, List, Optional from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
from urllib.parse import urlencode, urljoin from urllib.parse import urlencode, urljoin
try: # pragma: no cover - optional dependency try: # pragma: no cover - optional dependency
@@ -78,6 +78,16 @@ HEADERS = {
"Accept-Language": "de-DE,de;q=0.9,en;q=0.8", "Accept-Language": "de-DE,de;q=0.9,en;q=0.8",
"Connection": "keep-alive", "Connection": "keep-alive",
} }
ProgressCallback = Optional[Callable[[str, Optional[int]], Any]]
def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None:
if not callable(callback):
return
try:
callback(str(message or ""), None if percent is None else int(percent))
except Exception:
return
@dataclass(frozen=True) @dataclass(frozen=True)
@@ -87,6 +97,7 @@ class SearchHit:
title: str title: str
url: str url: str
description: str = "" description: str = ""
poster: str = ""
def _normalize_search_text(value: str) -> str: def _normalize_search_text(value: str) -> str:
@@ -139,6 +150,7 @@ class TopstreamfilmPlugin(BasisPlugin):
self._season_to_episode_numbers: Dict[tuple[str, str], List[int]] = {} self._season_to_episode_numbers: Dict[tuple[str, str], List[int]] = {}
self._episode_title_by_number: Dict[tuple[str, int, int], str] = {} self._episode_title_by_number: Dict[tuple[str, int, int], str] = {}
self._detail_html_cache: Dict[str, str] = {} self._detail_html_cache: Dict[str, str] = {}
self._title_meta: Dict[str, tuple[str, str]] = {}
self._popular_cache: List[str] | None = None self._popular_cache: List[str] | None = None
self._default_preferred_hosters: List[str] = list(DEFAULT_PREFERRED_HOSTERS) self._default_preferred_hosters: List[str] = list(DEFAULT_PREFERRED_HOSTERS)
self._preferred_hosters: List[str] = list(self._default_preferred_hosters) self._preferred_hosters: List[str] = list(self._default_preferred_hosters)
@@ -419,6 +431,7 @@ class TopstreamfilmPlugin(BasisPlugin):
continue continue
seen.add(hit.title) seen.add(hit.title)
self._title_to_url[hit.title] = hit.url self._title_to_url[hit.title] = hit.url
self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster)
titles.append(hit.title) titles.append(hit.title)
if titles: if titles:
self._save_title_url_cache() self._save_title_url_cache()
@@ -477,6 +490,69 @@ class TopstreamfilmPlugin(BasisPlugin):
except Exception: except Exception:
return "" return ""
def _pick_image_from_node(self, node: Any) -> str:
if node is None:
return ""
image = node.select_one("img")
if image is None:
return ""
for attr in ("data-src", "src"):
value = (image.get(attr) or "").strip()
if value and "lazy_placeholder" not in value.casefold():
return self._absolute_external_url(value, base=self._get_base_url())
srcset = (image.get("data-srcset") or image.get("srcset") or "").strip()
if srcset:
first = srcset.split(",")[0].strip().split(" ", 1)[0].strip()
if first:
return self._absolute_external_url(first, base=self._get_base_url())
return ""
def _store_title_meta(self, title: str, *, plot: str = "", poster: str = "") -> None:
title = (title or "").strip()
if not title:
return
old_plot, old_poster = self._title_meta.get(title, ("", ""))
merged_plot = (plot or old_plot or "").strip()
merged_poster = (poster or old_poster or "").strip()
self._title_meta[title] = (merged_plot, merged_poster)
def _extract_detail_metadata(self, soup: BeautifulSoupT) -> tuple[str, str]:
if not soup:
return "", ""
plot = ""
poster = ""
for selector in ("meta[property='og:description']", "meta[name='description']"):
node = soup.select_one(selector)
if node is None:
continue
content = (node.get("content") or "").strip()
if content:
plot = content
break
if not plot:
candidates: list[str] = []
for paragraph in soup.select("article p, .TPost p, .Description p, .entry-content p"):
text = (paragraph.get_text(" ", strip=True) or "").strip()
if len(text) >= 60:
candidates.append(text)
if candidates:
plot = max(candidates, key=len)
for selector in ("meta[property='og:image']", "meta[name='twitter:image']"):
node = soup.select_one(selector)
if node is None:
continue
content = (node.get("content") or "").strip()
if content:
poster = self._absolute_external_url(content, base=self._get_base_url())
break
if not poster:
for selector in ("article", ".TPost", ".entry-content"):
poster = self._pick_image_from_node(soup.select_one(selector))
if poster:
break
return plot, poster
def _clear_stream_index_for_title(self, title: str) -> None: def _clear_stream_index_for_title(self, title: str) -> None:
for key in list(self._season_to_episode_numbers.keys()): for key in list(self._season_to_episode_numbers.keys()):
if key[0] == title: if key[0] == title:
@@ -584,15 +660,25 @@ class TopstreamfilmPlugin(BasisPlugin):
session = self._get_session() session = self._get_session()
self._log_url(url, kind="VISIT") self._log_url(url, kind="VISIT")
self._notify_url(url) self._notify_url(url)
response = None
try: try:
response = session.get(url, timeout=DEFAULT_TIMEOUT) response = session.get(url, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc: except Exception as exc:
self._log_error(f"GET {url} failed: {exc}") self._log_error(f"GET {url} failed: {exc}")
raise raise
self._log_url(response.url, kind="OK") try:
self._log_response_html(response.url, response.text) final_url = (response.url or url) if response is not None else url
return BeautifulSoup(response.text, "html.parser") body = (response.text or "") if response is not None else ""
self._log_url(final_url, kind="OK")
self._log_response_html(final_url, body)
return BeautifulSoup(body, "html.parser")
finally:
if response is not None:
try:
response.close()
except Exception:
pass
def _get_detail_soup(self, title: str) -> Optional[BeautifulSoupT]: def _get_detail_soup(self, title: str) -> Optional[BeautifulSoupT]:
title = (title or "").strip() title = (title or "").strip()
@@ -701,7 +787,17 @@ class TopstreamfilmPlugin(BasisPlugin):
continue continue
if is_movie_hint: if is_movie_hint:
self._movie_title_hint.add(title) self._movie_title_hint.add(title)
hits.append(SearchHit(title=title, url=self._absolute_url(href), description="")) description_tag = item.select_one(".TPMvCn .Description, .Description, .entry-summary")
description = (description_tag.get_text(" ", strip=True) or "").strip() if description_tag else ""
poster = self._pick_image_from_node(item)
hits.append(
SearchHit(
title=title,
url=self._absolute_url(href),
description=description,
poster=poster,
)
)
return hits return hits
def is_movie(self, title: str) -> bool: def is_movie(self, title: str) -> bool:
@@ -774,6 +870,7 @@ class TopstreamfilmPlugin(BasisPlugin):
continue continue
seen.add(hit.title) seen.add(hit.title)
self._title_to_url[hit.title] = hit.url self._title_to_url[hit.title] = hit.url
self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster)
titles.append(hit.title) titles.append(hit.title)
if titles: if titles:
self._save_title_url_cache() self._save_title_url_cache()
@@ -814,7 +911,7 @@ class TopstreamfilmPlugin(BasisPlugin):
# Sonst: Serie via Streams-Accordion parsen (falls vorhanden). # Sonst: Serie via Streams-Accordion parsen (falls vorhanden).
self._parse_stream_accordion(soup, title=title) self._parse_stream_accordion(soup, title=title)
async def search_titles(self, query: str) -> List[str]: async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]:
"""Sucht Titel ueber eine HTML-Suche. """Sucht Titel ueber eine HTML-Suche.
Erwartetes HTML (Snippet): Erwartetes HTML (Snippet):
@@ -827,6 +924,7 @@ class TopstreamfilmPlugin(BasisPlugin):
query = (query or "").strip() query = (query or "").strip()
if not query: if not query:
return [] return []
_emit_progress(progress_callback, "Topstreamfilm Suche", 15)
session = self._get_session() session = self._get_session()
url = self._get_base_url() + "/" url = self._get_base_url() + "/"
@@ -834,6 +932,7 @@ class TopstreamfilmPlugin(BasisPlugin):
request_url = f"{url}?{urlencode(params)}" request_url = f"{url}?{urlencode(params)}"
self._log_url(request_url, kind="GET") self._log_url(request_url, kind="GET")
self._notify_url(request_url) self._notify_url(request_url)
response = None
try: try:
response = session.get( response = session.get(
url, url,
@@ -844,15 +943,28 @@ class TopstreamfilmPlugin(BasisPlugin):
except Exception as exc: except Exception as exc:
self._log_error(f"GET {request_url} failed: {exc}") self._log_error(f"GET {request_url} failed: {exc}")
raise raise
self._log_url(response.url, kind="OK") try:
self._log_response_html(response.url, response.text) final_url = (response.url or request_url) if response is not None else request_url
body = (response.text or "") if response is not None else ""
self._log_url(final_url, kind="OK")
self._log_response_html(final_url, body)
if BeautifulSoup is None: if BeautifulSoup is None:
return [] return []
soup = BeautifulSoup(response.text, "html.parser") soup = BeautifulSoup(body, "html.parser")
finally:
if response is not None:
try:
response.close()
except Exception:
pass
hits: List[SearchHit] = [] hits: List[SearchHit] = []
for item in soup.select("li.TPostMv"): items = soup.select("li.TPostMv")
total_items = max(1, len(items))
for idx, item in enumerate(items, start=1):
if idx == 1 or idx % 20 == 0:
_emit_progress(progress_callback, f"Treffer pruefen {idx}/{total_items}", 55)
anchor = item.select_one("a[href]") anchor = item.select_one("a[href]")
if not anchor: if not anchor:
continue continue
@@ -870,7 +982,8 @@ class TopstreamfilmPlugin(BasisPlugin):
self._movie_title_hint.add(title) self._movie_title_hint.add(title)
description_tag = item.select_one(".TPMvCn .Description") description_tag = item.select_one(".TPMvCn .Description")
description = description_tag.get_text(" ", strip=True) if description_tag else "" description = description_tag.get_text(" ", strip=True) if description_tag else ""
hit = SearchHit(title=title, url=self._absolute_url(href), description=description) poster = self._pick_image_from_node(item)
hit = SearchHit(title=title, url=self._absolute_url(href), description=description, poster=poster)
if _matches_query(query, title=hit.title, description=hit.description): if _matches_query(query, title=hit.title, description=hit.description):
hits.append(hit) hits.append(hit)
@@ -883,10 +996,41 @@ class TopstreamfilmPlugin(BasisPlugin):
continue continue
seen.add(hit.title) seen.add(hit.title)
self._title_to_url[hit.title] = hit.url self._title_to_url[hit.title] = hit.url
self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster)
titles.append(hit.title) titles.append(hit.title)
self._save_title_url_cache() self._save_title_url_cache()
_emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95)
return titles return titles
def metadata_for(self, title: str) -> tuple[dict[str, str], dict[str, str], list[object] | None]:
    """Return ``(info, art, cast)`` for *title*; cast is always ``None`` here.

    Cached plot/poster are used first; only when either is missing is the
    detail page fetched and parsed.  Freshly parsed metadata is written back
    to the cache.  A blank title yields empty dicts.
    """
    name = (title or "").strip()
    if not name:
        return {}, {}, None
    info: dict[str, str] = {"title": name}
    art: dict[str, str] = {}
    plot, poster = self._title_meta.get(name, ("", ""))
    if plot:
        info["plot"] = plot
    if poster:
        art = {"thumb": poster, "poster": poster}
    if "plot" in info and art:
        # Cache already complete -- skip the network round trip.
        return info, art, None
    soup = self._get_detail_soup(name)
    if soup is None:
        return info, art, None
    detail_plot, detail_poster = self._extract_detail_metadata(soup)
    if detail_plot:
        info["plot"] = detail_plot
    if detail_poster:
        art = {"thumb": detail_poster, "poster": detail_poster}
    self._store_title_meta(name, plot=detail_plot, poster=detail_poster)
    return info, art, None
def genres(self) -> List[str]: def genres(self) -> List[str]:
if not REQUESTS_AVAILABLE or BeautifulSoup is None: if not REQUESTS_AVAILABLE or BeautifulSoup is None:
return [] return []

View File

@@ -14,6 +14,7 @@ except ImportError: # pragma: no cover
TMDB_API_BASE = "https://api.themoviedb.org/3" TMDB_API_BASE = "https://api.themoviedb.org/3"
TMDB_IMAGE_BASE = "https://image.tmdb.org/t/p" TMDB_IMAGE_BASE = "https://image.tmdb.org/t/p"
MAX_CAST_MEMBERS = 30
_TMDB_THREAD_LOCAL = threading.local() _TMDB_THREAD_LOCAL = threading.local()
@@ -73,53 +74,17 @@ def _fetch_credits(
return [] return []
params = {"api_key": api_key, "language": (language or "de-DE").strip()} params = {"api_key": api_key, "language": (language or "de-DE").strip()}
url = f"{TMDB_API_BASE}/{kind}/{tmdb_id}/credits?{urlencode(params)}" url = f"{TMDB_API_BASE}/{kind}/{tmdb_id}/credits?{urlencode(params)}"
if callable(log): status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses)
log(f"TMDB GET {url}")
try:
response = requests.get(url, timeout=timeout)
except Exception as exc: # pragma: no cover
if callable(log):
log(f"TMDB ERROR /{kind}/{{id}}/credits request_failed error={exc!r}")
return []
status = getattr(response, "status_code", None)
if callable(log): if callable(log):
log(f"TMDB RESPONSE /{kind}/{{id}}/credits status={status}") log(f"TMDB RESPONSE /{kind}/{{id}}/credits status={status}")
if status != 200: if log_responses and payload is None and body_text:
log(f"TMDB RESPONSE_BODY /{kind}/{{id}}/credits body={body_text[:2000]}")
if status != 200 or not isinstance(payload, dict):
return [] return []
try:
payload = response.json() or {}
except Exception:
return []
if callable(log) and log_responses:
try:
dumped = json.dumps(payload, ensure_ascii=False)
except Exception:
dumped = str(payload)
log(f"TMDB RESPONSE_BODY /{kind}/{{id}}/credits body={dumped[:2000]}")
cast_payload = payload.get("cast") or [] cast_payload = payload.get("cast") or []
if callable(log): if callable(log):
log(f"TMDB CREDITS /{kind}/{{id}}/credits cast={len(cast_payload)}") log(f"TMDB CREDITS /{kind}/{{id}}/credits cast={len(cast_payload)}")
with_images: List[TmdbCastMember] = [] return _parse_cast_payload(cast_payload)
without_images: List[TmdbCastMember] = []
for entry in cast_payload:
name = (entry.get("name") or "").strip()
role = (entry.get("character") or "").strip()
thumb = _image_url(entry.get("profile_path") or "", size="w185")
if not name:
continue
member = TmdbCastMember(name=name, role=role, thumb=thumb)
if thumb:
with_images.append(member)
else:
without_images.append(member)
# Viele Kodi-Skins zeigen bei fehlendem Thumbnail Platzhalter-Köpfe.
# Bevorzugt daher Cast-Einträge mit Bild; nur wenn gar keine Bilder existieren,
# geben wir Namen ohne Bild zurück.
if with_images:
return with_images[:30]
return without_images[:30]
def _parse_cast_payload(cast_payload: object) -> List[TmdbCastMember]: def _parse_cast_payload(cast_payload: object) -> List[TmdbCastMember]:
@@ -141,8 +106,8 @@ def _parse_cast_payload(cast_payload: object) -> List[TmdbCastMember]:
else: else:
without_images.append(member) without_images.append(member)
if with_images: if with_images:
return with_images[:30] return with_images[:MAX_CAST_MEMBERS]
return without_images[:30] return without_images[:MAX_CAST_MEMBERS]
def _tmdb_get_json( def _tmdb_get_json(
@@ -163,13 +128,9 @@ def _tmdb_get_json(
if callable(log): if callable(log):
log(f"TMDB GET {url}") log(f"TMDB GET {url}")
sess = session or _get_tmdb_session() or requests.Session() sess = session or _get_tmdb_session() or requests.Session()
response = None
try: try:
response = sess.get(url, timeout=timeout) response = sess.get(url, timeout=timeout)
except Exception as exc: # pragma: no cover
if callable(log):
log(f"TMDB ERROR request_failed url={url} error={exc!r}")
return None, None, ""
status = getattr(response, "status_code", None) status = getattr(response, "status_code", None)
payload: object | None = None payload: object | None = None
body_text = "" body_text = ""
@@ -180,6 +141,16 @@ def _tmdb_get_json(
body_text = (response.text or "").strip() body_text = (response.text or "").strip()
except Exception: except Exception:
body_text = "" body_text = ""
except Exception as exc: # pragma: no cover
if callable(log):
log(f"TMDB ERROR request_failed url={url} error={exc!r}")
return None, None, ""
finally:
if response is not None:
try:
response.close()
except Exception:
pass
if callable(log): if callable(log):
log(f"TMDB RESPONSE status={status} url={url}") log(f"TMDB RESPONSE status={status} url={url}")
@@ -214,49 +185,17 @@ def fetch_tv_episode_credits(
return [] return []
params = {"api_key": api_key, "language": (language or "de-DE").strip()} params = {"api_key": api_key, "language": (language or "de-DE").strip()}
url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}/episode/{episode_number}/credits?{urlencode(params)}" url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}/episode/{episode_number}/credits?{urlencode(params)}"
if callable(log): status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses)
log(f"TMDB GET {url}")
try:
response = requests.get(url, timeout=timeout)
except Exception as exc: # pragma: no cover
if callable(log):
log(f"TMDB ERROR /tv/{{id}}/season/{{n}}/episode/{{e}}/credits request_failed error={exc!r}")
return []
status = getattr(response, "status_code", None)
if callable(log): if callable(log):
log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}}/episode/{{e}}/credits status={status}") log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}}/episode/{{e}}/credits status={status}")
if status != 200: if log_responses and payload is None and body_text:
log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}}/episode/{{e}}/credits body={body_text[:2000]}")
if status != 200 or not isinstance(payload, dict):
return [] return []
try:
payload = response.json() or {}
except Exception:
return []
if callable(log) and log_responses:
try:
dumped = json.dumps(payload, ensure_ascii=False)
except Exception:
dumped = str(payload)
log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}}/episode/{{e}}/credits body={dumped[:2000]}")
cast_payload = payload.get("cast") or [] cast_payload = payload.get("cast") or []
if callable(log): if callable(log):
log(f"TMDB CREDITS /tv/{{id}}/season/{{n}}/episode/{{e}}/credits cast={len(cast_payload)}") log(f"TMDB CREDITS /tv/{{id}}/season/{{n}}/episode/{{e}}/credits cast={len(cast_payload)}")
with_images: List[TmdbCastMember] = [] return _parse_cast_payload(cast_payload)
without_images: List[TmdbCastMember] = []
for entry in cast_payload:
name = (entry.get("name") or "").strip()
role = (entry.get("character") or "").strip()
thumb = _image_url(entry.get("profile_path") or "", size="w185")
if not name:
continue
member = TmdbCastMember(name=name, role=role, thumb=thumb)
if thumb:
with_images.append(member)
else:
without_images.append(member)
if with_images:
return with_images[:30]
return without_images[:30]
def lookup_tv_show( def lookup_tv_show(
@@ -546,27 +485,13 @@ def lookup_tv_season_summary(
params = {"api_key": api_key, "language": (language or "de-DE").strip()} params = {"api_key": api_key, "language": (language or "de-DE").strip()}
url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}?{urlencode(params)}" url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}?{urlencode(params)}"
if callable(log): status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses)
log(f"TMDB GET {url}")
try:
response = requests.get(url, timeout=timeout)
except Exception:
return None
status = getattr(response, "status_code", None)
if callable(log): if callable(log):
log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}} status={status}") log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}} status={status}")
if status != 200: if log_responses and payload is None and body_text:
log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}} body={body_text[:2000]}")
if status != 200 or not isinstance(payload, dict):
return None return None
try:
payload = response.json() or {}
except Exception:
return None
if callable(log) and log_responses:
try:
dumped = json.dumps(payload, ensure_ascii=False)
except Exception:
dumped = str(payload)
log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}} body={dumped[:2000]}")
plot = (payload.get("overview") or "").strip() plot = (payload.get("overview") or "").strip()
poster_path = (payload.get("poster_path") or "").strip() poster_path = (payload.get("poster_path") or "").strip()
@@ -594,27 +519,9 @@ def lookup_tv_season(
return None return None
params = {"api_key": api_key, "language": (language or "de-DE").strip()} params = {"api_key": api_key, "language": (language or "de-DE").strip()}
url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}?{urlencode(params)}" url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}?{urlencode(params)}"
if callable(log): status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses)
log(f"TMDB GET {url}") episodes = (payload or {}).get("episodes") if isinstance(payload, dict) else []
try: episodes = episodes or []
response = requests.get(url, timeout=timeout)
except Exception as exc: # pragma: no cover
if callable(log):
log(f"TMDB ERROR /tv/{{id}}/season/{{n}} request_failed error={exc!r}")
return None
status = getattr(response, "status_code", None)
payload = None
body_text = ""
try:
payload = response.json() or {}
except Exception:
try:
body_text = (response.text or "").strip()
except Exception:
body_text = ""
episodes = (payload or {}).get("episodes") or []
if callable(log): if callable(log):
log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}} status={status} episodes={len(episodes)}") log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}} status={status} episodes={len(episodes)}")
if log_responses: if log_responses: