Compare commits

...

15 Commits

22 changed files with 2577 additions and 272 deletions

BIN
.coverage Normal file

Binary file not shown.

5
.gitignore vendored
View File

@@ -6,3 +6,8 @@
# Build outputs # Build outputs
/dist/ /dist/
# Local tests (not committed)
/tests/
/.pytest_cache/
/pytest.ini

7
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,7 @@
{
"python.testing.pytestArgs": [
"tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
}

View File

@@ -20,5 +20,10 @@ ViewIT ist ein KodiAddon zum Durchsuchen und Abspielen von Inhalten der unter
- Plugins: `addon/plugins/*_plugin.py` - Plugins: `addon/plugins/*_plugin.py`
- Einstellungen: `addon/resources/settings.xml` - Einstellungen: `addon/resources/settings.xml`
## Tests mit Abdeckung
- Dev-Abhängigkeiten installieren: `./.venv/bin/pip install -r requirements-dev.txt`
- Tests + Coverage starten: `./.venv/bin/pytest`
- Optional (XML-Report): `./.venv/bin/pytest --cov-report=xml`
## Dokumentation ## Dokumentation
Siehe `docs/`. Siehe `docs/`.

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<addon id="plugin.video.viewit" name="ViewIt" version="0.1.46" provider-name="ViewIt"> <addon id="plugin.video.viewit" name="ViewIt" version="0.1.50" provider-name="ViewIt">
<requires> <requires>
<import addon="xbmc.python" version="3.0.0" /> <import addon="xbmc.python" version="3.0.0" />
<import addon="script.module.requests" /> <import addon="script.module.requests" />

View File

@@ -12,6 +12,7 @@ from contextlib import contextmanager
from datetime import datetime from datetime import datetime
import importlib.util import importlib.util
import inspect import inspect
import json
import os import os
import re import re
import sys import sys
@@ -112,6 +113,10 @@ def _tmdb_prefetch_concurrency() -> int:
return max(1, min(20, value)) return max(1, min(20, value))
def _tmdb_enabled() -> bool:
    """Return True when TMDB metadata lookups are enabled in the addon settings."""
    enabled: bool = _get_setting_bool("tmdb_enabled", default=True)
    return enabled
def _log(message: str, level: int = xbmc.LOGINFO) -> None: def _log(message: str, level: int = xbmc.LOGINFO) -> None:
xbmc.log(f"[ViewIt] {message}", level) xbmc.log(f"[ViewIt] {message}", level)
@@ -142,6 +147,45 @@ def _busy_dialog():
_busy_close() _busy_close()
@contextmanager
def _progress_dialog(heading: str, message: str = ""):
"""Zeigt einen Fortschrittsdialog in Kodi und liefert eine Update-Funktion."""
dialog = None
try: # pragma: no cover - Kodi runtime
if xbmcgui is not None and hasattr(xbmcgui, "DialogProgress"):
dialog = xbmcgui.DialogProgress()
dialog.create(heading, message)
except Exception:
dialog = None
def _update(percent: int, text: str = "") -> bool:
if dialog is None:
return False
percent = max(0, min(100, int(percent)))
try: # Kodi Matrix/Nexus
dialog.update(percent, text)
except TypeError:
try: # Kodi Leia fallback
dialog.update(percent, text, "", "")
except Exception:
pass
except Exception:
pass
try:
return bool(dialog.iscanceled())
except Exception:
return False
try:
yield _update
finally:
if dialog is not None:
try:
dialog.close()
except Exception:
pass
def _get_handle() -> int: def _get_handle() -> int:
return int(sys.argv[1]) if len(sys.argv) > 1 else -1 return int(sys.argv[1]) if len(sys.argv) > 1 else -1
@@ -547,6 +591,8 @@ def _tmdb_file_log(message: str) -> None:
def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]: def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]:
if not _tmdb_enabled():
return {}, {}, []
title_key = (title or "").strip().casefold() title_key = (title or "").strip().casefold()
language = _get_setting_string("tmdb_language").strip() or "de-DE" language = _get_setting_string("tmdb_language").strip() or "de-DE"
show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_plot = _get_setting_bool("tmdb_show_plot", default=True)
@@ -685,10 +731,14 @@ async def _tmdb_labels_and_art_bulk_async(
def _tmdb_labels_and_art_bulk( def _tmdb_labels_and_art_bulk(
titles: list[str], titles: list[str],
) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]]: ) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]]:
if not _tmdb_enabled():
return {}
return _run_async(_tmdb_labels_and_art_bulk_async(titles)) return _run_async(_tmdb_labels_and_art_bulk_async(titles))
def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label: str) -> tuple[dict[str, str], dict[str, str]]: def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label: str) -> tuple[dict[str, str], dict[str, str]]:
if not _tmdb_enabled():
return {"title": episode_label}, {}
title_key = (title or "").strip().casefold() title_key = (title or "").strip().casefold()
tmdb_id = _TMDB_ID_CACHE.get(title_key) tmdb_id = _TMDB_ID_CACHE.get(title_key)
if not tmdb_id: if not tmdb_id:
@@ -747,6 +797,8 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label
def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> list[TmdbCastMember]: def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> list[TmdbCastMember]:
if not _tmdb_enabled():
return []
show_episode_cast = _get_setting_bool("tmdb_show_episode_cast", default=False) show_episode_cast = _get_setting_bool("tmdb_show_episode_cast", default=False)
if not show_episode_cast: if not show_episode_cast:
return [] return []
@@ -906,21 +958,27 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None:
_set_content(handle, "movies" if plugin_name.casefold() == "einschalten" else "tvshows") _set_content(handle, "movies" if plugin_name.casefold() == "einschalten" else "tvshows")
_log(f"Suche nach Titeln (Plugin={plugin_name}): {query}") _log(f"Suche nach Titeln (Plugin={plugin_name}): {query}")
list_items: list[dict[str, object]] = []
canceled = False
try: try:
with _progress_dialog("Suche läuft", f"{plugin_name} (1/1) starte…") as progress:
canceled = progress(5, f"{plugin_name} (1/1) Suche…")
results = _run_async(plugin.search_titles(query)) results = _run_async(plugin.search_titles(query))
except Exception as exc:
_log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING)
xbmcgui.Dialog().notification("Suche", "Suche fehlgeschlagen.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle)
return
results = [str(t).strip() for t in (results or []) if t and str(t).strip()] results = [str(t).strip() for t in (results or []) if t and str(t).strip()]
results.sort(key=lambda value: value.casefold()) results.sort(key=lambda value: value.casefold())
tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {}
if results: if results and not canceled:
with _busy_dialog(): canceled = progress(35, f"{plugin_name} (1/1) Metadaten…")
tmdb_prefetched = _tmdb_labels_and_art_bulk(list(results)) tmdb_prefetched = _tmdb_labels_and_art_bulk(list(results))
for title in results:
total_results = max(1, len(results))
for index, title in enumerate(results, start=1):
if canceled:
break
if index == 1 or index == total_results or (index % 10 == 0):
pct = 35 + int((index / float(total_results)) * 60)
canceled = progress(pct, f"{plugin_name} (1/1) aufbereiten {index}/{total_results}")
info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title))
info_labels = dict(info_labels or {}) info_labels = dict(info_labels or {})
info_labels.setdefault("mediatype", "tvshow") info_labels.setdefault("mediatype", "tvshow")
@@ -931,15 +989,38 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None:
display_label = _label_with_duration(title, info_labels) display_label = _label_with_duration(title, info_labels)
display_label = _label_with_playstate(display_label, playstate) display_label = _label_with_playstate(display_label, playstate)
direct_play = bool(plugin_name.casefold() == "einschalten" and _get_setting_bool("einschalten_enable_playback", default=False)) direct_play = bool(plugin_name.casefold() == "einschalten" and _get_setting_bool("einschalten_enable_playback", default=False))
extra_params = _series_url_params(plugin, title)
list_items.append(
{
"label": display_label,
"action": "play_movie" if direct_play else "seasons",
"params": {"plugin": plugin_name, "title": title, **extra_params},
"is_folder": (not direct_play),
"info_labels": merged_info,
"art": art,
"cast": cast,
}
)
except Exception as exc:
_log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING)
xbmcgui.Dialog().notification("Suche", "Suche fehlgeschlagen.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle)
return
if canceled and not list_items:
xbmcgui.Dialog().notification("Suche", "Suche abgebrochen.", xbmcgui.NOTIFICATION_INFO, 2500)
xbmcplugin.endOfDirectory(handle)
return
for item in list_items:
_add_directory_item( _add_directory_item(
handle, handle,
display_label, str(item["label"]),
"play_movie" if direct_play else "seasons", str(item["action"]),
{"plugin": plugin_name, "title": title}, dict(item["params"]),
is_folder=not direct_play, is_folder=bool(item["is_folder"]),
info_labels=merged_info, info_labels=item["info_labels"],
art=art, art=item["art"],
cast=cast, cast=item["cast"],
) )
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)
@@ -1012,6 +1093,17 @@ def _run_async(coro):
return asyncio.run(coro) return asyncio.run(coro)
def _series_url_params(plugin: BasisPlugin, title: str) -> dict[str, str]:
    """Build optional URL params carrying the plugin's series URL for *title*.

    Returns ``{"series_url": <url>}`` when the plugin exposes a callable
    ``series_url_for_title`` that yields a non-empty URL; otherwise an
    empty dict.  Any exception from the resolver is swallowed.
    """
    resolver = getattr(plugin, "series_url_for_title", None)
    if not callable(resolver):
        return {}
    try:
        resolved = str(resolver(title) or "").strip()
    except Exception:
        return {}
    if not resolved:
        return {}
    return {"series_url": resolved}
def _show_search() -> None: def _show_search() -> None:
_log("Suche gestartet.") _log("Suche gestartet.")
dialog = xbmcgui.Dialog() dialog = xbmcgui.Dialog()
@@ -1033,18 +1125,42 @@ def _show_search_results(query: str) -> None:
xbmcgui.Dialog().notification("Suche", "Keine Plugins gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcgui.Dialog().notification("Suche", "Keine Plugins gefunden.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)
return return
for plugin_name, plugin in plugins.items(): list_items: list[dict[str, object]] = []
canceled = False
plugin_entries = list(plugins.items())
total_plugins = max(1, len(plugin_entries))
with _progress_dialog("Suche läuft", "Suche gestartet…") as progress:
for plugin_index, (plugin_name, plugin) in enumerate(plugin_entries, start=1):
range_start = int(((plugin_index - 1) / float(total_plugins)) * 100)
range_end = int((plugin_index / float(total_plugins)) * 100)
canceled = progress(range_start, f"{plugin_name} ({plugin_index}/{total_plugins}) Suche…")
if canceled:
break
try: try:
results = _run_async(plugin.search_titles(query)) results = _run_async(plugin.search_titles(query))
except Exception as exc: except Exception as exc:
_log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) _log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING)
continue continue
results = [str(t).strip() for t in (results or []) if t and str(t).strip()]
_log(f"Treffer ({plugin_name}): {len(results)}", xbmc.LOGDEBUG) _log(f"Treffer ({plugin_name}): {len(results)}", xbmc.LOGDEBUG)
tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {}
if results: if results:
with _busy_dialog(): canceled = progress(
range_start + int((range_end - range_start) * 0.35),
f"{plugin_name} ({plugin_index}/{total_plugins}) Metadaten…",
)
if canceled:
break
tmdb_prefetched = _tmdb_labels_and_art_bulk(list(results)) tmdb_prefetched = _tmdb_labels_and_art_bulk(list(results))
for title in results: total_results = max(1, len(results))
for title_index, title in enumerate(results, start=1):
if title_index == 1 or title_index == total_results or (title_index % 10 == 0):
canceled = progress(
range_start + int((range_end - range_start) * (0.35 + 0.65 * (title_index / float(total_results)))),
f"{plugin_name} ({plugin_index}/{total_plugins}) aufbereiten {title_index}/{total_results}",
)
if canceled:
break
info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title))
info_labels = dict(info_labels or {}) info_labels = dict(info_labels or {})
info_labels.setdefault("mediatype", "tvshow") info_labels.setdefault("mediatype", "tvshow")
@@ -1058,20 +1174,42 @@ def _show_search_results(query: str) -> None:
direct_play = bool( direct_play = bool(
plugin_name.casefold() == "einschalten" and _get_setting_bool("einschalten_enable_playback", default=False) plugin_name.casefold() == "einschalten" and _get_setting_bool("einschalten_enable_playback", default=False)
) )
extra_params = _series_url_params(plugin, title)
list_items.append(
{
"label": label,
"action": "play_movie" if direct_play else "seasons",
"params": {"plugin": plugin_name, "title": title, **extra_params},
"is_folder": (not direct_play),
"info_labels": merged_info,
"art": art,
"cast": cast,
}
)
if canceled:
break
if not canceled:
progress(100, "Suche abgeschlossen")
if canceled and not list_items:
xbmcgui.Dialog().notification("Suche", "Suche abgebrochen.", xbmcgui.NOTIFICATION_INFO, 2500)
xbmcplugin.endOfDirectory(handle)
return
for item in list_items:
_add_directory_item( _add_directory_item(
handle, handle,
label, str(item["label"]),
"play_movie" if direct_play else "seasons", str(item["action"]),
{"plugin": plugin_name, "title": title}, dict(item["params"]),
is_folder=not direct_play, is_folder=bool(item["is_folder"]),
info_labels=merged_info, info_labels=item["info_labels"],
art=art, art=item["art"],
cast=cast, cast=item["cast"],
) )
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)
def _show_seasons(plugin_name: str, title: str) -> None: def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None:
handle = _get_handle() handle = _get_handle()
_log(f"Staffeln laden: {plugin_name} / {title}") _log(f"Staffeln laden: {plugin_name} / {title}")
plugin = _discover_plugins().get(plugin_name) plugin = _discover_plugins().get(plugin_name)
@@ -1079,6 +1217,13 @@ def _show_seasons(plugin_name: str, title: str) -> None:
xbmcgui.Dialog().notification("Staffeln", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcgui.Dialog().notification("Staffeln", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)
return return
if series_url:
remember_series_url = getattr(plugin, "remember_series_url", None)
if callable(remember_series_url):
try:
remember_series_url(title, series_url)
except Exception:
pass
# Einschalten liefert Filme. Für Playback soll nach dem Öffnen des Titels direkt ein # Einschalten liefert Filme. Für Playback soll nach dem Öffnen des Titels direkt ein
# einzelnes abspielbares Item angezeigt werden: <Titel> -> (<Titel> abspielbar). # einzelnes abspielbares Item angezeigt werden: <Titel> -> (<Titel> abspielbar).
@@ -1090,11 +1235,14 @@ def _show_seasons(plugin_name: str, title: str) -> None:
info_labels: dict[str, object] = {"title": title, "mediatype": "movie"} info_labels: dict[str, object] = {"title": title, "mediatype": "movie"}
info_labels = _apply_playstate_to_info(info_labels, playstate) info_labels = _apply_playstate_to_info(info_labels, playstate)
display_label = _label_with_playstate(title, playstate) display_label = _label_with_playstate(title, playstate)
movie_params = {"plugin": plugin_name, "title": title}
if series_url:
movie_params["series_url"] = series_url
_add_directory_item( _add_directory_item(
handle, handle,
display_label, display_label,
"play_movie", "play_movie",
{"plugin": plugin_name, "title": title}, movie_params,
is_folder=False, is_folder=False,
info_labels=info_labels, info_labels=info_labels,
) )
@@ -1113,11 +1261,16 @@ def _show_seasons(plugin_name: str, title: str) -> None:
info_labels: dict[str, object] = {"title": title, "mediatype": "movie"} info_labels: dict[str, object] = {"title": title, "mediatype": "movie"}
info_labels = _apply_playstate_to_info(info_labels, playstate) info_labels = _apply_playstate_to_info(info_labels, playstate)
display_label = _label_with_playstate(title, playstate) display_label = _label_with_playstate(title, playstate)
movie_params = {"plugin": plugin_name, "title": title}
if series_url:
movie_params["series_url"] = series_url
else:
movie_params.update(_series_url_params(plugin, title))
_add_directory_item( _add_directory_item(
handle, handle,
display_label, display_label,
"play_movie", "play_movie",
{"plugin": plugin_name, "title": title}, movie_params,
is_folder=False, is_folder=False,
info_labels=info_labels, info_labels=info_labels,
) )
@@ -1214,7 +1367,7 @@ def _show_seasons(plugin_name: str, title: str) -> None:
handle, handle,
_label_with_playstate(season, season_state), _label_with_playstate(season, season_state),
"episodes", "episodes",
{"plugin": plugin_name, "title": title, "season": season}, {"plugin": plugin_name, "title": title, "season": season, "series_url": series_url},
is_folder=True, is_folder=True,
info_labels=merged_labels or None, info_labels=merged_labels or None,
art=merged_art, art=merged_art,
@@ -1223,7 +1376,7 @@ def _show_seasons(plugin_name: str, title: str) -> None:
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)
def _show_episodes(plugin_name: str, title: str, season: str) -> None: def _show_episodes(plugin_name: str, title: str, season: str, series_url: str = "") -> None:
handle = _get_handle() handle = _get_handle()
_log(f"Episoden laden: {plugin_name} / {title} / {season}") _log(f"Episoden laden: {plugin_name} / {title} / {season}")
plugin = _discover_plugins().get(plugin_name) plugin = _discover_plugins().get(plugin_name)
@@ -1231,6 +1384,13 @@ def _show_episodes(plugin_name: str, title: str, season: str) -> None:
xbmcgui.Dialog().notification("Episoden", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcgui.Dialog().notification("Episoden", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)
return return
if series_url:
remember_series_url = getattr(plugin, "remember_series_url", None)
if callable(remember_series_url):
try:
remember_series_url(title, series_url)
except Exception:
pass
season_number = _extract_first_int(season) season_number = _extract_first_int(season)
if season_number is not None: if season_number is not None:
xbmcplugin.setPluginCategory(handle, f"{title} - Staffel {season_number}") xbmcplugin.setPluginCategory(handle, f"{title} - Staffel {season_number}")
@@ -1426,7 +1586,7 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None
handle, handle,
display_label, display_label,
"play_movie" if direct_play else "seasons", "play_movie" if direct_play else "seasons",
{"plugin": plugin_name, "title": title}, {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=not direct_play, is_folder=not direct_play,
info_labels=info_labels, info_labels=info_labels,
art=art, art=art,
@@ -1443,7 +1603,7 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None
handle, handle,
_label_with_playstate(title, playstate), _label_with_playstate(title, playstate),
"play_movie" if direct_play else "seasons", "play_movie" if direct_play else "seasons",
{"plugin": plugin_name, "title": title}, {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=not direct_play, is_folder=not direct_play,
info_labels=_apply_playstate_to_info({"title": title}, playstate), info_labels=_apply_playstate_to_info({"title": title}, playstate),
) )
@@ -1659,7 +1819,7 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None:
handle, handle,
display_label, display_label,
"seasons", "seasons",
{"plugin": plugin_name, "title": title}, {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=True, is_folder=True,
info_labels=info_labels, info_labels=info_labels,
art=art, art=art,
@@ -1672,7 +1832,7 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None:
handle, handle,
_label_with_playstate(title, playstate), _label_with_playstate(title, playstate),
"seasons", "seasons",
{"plugin": plugin_name, "title": title}, {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=True, is_folder=True,
info_labels=_apply_playstate_to_info({"title": title}, playstate), info_labels=_apply_playstate_to_info({"title": title}, playstate),
) )
@@ -1804,7 +1964,7 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None:
handle, handle,
display_label, display_label,
"play_movie" if direct_play else "seasons", "play_movie" if direct_play else "seasons",
{"plugin": plugin_name, "title": title}, {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=not direct_play, is_folder=not direct_play,
info_labels=info_labels, info_labels=info_labels,
art=art, art=art,
@@ -1821,7 +1981,7 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None:
handle, handle,
_label_with_playstate(title, playstate), _label_with_playstate(title, playstate),
"play_movie" if direct_play else "seasons", "play_movie" if direct_play else "seasons",
{"plugin": plugin_name, "title": title}, {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=not direct_play, is_folder=not direct_play,
info_labels=_apply_playstate_to_info({"title": title}, playstate), info_labels=_apply_playstate_to_info({"title": title}, playstate),
) )
@@ -1926,6 +2086,86 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page
handle = _get_handle() handle = _get_handle()
page_size = 10 page_size = 10
page = max(1, int(page or 1)) page = max(1, int(page or 1))
plugin = _discover_plugins().get(plugin_name)
if plugin is None:
xbmcgui.Dialog().notification("Genres", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle)
return
grouped_paging = getattr(plugin, "titles_for_genre_group_page", None)
grouped_has_more = getattr(plugin, "genre_group_has_more", None)
if callable(grouped_paging):
try:
page_items = [str(t).strip() for t in list(grouped_paging(genre, group_code, page, page_size) or []) if t and str(t).strip()]
except Exception as exc:
_log(f"Genre-Serien konnten nicht geladen werden ({plugin_name}/{genre}/{group_code} p{page}): {exc}", xbmc.LOGWARNING)
xbmcgui.Dialog().notification("Genres", "Serien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle)
return
xbmcplugin.setPluginCategory(handle, f"{genre} [{group_code}] ({page})")
show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False)
if page > 1:
_add_directory_item(
handle,
"Vorherige Seite",
"genre_series_group",
{"plugin": plugin_name, "genre": genre, "group": group_code, "page": str(page - 1)},
is_folder=True,
)
if page_items:
if show_tmdb:
with _busy_dialog():
tmdb_prefetched = _tmdb_labels_and_art_bulk(page_items)
for title in page_items:
info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title))
info_labels = dict(info_labels or {})
info_labels.setdefault("mediatype", "tvshow")
if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow":
info_labels.setdefault("tvshowtitle", title)
playstate = _title_playstate(plugin_name, title)
info_labels = _apply_playstate_to_info(dict(info_labels), playstate)
display_label = _label_with_duration(title, info_labels)
display_label = _label_with_playstate(display_label, playstate)
_add_directory_item(
handle,
display_label,
"seasons",
{"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=True,
info_labels=info_labels,
art=art,
cast=cast,
)
else:
for title in page_items:
playstate = _title_playstate(plugin_name, title)
_add_directory_item(
handle,
_label_with_playstate(title, playstate),
"seasons",
{"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=True,
info_labels=_apply_playstate_to_info({"title": title}, playstate),
)
show_next = False
if callable(grouped_has_more):
try:
show_next = bool(grouped_has_more(genre, group_code, page, page_size))
except Exception:
show_next = False
elif len(page_items) >= page_size:
show_next = True
if show_next:
_add_directory_item(
handle,
"Nächste Seite",
"genre_series_group",
{"plugin": plugin_name, "genre": genre, "group": group_code, "page": str(page + 1)},
is_folder=True,
)
xbmcplugin.endOfDirectory(handle)
return
try: try:
titles = _get_genre_titles(plugin_name, genre) titles = _get_genre_titles(plugin_name, genre)
@@ -1973,7 +2213,7 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page
handle, handle,
display_label, display_label,
"seasons", "seasons",
{"plugin": plugin_name, "title": title}, {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=True, is_folder=True,
info_labels=info_labels, info_labels=info_labels,
art=art, art=art,
@@ -1986,7 +2226,7 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page
handle, handle,
_label_with_playstate(title, playstate), _label_with_playstate(title, playstate),
"seasons", "seasons",
{"plugin": plugin_name, "title": title}, {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=True, is_folder=True,
info_labels=_apply_playstate_to_info({"title": title}, playstate), info_labels=_apply_playstate_to_info({"title": title}, playstate),
) )
@@ -2360,12 +2600,13 @@ def run() -> None:
elif action == "settings": elif action == "settings":
_open_settings() _open_settings()
elif action == "seasons": elif action == "seasons":
_show_seasons(params.get("plugin", ""), params.get("title", "")) _show_seasons(params.get("plugin", ""), params.get("title", ""), params.get("series_url", ""))
elif action == "episodes": elif action == "episodes":
_show_episodes( _show_episodes(
params.get("plugin", ""), params.get("plugin", ""),
params.get("title", ""), params.get("title", ""),
params.get("season", ""), params.get("season", ""),
params.get("series_url", ""),
) )
elif action == "play_episode": elif action == "play_episode":
_play_episode( _play_episode(
@@ -2378,6 +2619,15 @@ def run() -> None:
elif action == "play_movie": elif action == "play_movie":
plugin_name = params.get("plugin", "") plugin_name = params.get("plugin", "")
title = params.get("title", "") title = params.get("title", "")
series_url = params.get("series_url", "")
if series_url:
plugin = _discover_plugins().get(plugin_name)
remember_series_url = getattr(plugin, "remember_series_url", None) if plugin is not None else None
if callable(remember_series_url):
try:
remember_series_url(title, series_url)
except Exception:
pass
# Einschalten liefert Filme (keine Staffeln/Episoden). Für Playback nutzen wir: # Einschalten liefert Filme (keine Staffeln/Episoden). Für Playback nutzen wir:
# <Titel> -> Stream -> <Titel>. # <Titel> -> Stream -> <Titel>.
if (plugin_name or "").casefold() == "einschalten": if (plugin_name or "").casefold() == "einschalten":

View File

@@ -54,10 +54,39 @@ def get_setting_bool(addon_id: str, setting_id: str, *, default: bool = False) -
return default return default
def notify_url(addon_id: str, *, heading: str, url: str, enabled_setting_id: str) -> None: def get_setting_int(addon_id: str, setting_id: str, *, default: int = 0) -> int:
if xbmcaddon is None:
return default
try:
addon = xbmcaddon.Addon(addon_id)
getter = getattr(addon, "getSettingInt", None)
if getter is not None:
return int(getter(setting_id))
raw = addon.getSetting(setting_id)
return int(str(raw).strip())
except Exception:
return default
def _is_logging_enabled(addon_id: str, *, global_setting_id: str, plugin_setting_id: Optional[str]) -> bool:
    """Check whether logging is active for the addon (and optionally one plugin).

    The global toggle must be on; when *plugin_setting_id* is given, the
    plugin-specific toggle must be enabled as well.
    """
    globally_on = get_setting_bool(addon_id, global_setting_id, default=False)
    if not globally_on:
        return False
    if not plugin_setting_id:
        return True
    return get_setting_bool(addon_id, plugin_setting_id, default=False)
def notify_url(
addon_id: str,
*,
heading: str,
url: str,
enabled_setting_id: str,
plugin_setting_id: Optional[str] = None,
) -> None:
if xbmcgui is None: if xbmcgui is None:
return return
if not get_setting_bool(addon_id, enabled_setting_id, default=False): if not _is_logging_enabled(addon_id, global_setting_id=enabled_setting_id, plugin_setting_id=plugin_setting_id):
return return
try: try:
xbmcgui.Dialog().notification(heading, url, xbmcgui.NOTIFICATION_INFO, 3000) xbmcgui.Dialog().notification(heading, url, xbmcgui.NOTIFICATION_INFO, 3000)
@@ -96,16 +125,92 @@ def _append_text_file(path: str, content: str) -> None:
return return
def log_url(addon_id: str, *, enabled_setting_id: str, log_filename: str, url: str, kind: str = "VISIT") -> None: def _rotate_log_file(path: str, *, max_bytes: int, max_files: int) -> None:
if not get_setting_bool(addon_id, enabled_setting_id, default=False): if max_bytes <= 0 or max_files <= 0:
return
try:
if not os.path.exists(path) or os.path.getsize(path) <= max_bytes:
return
except Exception:
return
try:
for index in range(max_files - 1, 0, -1):
older = f"{path}.{index}"
newer = f"{path}.{index + 1}"
if os.path.exists(older):
if index + 1 > max_files:
os.remove(older)
else:
os.replace(older, newer)
os.replace(path, f"{path}.1")
except Exception:
return
def _prune_dump_files(directory: str, *, prefix: str, max_files: int) -> None:
if not directory or max_files <= 0:
return
try:
entries = [
os.path.join(directory, name)
for name in os.listdir(directory)
if name.startswith(prefix) and name.endswith(".html")
]
if len(entries) <= max_files:
return
entries.sort(key=lambda path: os.path.getmtime(path))
for path in entries[: len(entries) - max_files]:
try:
os.remove(path)
except Exception:
pass
except Exception:
return
def log_url(
addon_id: str,
*,
enabled_setting_id: str,
log_filename: str,
url: str,
kind: str = "VISIT",
request_id: Optional[str] = None,
plugin_setting_id: Optional[str] = None,
max_mb_setting_id: str = "log_max_mb",
max_files_setting_id: str = "log_max_files",
) -> None:
if not _is_logging_enabled(addon_id, global_setting_id=enabled_setting_id, plugin_setting_id=plugin_setting_id):
return return
timestamp = datetime.utcnow().isoformat(timespec="seconds") + "Z" timestamp = datetime.utcnow().isoformat(timespec="seconds") + "Z"
line = f"{timestamp}\t{kind}\t{url}\n" request_part = f"\t{request_id}" if request_id else ""
line = f"{timestamp}\t{kind}{request_part}\t{url}\n"
log_dir = _profile_logs_dir(addon_id) log_dir = _profile_logs_dir(addon_id)
if log_dir: path = os.path.join(log_dir, log_filename) if log_dir else os.path.join(os.path.dirname(__file__), log_filename)
_append_text_file(os.path.join(log_dir, log_filename), line) max_mb = get_setting_int(addon_id, max_mb_setting_id, default=5)
return max_files = get_setting_int(addon_id, max_files_setting_id, default=3)
_append_text_file(os.path.join(os.path.dirname(__file__), log_filename), line) _rotate_log_file(path, max_bytes=max_mb * 1024 * 1024, max_files=max_files)
_append_text_file(path, line)
def log_error(
    addon_id: str,
    *,
    enabled_setting_id: str,
    log_filename: str,
    message: str,
    request_id: Optional[str] = None,
    plugin_setting_id: Optional[str] = None,
) -> None:
    """Append *message* as an ERROR entry to the addon's URL log.

    Thin convenience wrapper around ``log_url`` with ``kind="ERROR"``;
    all gating (global/plugin toggles) happens inside ``log_url``.
    """
    log_url(
        addon_id,
        enabled_setting_id=enabled_setting_id,
        log_filename=log_filename,
        url=message,
        kind="ERROR",
        request_id=request_id,
        plugin_setting_id=plugin_setting_id,
    )
def dump_response_html( def dump_response_html(
@@ -115,14 +220,20 @@ def dump_response_html(
url: str, url: str,
body: str, body: str,
filename_prefix: str, filename_prefix: str,
request_id: Optional[str] = None,
plugin_setting_id: Optional[str] = None,
max_files_setting_id: str = "dump_max_files",
) -> None: ) -> None:
if not get_setting_bool(addon_id, enabled_setting_id, default=False): if not _is_logging_enabled(addon_id, global_setting_id=enabled_setting_id, plugin_setting_id=plugin_setting_id):
return return
timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S_%f") timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S_%f")
digest = hashlib.md5(url.encode("utf-8")).hexdigest() # nosec - filename only digest = hashlib.md5(url.encode("utf-8")).hexdigest() # nosec - filename only
filename = f"{filename_prefix}_{timestamp}_{digest}.html" filename = f"{filename_prefix}_{timestamp}_{digest}.html"
log_dir = _profile_logs_dir(addon_id) log_dir = _profile_logs_dir(addon_id)
path = os.path.join(log_dir, filename) if log_dir else os.path.join(os.path.dirname(__file__), filename) path = os.path.join(log_dir, filename) if log_dir else os.path.join(os.path.dirname(__file__), filename)
content = f"<!-- {url} -->\n{body or ''}" request_line = f" request_id={request_id}" if request_id else ""
content = f"<!-- {url}{request_line} -->\n{body or ''}"
if log_dir:
max_files = get_setting_int(addon_id, max_files_setting_id, default=200)
_prune_dump_files(log_dir, prefix=filename_prefix, max_files=max_files)
_append_text_file(path, content) _append_text_file(path, content)

View File

@@ -8,7 +8,11 @@ Dieses Plugin ist weitgehend kompatibel zur Serienstream-Integration:
from __future__ import annotations from __future__ import annotations
from dataclasses import dataclass from dataclasses import dataclass
from html import unescape
import hashlib
import json
import re import re
import time
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias
try: # pragma: no cover - optional dependency try: # pragma: no cover - optional dependency
@@ -25,11 +29,13 @@ else:
try: # pragma: no cover - optional Kodi helpers try: # pragma: no cover - optional Kodi helpers
import xbmcaddon # type: ignore[import-not-found] import xbmcaddon # type: ignore[import-not-found]
import xbmcgui # type: ignore[import-not-found]
except ImportError: # pragma: no cover - allow running outside Kodi except ImportError: # pragma: no cover - allow running outside Kodi
xbmcaddon = None xbmcaddon = None
xbmcgui = None
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url from plugin_helpers import dump_response_html, get_setting_bool, get_setting_string, log_error, log_url, notify_url
from http_session_pool import get_requests_session from http_session_pool import get_requests_session
from regex_patterns import DIGITS, SEASON_EPISODE_TAG, SEASON_EPISODE_URL, STAFFEL_NUM_IN_URL from regex_patterns import DIGITS, SEASON_EPISODE_TAG, SEASON_EPISODE_URL, STAFFEL_NUM_IN_URL
@@ -41,25 +47,28 @@ else: # pragma: no cover
BeautifulSoupT: TypeAlias = Any BeautifulSoupT: TypeAlias = Any
BASE_URL = "https://aniworld.to" SETTING_BASE_URL = "aniworld_base_url"
ANIME_BASE_URL = f"{BASE_URL}/anime/stream" DEFAULT_BASE_URL = "https://aniworld.to"
POPULAR_ANIMES_URL = f"{BASE_URL}/beliebte-animes"
GENRES_URL = f"{BASE_URL}/animes"
LATEST_EPISODES_URL = f"{BASE_URL}/neue-episoden"
SEARCH_URL = f"{BASE_URL}/search?q={{query}}"
SEARCH_API_URL = f"{BASE_URL}/ajax/search"
DEFAULT_PREFERRED_HOSTERS = ["voe"] DEFAULT_PREFERRED_HOSTERS = ["voe"]
DEFAULT_TIMEOUT = 20 DEFAULT_TIMEOUT = 20
ADDON_ID = "plugin.video.viewit" ADDON_ID = "plugin.video.viewit"
GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info" GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
GLOBAL_SETTING_LOG_ERRORS = "debug_log_errors"
SETTING_LOG_URLS = "log_urls_aniworld"
SETTING_DUMP_HTML = "dump_html_aniworld"
SETTING_SHOW_URL_INFO = "show_url_info_aniworld"
SETTING_LOG_ERRORS = "log_errors_aniworld"
HEADERS = { HEADERS = {
"User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)", "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Language": "de-DE,de;q=0.9,en;q=0.8", "Accept-Language": "de-DE,de;q=0.9,en;q=0.8",
"Connection": "keep-alive", "Connection": "keep-alive",
} }
SESSION_CACHE_TTL_SECONDS = 300
SESSION_CACHE_PREFIX = "viewit.aniworld"
SESSION_CACHE_MAX_TITLE_URLS = 800
@dataclass @dataclass
@@ -93,17 +102,122 @@ class SeasonInfo:
episodes: List[EpisodeInfo] episodes: List[EpisodeInfo]
def _get_base_url() -> str:
    """Return the configured AniWorld base URL, without a trailing slash.

    Falls back to ``DEFAULT_BASE_URL`` when the setting is empty/blank.
    """
    configured = get_setting_string(ADDON_ID, SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip()
    return (configured or DEFAULT_BASE_URL).rstrip("/")
def _anime_base_url() -> str:
    """Root URL of anime stream pages on the configured host."""
    return f"{_get_base_url()}/anime/stream"


def _popular_animes_url() -> str:
    """URL of the "beliebte-animes" (popular animes) listing."""
    return f"{_get_base_url()}/beliebte-animes"


def _genres_url() -> str:
    """URL of the genre overview page."""
    return f"{_get_base_url()}/animes"


def _latest_episodes_url() -> str:
    """URL of the "neue-episoden" (latest episodes) listing."""
    return f"{_get_base_url()}/neue-episoden"


def _search_url(query: str) -> str:
    """HTML search URL; callers pass *query* already URL-quoted."""
    return f"{_get_base_url()}/search?q={query}"


def _search_api_url() -> str:
    """AJAX search endpoint used as the primary search path."""
    return f"{_get_base_url()}/ajax/search"
def _absolute_url(href: str) -> str: def _absolute_url(href: str) -> str:
return f"{BASE_URL}{href}" if href.startswith("/") else href return f"{_get_base_url()}{href}" if href.startswith("/") else href
def _session_window() -> Any:
    """Return the Kodi home window (id 10000) used as a property store.

    Returns ``None`` when the ``xbmcgui`` module is unavailable (running
    outside Kodi) or the window cannot be created.
    """
    if xbmcgui is None:
        return None
    try:
        home_window = xbmcgui.Window(10000)
    except Exception:
        return None
    return home_window
def _session_cache_key(name: str) -> str:
    """Build the window-property key for a session-cache entry *name*."""
    # Include a hash of the configured base URL so switching mirrors does
    # not serve stale cache entries recorded for another host.
    base_hash = hashlib.sha1(_get_base_url().encode("utf-8")).hexdigest()[:12]
    return f"{SESSION_CACHE_PREFIX}.{base_hash}.{name}"
def _session_cache_get(name: str) -> Any:
    """Fetch a value from the session cache (Kodi home window), honouring TTL.

    Returns ``None`` when no window is available, the property is missing,
    not valid JSON, not the expected envelope shape, or already expired.
    """
    window = _session_window()
    if window is None:
        return None
    try:
        serialized = window.getProperty(_session_cache_key(name)) or ""
    except Exception:
        return None
    if not serialized:
        return None
    try:
        envelope = json.loads(serialized)
    except Exception:
        return None
    if not isinstance(envelope, dict):
        return None
    # Reject expired entries; a malformed "expires_at" counts as expired.
    try:
        still_valid = float(envelope.get("expires_at") or 0) > time.time()
    except Exception:
        return None
    if not still_valid:
        return None
    return envelope.get("data")
def _session_cache_set(name: str, data: Any, *, ttl_seconds: int = SESSION_CACHE_TTL_SECONDS) -> None:
    """Store *data* in the session cache with an absolute expiry timestamp.

    Silently does nothing when no Kodi window exists, *data* cannot be
    JSON-encoded, or the serialized envelope exceeds ~240 KB.
    """
    window = _session_window()
    if window is None:
        return
    envelope = {
        "expires_at": float(time.time() + max(1, int(ttl_seconds))),
        "data": data,
    }
    try:
        serialized = json.dumps(envelope, ensure_ascii=False, separators=(",", ":"))
    except Exception:
        return
    # Window properties are not meant for large blobs; skip oversized payloads.
    if len(serialized) > 240_000:
        return
    try:
        window.setProperty(_session_cache_key(name), serialized)
    except Exception:
        return
def _log_url(url: str, *, kind: str = "VISIT") -> None: def _log_url(url: str, *, kind: str = "VISIT") -> None:
log_url(ADDON_ID, enabled_setting_id=GLOBAL_SETTING_LOG_URLS, log_filename="aniworld_urls.log", url=url, kind=kind) log_url(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="aniworld_urls.log",
url=url,
kind=kind,
)
def _log_visit(url: str) -> None: def _log_visit(url: str) -> None:
_log_url(url, kind="VISIT") _log_url(url, kind="VISIT")
notify_url(ADDON_ID, heading="AniWorld", url=url, enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO) notify_url(
ADDON_ID,
heading="AniWorld",
url=url,
enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO,
plugin_setting_id=SETTING_SHOW_URL_INFO,
)
def _log_parsed_url(url: str) -> None: def _log_parsed_url(url: str) -> None:
@@ -114,12 +228,23 @@ def _log_response_html(url: str, body: str) -> None:
dump_response_html( dump_response_html(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_DUMP_HTML, enabled_setting_id=GLOBAL_SETTING_DUMP_HTML,
plugin_setting_id=SETTING_DUMP_HTML,
url=url, url=url,
body=body, body=body,
filename_prefix="aniworld_response", filename_prefix="aniworld_response",
) )
def _log_error(message: str) -> None:
    """Record *message* in the AniWorld error log (honours debug settings)."""
    log_error(
        ADDON_ID,
        enabled_setting_id=GLOBAL_SETTING_LOG_ERRORS,
        plugin_setting_id=SETTING_LOG_ERRORS,
        log_filename="aniworld_errors.log",
        message=message,
    )
def _normalize_search_text(value: str) -> str: def _normalize_search_text(value: str) -> str:
value = (value or "").casefold() value = (value or "").casefold()
value = re.sub(r"[^a-z0-9]+", " ", value) value = re.sub(r"[^a-z0-9]+", " ", value)
@@ -137,10 +262,8 @@ def _matches_query(query: str, *, title: str) -> bool:
normalized_query = _normalize_search_text(query) normalized_query = _normalize_search_text(query)
if not normalized_query: if not normalized_query:
return False return False
haystack = _normalize_search_text(title) haystack = f" {_normalize_search_text(title)} "
if not haystack: return f" {normalized_query} " in haystack
return False
return normalized_query in haystack
def _ensure_requests() -> None: def _ensure_requests() -> None:
@@ -166,8 +289,12 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = session or get_requests_session("aniworld", headers=HEADERS) sess = session or get_requests_session("aniworld", headers=HEADERS)
try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
raise
if response.url and response.url != url: if response.url and response.url != url:
_log_url(response.url, kind="REDIRECT") _log_url(response.url, kind="REDIRECT")
_log_response_html(url, response.text) _log_response_html(url, response.text)
@@ -176,18 +303,48 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
return BeautifulSoup(response.text, "html.parser") return BeautifulSoup(response.text, "html.parser")
def _get_soup_simple(url: str) -> BeautifulSoupT: def _get_html_simple(url: str) -> str:
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = get_requests_session("aniworld", headers=HEADERS) sess = get_requests_session("aniworld", headers=HEADERS)
try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
raise
if response.url and response.url != url: if response.url and response.url != url:
_log_url(response.url, kind="REDIRECT") _log_url(response.url, kind="REDIRECT")
_log_response_html(url, response.text) body = response.text
if _looks_like_cloudflare_challenge(response.text): _log_response_html(url, body)
if _looks_like_cloudflare_challenge(body):
raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.")
return BeautifulSoup(response.text, "html.parser") return body
def _get_soup_simple(url: str) -> BeautifulSoupT:
body = _get_html_simple(url)
return BeautifulSoup(body, "html.parser")
def _extract_genre_names_from_html(body: str) -> List[str]:
names: List[str] = []
seen: set[str] = set()
pattern = re.compile(
r"<div[^>]*class=[\"'][^\"']*seriesGenreList[^\"']*[\"'][^>]*>.*?<h3[^>]*>(.*?)</h3>",
re.IGNORECASE | re.DOTALL,
)
for match in pattern.finditer(body or ""):
text = re.sub(r"<[^>]+>", " ", match.group(1) or "")
text = unescape(re.sub(r"\s+", " ", text)).strip()
if not text:
continue
key = text.casefold()
if key in seen:
continue
seen.add(key)
names.append(text)
return names
def _post_json(url: str, *, payload: Dict[str, str], session: Optional[RequestsSession] = None) -> Any: def _post_json(url: str, *, payload: Dict[str, str], session: Optional[RequestsSession] = None) -> Any:
@@ -354,15 +511,16 @@ def _extract_latest_episodes(soup: BeautifulSoupT) -> List[LatestEpisode]:
return episodes return episodes
def scrape_anime_detail(anime_identifier: str, max_seasons: Optional[int] = None) -> List[SeasonInfo]: def scrape_anime_detail(
anime_identifier: str,
max_seasons: Optional[int] = None,
*,
load_episodes: bool = True,
) -> List[SeasonInfo]:
_ensure_requests() _ensure_requests()
anime_url = _series_root_url(_absolute_url(anime_identifier)) anime_url = _series_root_url(_absolute_url(anime_identifier))
_log_url(anime_url, kind="ANIME") _log_url(anime_url, kind="ANIME")
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
try:
_get_soup(BASE_URL, session=session)
except Exception:
pass
soup = _get_soup(anime_url, session=session) soup = _get_soup(anime_url, session=session)
base_anime_url = _series_root_url(_extract_canonical_url(soup, anime_url)) base_anime_url = _series_root_url(_extract_canonical_url(soup, anime_url))
@@ -382,6 +540,8 @@ def scrape_anime_detail(anime_identifier: str, max_seasons: Optional[int] = None
seasons: List[SeasonInfo] = [] seasons: List[SeasonInfo] = []
for number, url in season_links: for number, url in season_links:
episodes: List[EpisodeInfo] = []
if load_episodes:
season_soup = _get_soup(url, session=session) season_soup = _get_soup(url, session=session)
episodes = _extract_episodes(season_soup) episodes = _extract_episodes(season_soup)
seasons.append(SeasonInfo(number=number, url=url, episodes=episodes)) seasons.append(SeasonInfo(number=number, url=url, episodes=episodes))
@@ -394,7 +554,7 @@ def resolve_redirect(target_url: str) -> Optional[str]:
normalized_url = _absolute_url(target_url) normalized_url = _absolute_url(target_url)
_log_visit(normalized_url) _log_visit(normalized_url)
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True) response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True)
if response.url: if response.url:
_log_url(response.url, kind="RESOLVED") _log_url(response.url, kind="RESOLVED")
@@ -405,7 +565,7 @@ def fetch_episode_hoster_names(episode_url: str) -> List[str]:
_ensure_requests() _ensure_requests()
normalized_url = _absolute_url(episode_url) normalized_url = _absolute_url(episode_url)
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
names: List[str] = [] names: List[str] = []
seen: set[str] = set() seen: set[str] = set()
@@ -440,7 +600,7 @@ def fetch_episode_stream_link(
normalized_url = _absolute_url(episode_url) normalized_url = _absolute_url(episode_url)
preferred = [hoster.lower() for hoster in (preferred_hosters or DEFAULT_PREFERRED_HOSTERS)] preferred = [hoster.lower() for hoster in (preferred_hosters or DEFAULT_PREFERRED_HOSTERS)]
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
candidates: List[Tuple[str, str]] = [] candidates: List[Tuple[str, str]] = []
for anchor in soup.select(".hosterSiteVideo a.watchEpisode"): for anchor in soup.select(".hosterSiteVideo a.watchEpisode"):
@@ -476,10 +636,10 @@ def search_animes(query: str) -> List[SeriesResult]:
return [] return []
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
try: try:
session.get(BASE_URL, headers=HEADERS, timeout=DEFAULT_TIMEOUT) session.get(_get_base_url(), headers=HEADERS, timeout=DEFAULT_TIMEOUT)
except Exception: except Exception:
pass pass
data = _post_json(SEARCH_API_URL, payload={"keyword": query}, session=session) data = _post_json(_search_api_url(), payload={"keyword": query}, session=session)
results: List[SeriesResult] = [] results: List[SeriesResult] = []
seen: set[str] = set() seen: set[str] = set()
if isinstance(data, list): if isinstance(data, list):
@@ -507,7 +667,7 @@ def search_animes(query: str) -> List[SeriesResult]:
results.append(SeriesResult(title=title, description=description, url=url)) results.append(SeriesResult(title=title, description=description, url=url))
return results return results
soup = _get_soup_simple(SEARCH_URL.format(query=requests.utils.quote(query))) soup = _get_soup_simple(_search_url(requests.utils.quote(query)))
for anchor in soup.select("a[href^='/anime/stream/'][href]"): for anchor in soup.select("a[href^='/anime/stream/'][href]"):
href = (anchor.get("href") or "").strip() href = (anchor.get("href") or "").strip()
if not href or "/staffel-" in href or "/episode-" in href: if not href or "/staffel-" in href or "/episode-" in href:
@@ -530,11 +690,14 @@ def search_animes(query: str) -> List[SeriesResult]:
class AniworldPlugin(BasisPlugin): class AniworldPlugin(BasisPlugin):
name = "AniWorld (aniworld.to)" name = "Aniworld"
def __init__(self) -> None: def __init__(self) -> None:
self._anime_results: Dict[str, SeriesResult] = {} self._anime_results: Dict[str, SeriesResult] = {}
self._title_url_cache: Dict[str, str] = self._load_title_url_cache()
self._genre_names_cache: Optional[List[str]] = None
self._season_cache: Dict[str, List[SeasonInfo]] = {} self._season_cache: Dict[str, List[SeasonInfo]] = {}
self._season_links_cache: Dict[str, List[SeasonInfo]] = {}
self._episode_label_cache: Dict[Tuple[str, str], Dict[str, EpisodeInfo]] = {} self._episode_label_cache: Dict[Tuple[str, str], Dict[str, EpisodeInfo]] = {}
self._popular_cache: Optional[List[SeriesResult]] = None self._popular_cache: Optional[List[SeriesResult]] = None
self._genre_cache: Optional[Dict[str, List[SeriesResult]]] = None self._genre_cache: Optional[Dict[str, List[SeriesResult]]] = None
@@ -552,6 +715,132 @@ class AniworldPlugin(BasisPlugin):
if REQUESTS_IMPORT_ERROR: if REQUESTS_IMPORT_ERROR:
print(f"AniworldPlugin Importfehler: {REQUESTS_IMPORT_ERROR}") print(f"AniworldPlugin Importfehler: {REQUESTS_IMPORT_ERROR}")
def _load_title_url_cache(self) -> Dict[str, str]:
raw = _session_cache_get("title_urls")
if not isinstance(raw, dict):
return {}
result: Dict[str, str] = {}
for key, value in raw.items():
key_text = str(key or "").strip().casefold()
url_text = str(value or "").strip()
if not key_text or not url_text:
continue
result[key_text] = url_text
return result
def _save_title_url_cache(self) -> None:
if not self._title_url_cache:
return
while len(self._title_url_cache) > SESSION_CACHE_MAX_TITLE_URLS:
self._title_url_cache.pop(next(iter(self._title_url_cache)))
_session_cache_set("title_urls", self._title_url_cache)
def _remember_anime_result(
self,
title: str,
url: str,
description: str = "",
*,
persist: bool = True,
) -> bool:
title = (title or "").strip()
url = (url or "").strip()
if not title:
return False
changed = False
current = self._anime_results.get(title)
if current is None or (url and current.url != url) or (description and current.description != description):
self._anime_results[title] = SeriesResult(title=title, description=description, url=url)
changed = True
if url:
key = title.casefold()
if self._title_url_cache.get(key) != url:
self._title_url_cache[key] = url
changed = True
if changed and persist:
self._save_title_url_cache()
return changed
    @staticmethod
    def _season_links_cache_name(series_url: str) -> str:
        """Session-cache key for the season-link list of *series_url*."""
        # Hash the URL so the key stays short and free of special characters.
        digest = hashlib.sha1((series_url or "").encode("utf-8")).hexdigest()[:20]
        return f"season_links.{digest}"
@staticmethod
def _season_episodes_cache_name(season_url: str) -> str:
digest = hashlib.sha1((season_url or "").encode("utf-8")).hexdigest()[:20]
return f"season_episodes.{digest}"
def _load_session_season_links(self, series_url: str) -> Optional[List[SeasonInfo]]:
raw = _session_cache_get(self._season_links_cache_name(series_url))
if not isinstance(raw, list):
return None
seasons: List[SeasonInfo] = []
for item in raw:
if not isinstance(item, dict):
continue
try:
number = int(item.get("number"))
except Exception:
continue
url = str(item.get("url") or "").strip()
if number <= 0 or not url:
continue
seasons.append(SeasonInfo(number=number, url=url, episodes=[]))
if not seasons:
return None
seasons.sort(key=lambda s: s.number)
return seasons
def _save_session_season_links(self, series_url: str, seasons: List[SeasonInfo]) -> None:
payload = [{"number": int(season.number), "url": season.url} for season in seasons if season.url]
if payload:
_session_cache_set(self._season_links_cache_name(series_url), payload)
def _load_session_season_episodes(self, season_url: str) -> Optional[List[EpisodeInfo]]:
raw = _session_cache_get(self._season_episodes_cache_name(season_url))
if not isinstance(raw, list):
return None
episodes: List[EpisodeInfo] = []
for item in raw:
if not isinstance(item, dict):
continue
try:
number = int(item.get("number"))
except Exception:
continue
title = str(item.get("title") or "").strip()
original_title = str(item.get("original_title") or "").strip()
url = str(item.get("url") or "").strip()
if number <= 0:
continue
episodes.append(
EpisodeInfo(
number=number,
title=title or f"Episode {number}",
original_title=original_title,
url=url,
)
)
if not episodes:
return None
episodes.sort(key=lambda item: item.number)
return episodes
def _save_session_season_episodes(self, season_url: str, episodes: List[EpisodeInfo]) -> None:
payload = []
for item in episodes:
payload.append(
{
"number": int(item.number),
"title": item.title,
"original_title": item.original_title,
"url": item.url,
}
)
if payload:
_session_cache_set(self._season_episodes_cache_name(season_url), payload)
def capabilities(self) -> set[str]: def capabilities(self) -> set[str]:
return {"popular_series", "genres", "latest_episodes"} return {"popular_series", "genres", "latest_episodes"}
@@ -566,6 +855,12 @@ class AniworldPlugin(BasisPlugin):
wanted = title.casefold().strip() wanted = title.casefold().strip()
cached_url = self._title_url_cache.get(wanted, "")
if cached_url:
result = SeriesResult(title=title, description="", url=cached_url)
self._anime_results[title] = result
return result
for candidate in self._anime_results.values(): for candidate in self._anime_results.values():
if candidate.title and candidate.title.casefold().strip() == wanted: if candidate.title and candidate.title.casefold().strip() == wanted:
return candidate return candidate
@@ -573,7 +868,7 @@ class AniworldPlugin(BasisPlugin):
try: try:
for entry in self._ensure_popular(): for entry in self._ensure_popular():
if entry.title and entry.title.casefold().strip() == wanted: if entry.title and entry.title.casefold().strip() == wanted:
self._anime_results[entry.title] = entry self._remember_anime_result(entry.title, entry.url, entry.description)
return entry return entry
except Exception: except Exception:
pass pass
@@ -582,7 +877,7 @@ class AniworldPlugin(BasisPlugin):
for entries in self._ensure_genres().values(): for entries in self._ensure_genres().values():
for entry in entries: for entry in entries:
if entry.title and entry.title.casefold().strip() == wanted: if entry.title and entry.title.casefold().strip() == wanted:
self._anime_results[entry.title] = entry self._remember_anime_result(entry.title, entry.url, entry.description)
return entry return entry
except Exception: except Exception:
pass pass
@@ -590,7 +885,7 @@ class AniworldPlugin(BasisPlugin):
try: try:
for entry in search_animes(title): for entry in search_animes(title):
if entry.title and entry.title.casefold().strip() == wanted: if entry.title and entry.title.casefold().strip() == wanted:
self._anime_results[entry.title] = entry self._remember_anime_result(entry.title, entry.url, entry.description)
return entry return entry
except Exception: except Exception:
pass pass
@@ -600,8 +895,9 @@ class AniworldPlugin(BasisPlugin):
def _ensure_popular(self) -> List[SeriesResult]: def _ensure_popular(self) -> List[SeriesResult]:
if self._popular_cache is not None: if self._popular_cache is not None:
return list(self._popular_cache) return list(self._popular_cache)
soup = _get_soup_simple(POPULAR_ANIMES_URL) soup = _get_soup_simple(_popular_animes_url())
results: List[SeriesResult] = [] results: List[SeriesResult] = []
cache_dirty = False
seen: set[str] = set() seen: set[str] = set()
for anchor in soup.select("div.seriesListContainer a[href^='/anime/stream/']"): for anchor in soup.select("div.seriesListContainer a[href^='/anime/stream/']"):
href = (anchor.get("href") or "").strip() href = (anchor.get("href") or "").strip()
@@ -623,6 +919,9 @@ class AniworldPlugin(BasisPlugin):
continue continue
seen.add(key) seen.add(key)
results.append(SeriesResult(title=title, description=description, url=url)) results.append(SeriesResult(title=title, description=description, url=url))
cache_dirty = self._remember_anime_result(title, url, description, persist=False) or cache_dirty
if cache_dirty:
self._save_title_url_cache()
self._popular_cache = list(results) self._popular_cache = list(results)
return list(results) return list(results)
@@ -630,7 +929,11 @@ class AniworldPlugin(BasisPlugin):
if not self._requests_available: if not self._requests_available:
return [] return []
entries = self._ensure_popular() entries = self._ensure_popular()
self._anime_results.update({entry.title: entry for entry in entries if entry.title}) cache_dirty = False
for entry in entries:
cache_dirty = self._remember_anime_result(entry.title, entry.url, entry.description, persist=False) or cache_dirty
if cache_dirty:
self._save_title_url_cache()
return [entry.title for entry in entries if entry.title] return [entry.title for entry in entries if entry.title]
def latest_episodes(self, page: int = 1) -> List[LatestEpisode]: def latest_episodes(self, page: int = 1) -> List[LatestEpisode]:
@@ -646,7 +949,7 @@ class AniworldPlugin(BasisPlugin):
if cached is not None: if cached is not None:
return list(cached) return list(cached)
url = LATEST_EPISODES_URL url = _latest_episodes_url()
if page > 1: if page > 1:
url = f"{url}?page={page}" url = f"{url}?page={page}"
@@ -658,8 +961,9 @@ class AniworldPlugin(BasisPlugin):
def _ensure_genres(self) -> Dict[str, List[SeriesResult]]: def _ensure_genres(self) -> Dict[str, List[SeriesResult]]:
if self._genre_cache is not None: if self._genre_cache is not None:
return {key: list(value) for key, value in self._genre_cache.items()} return {key: list(value) for key, value in self._genre_cache.items()}
soup = _get_soup_simple(GENRES_URL) soup = _get_soup_simple(_genres_url())
results: Dict[str, List[SeriesResult]] = {} results: Dict[str, List[SeriesResult]] = {}
cache_dirty = False
genre_blocks = soup.select("#seriesContainer div.genre") genre_blocks = soup.select("#seriesContainer div.genre")
if not genre_blocks: if not genre_blocks:
genre_blocks = soup.select("div.genre") genre_blocks = soup.select("div.genre")
@@ -685,9 +989,14 @@ class AniworldPlugin(BasisPlugin):
continue continue
seen.add(key) seen.add(key)
entries.append(SeriesResult(title=title, description="", url=url)) entries.append(SeriesResult(title=title, description="", url=url))
cache_dirty = self._remember_anime_result(title, url, persist=False) or cache_dirty
if entries: if entries:
results[genre_name] = entries results[genre_name] = entries
if cache_dirty:
self._save_title_url_cache()
self._genre_cache = {key: list(value) for key, value in results.items()} self._genre_cache = {key: list(value) for key, value in results.items()}
self._genre_names_cache = sorted(self._genre_cache.keys(), key=str.casefold)
_session_cache_set("genres", self._genre_names_cache)
# Für spätere Auflösung (Seasons/Episoden) die Titel->URL Zuordnung auffüllen. # Für spätere Auflösung (Seasons/Episoden) die Titel->URL Zuordnung auffüllen.
for entries in results.values(): for entries in results.values():
for entry in entries: for entry in entries:
@@ -697,11 +1006,31 @@ class AniworldPlugin(BasisPlugin):
self._anime_results[entry.title] = entry self._anime_results[entry.title] = entry
return {key: list(value) for key, value in results.items()} return {key: list(value) for key, value in results.items()}
    def _ensure_genre_names(self) -> List[str]:
        """Return the sorted genre-name list, using layered caches.

        Resolution order:
          1. in-memory cache (``self._genre_names_cache``)
          2. session cache (``"genres"`` key)
          3. lightweight HTML fetch + regex extraction of the genre page
          4. full genre scrape via ``self._ensure_genres()``
        The result is de-duplicated, sorted case-insensitively and written
        back to both the in-memory and session caches.
        """
        if self._genre_names_cache is not None:
            return list(self._genre_names_cache)
        cached = _session_cache_get("genres")
        if isinstance(cached, list):
            names = [str(value).strip() for value in cached if str(value).strip()]
            if names:
                self._genre_names_cache = sorted(set(names), key=str.casefold)
                return list(self._genre_names_cache)
        try:
            # Cheap path: one page fetch with regex extraction instead of
            # building the full genre->series mapping.
            body = _get_html_simple(_genres_url())
            names = _extract_genre_names_from_html(body)
        except Exception:
            names = []
        if not names:
            mapping = self._ensure_genres()
            names = list(mapping.keys())
        self._genre_names_cache = sorted({name for name in names if name}, key=str.casefold)
        _session_cache_set("genres", self._genre_names_cache)
        return list(self._genre_names_cache)
def genres(self) -> List[str]: def genres(self) -> List[str]:
if not self._requests_available: if not self._requests_available:
return [] return []
genres = list(self._ensure_genres().keys()) return self._ensure_genre_names()
return [g for g in genres if g]
def titles_for_genre(self, genre: str) -> List[str]: def titles_for_genre(self, genre: str) -> List[str]:
genre = (genre or "").strip() genre = (genre or "").strip()
@@ -718,7 +1047,11 @@ class AniworldPlugin(BasisPlugin):
if not entries: if not entries:
return [] return []
# Zusätzlich sicherstellen, dass die Titel im Cache sind. # Zusätzlich sicherstellen, dass die Titel im Cache sind.
self._anime_results.update({entry.title: entry for entry in entries if entry.title and entry.title not in self._anime_results}) cache_dirty = False
for entry in entries:
cache_dirty = self._remember_anime_result(entry.title, entry.url, entry.description, persist=False) or cache_dirty
if cache_dirty:
self._save_title_url_cache()
return [entry.title for entry in entries if entry.title] return [entry.title for entry in entries if entry.title]
def _season_label(self, number: int) -> str: def _season_label(self, number: int) -> str:
@@ -738,17 +1071,81 @@ class AniworldPlugin(BasisPlugin):
cache_key = (title, season_label) cache_key = (title, season_label)
self._episode_label_cache[cache_key] = {self._episode_label(info): info for info in season_info.episodes} self._episode_label_cache[cache_key] = {self._episode_label(info): info for info in season_info.episodes}
def remember_series_url(self, title: str, series_url: str) -> None:
title = (title or "").strip()
series_url = (series_url or "").strip()
if not title or not series_url:
return
self._remember_anime_result(title, series_url)
def series_url_for_title(self, title: str) -> str:
title = (title or "").strip()
if not title:
return ""
direct = self._anime_results.get(title)
if direct and direct.url:
return direct.url
wanted = title.casefold().strip()
cached_url = self._title_url_cache.get(wanted, "")
if cached_url:
return cached_url
for candidate in self._anime_results.values():
if candidate.title and candidate.title.casefold().strip() == wanted and candidate.url:
return candidate.url
return ""
    def _ensure_season_links(self, title: str) -> List[SeasonInfo]:
        """Return the season links (without episodes) for *title*.

        Resolution order: per-instance cache, session cache, then a live
        scrape with ``load_episodes=False``.  Scrape results are written back
        to both caches.  Returns an empty list when the series is unknown.
        """
        cached = self._season_links_cache.get(title)
        if cached is not None:
            return list(cached)
        anime = self._find_series_by_title(title)
        if not anime:
            return []
        session_links = self._load_session_season_links(anime.url)
        if session_links:
            self._season_links_cache[title] = list(session_links)
            return list(session_links)
        seasons = scrape_anime_detail(anime.url, load_episodes=False)
        self._season_links_cache[title] = list(seasons)
        self._save_session_season_links(anime.url, seasons)
        return list(seasons)
def _ensure_season_episodes(self, title: str, season_number: int) -> Optional[SeasonInfo]:
seasons = self._season_cache.get(title) or []
for season in seasons:
if season.number == season_number and season.episodes:
return season
links = self._ensure_season_links(title)
target = next((season for season in links if season.number == season_number), None)
if not target:
return None
cached_episodes = self._load_session_season_episodes(target.url)
if cached_episodes:
season_info = SeasonInfo(number=target.number, url=target.url, episodes=list(cached_episodes))
updated = [season for season in seasons if season.number != season_number]
updated.append(season_info)
updated.sort(key=lambda item: item.number)
self._season_cache[title] = updated
return season_info
season_soup = _get_soup(target.url, session=get_requests_session("aniworld", headers=HEADERS))
season_info = SeasonInfo(number=target.number, url=target.url, episodes=_extract_episodes(season_soup))
updated = [season for season in seasons if season.number != season_number]
updated.append(season_info)
updated.sort(key=lambda item: item.number)
self._season_cache[title] = updated
self._save_session_season_episodes(target.url, season_info.episodes)
return season_info
def _lookup_episode(self, title: str, season_label: str, episode_label: str) -> Optional[EpisodeInfo]: def _lookup_episode(self, title: str, season_label: str, episode_label: str) -> Optional[EpisodeInfo]:
cache_key = (title, season_label) cache_key = (title, season_label)
cached = self._episode_label_cache.get(cache_key) cached = self._episode_label_cache.get(cache_key)
if cached: if cached:
return cached.get(episode_label) return cached.get(episode_label)
seasons = self._ensure_seasons(title)
number = self._parse_season_number(season_label) number = self._parse_season_number(season_label)
if number is None: if number is None:
return None return None
for season_info in seasons: season_info = self._ensure_season_episodes(title, number)
if season_info.number == number: if season_info:
self._cache_episode_labels(title, season_label, season_info) self._cache_episode_labels(title, season_label, season_info)
return self._episode_label_cache.get(cache_key, {}).get(episode_label) return self._episode_label_cache.get(cache_key, {}).get(episode_label)
return None return None
@@ -758,6 +1155,7 @@ class AniworldPlugin(BasisPlugin):
if not query: if not query:
self._anime_results.clear() self._anime_results.clear()
self._season_cache.clear() self._season_cache.clear()
self._season_links_cache.clear()
self._episode_label_cache.clear() self._episode_label_cache.clear()
self._popular_cache = None self._popular_cache = None
return [] return []
@@ -770,32 +1168,34 @@ class AniworldPlugin(BasisPlugin):
self._season_cache.clear() self._season_cache.clear()
self._episode_label_cache.clear() self._episode_label_cache.clear()
raise RuntimeError(f"AniWorld-Suche fehlgeschlagen: {exc}") from exc raise RuntimeError(f"AniWorld-Suche fehlgeschlagen: {exc}") from exc
self._anime_results = {result.title: result for result in results} self._anime_results = {}
cache_dirty = False
for result in results:
cache_dirty = self._remember_anime_result(result.title, result.url, result.description, persist=False) or cache_dirty
if cache_dirty:
self._save_title_url_cache()
self._season_cache.clear() self._season_cache.clear()
self._season_links_cache.clear()
self._episode_label_cache.clear() self._episode_label_cache.clear()
return [result.title for result in results] return [result.title for result in results]
def _ensure_seasons(self, title: str) -> List[SeasonInfo]: def _ensure_seasons(self, title: str) -> List[SeasonInfo]:
if title in self._season_cache: if title in self._season_cache:
return self._season_cache[title] return self._season_cache[title]
anime = self._find_series_by_title(title) seasons = self._ensure_season_links(title)
if not anime:
return []
seasons = scrape_anime_detail(anime.url)
self._season_cache[title] = list(seasons) self._season_cache[title] = list(seasons)
return list(seasons) return list(seasons)
def seasons_for(self, title: str) -> List[str]: def seasons_for(self, title: str) -> List[str]:
seasons = self._ensure_seasons(title) seasons = self._ensure_seasons(title)
return [self._season_label(season.number) for season in seasons if season.episodes] return [self._season_label(season.number) for season in seasons]
def episodes_for(self, title: str, season: str) -> List[str]: def episodes_for(self, title: str, season: str) -> List[str]:
seasons = self._ensure_seasons(title)
number = self._parse_season_number(season) number = self._parse_season_number(season)
if number is None: if number is None:
return [] return []
for season_info in seasons: season_info = self._ensure_season_episodes(title, number)
if season_info.number == number: if season_info:
labels = [self._episode_label(info) for info in season_info.episodes] labels = [self._episode_label(info) for info in season_info.episodes]
self._cache_episode_labels(title, season, season_info) self._cache_episode_labels(title, season, season_info)
return labels return labels

View File

@@ -30,19 +30,18 @@ except ImportError: # pragma: no cover - allow running outside Kodi
xbmcaddon = None xbmcaddon = None
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url from plugin_helpers import dump_response_html, get_setting_bool, log_error, log_url, notify_url
ADDON_ID = "plugin.video.viewit" ADDON_ID = "plugin.video.viewit"
SETTING_BASE_URL = "einschalten_base_url" SETTING_BASE_URL = "einschalten_base_url"
SETTING_INDEX_PATH = "einschalten_index_path"
SETTING_NEW_TITLES_PATH = "einschalten_new_titles_path"
SETTING_SEARCH_PATH = "einschalten_search_path"
SETTING_GENRES_PATH = "einschalten_genres_path"
SETTING_ENABLE_PLAYBACK = "einschalten_enable_playback"
SETTING_WATCH_PATH_TEMPLATE = "einschalten_watch_path_template"
GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info" GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
GLOBAL_SETTING_LOG_ERRORS = "debug_log_errors"
SETTING_LOG_URLS = "log_urls_einschalten"
SETTING_DUMP_HTML = "dump_html_einschalten"
SETTING_SHOW_URL_INFO = "show_url_info_einschalten"
SETTING_LOG_ERRORS = "log_errors_einschalten"
DEFAULT_BASE_URL = "" DEFAULT_BASE_URL = ""
DEFAULT_INDEX_PATH = "/" DEFAULT_INDEX_PATH = "/"
@@ -153,16 +152,36 @@ def _extract_ng_state_payload(html: str) -> Dict[str, Any]:
def _notify_url(url: str) -> None: def _notify_url(url: str) -> None:
notify_url(ADDON_ID, heading="einschalten", url=url, enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO) notify_url(
ADDON_ID,
heading="Einschalten",
url=url,
enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO,
plugin_setting_id=SETTING_SHOW_URL_INFO,
)
def _log_url(url: str, *, kind: str = "VISIT") -> None: def _log_url(url: str, *, kind: str = "VISIT") -> None:
log_url(ADDON_ID, enabled_setting_id=GLOBAL_SETTING_LOG_URLS, log_filename="einschalten_urls.log", url=url, kind=kind) log_url(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="einschalten_urls.log",
url=url,
kind=kind,
)
def _log_debug_line(message: str) -> None: def _log_debug_line(message: str) -> None:
try: try:
log_url(ADDON_ID, enabled_setting_id=GLOBAL_SETTING_LOG_URLS, log_filename="einschalten_debug.log", url=message, kind="DEBUG") log_url(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="einschalten_debug.log",
url=message,
kind="DEBUG",
)
except Exception: except Exception:
pass pass
@@ -174,6 +193,7 @@ def _log_titles(items: list[MovieItem], *, context: str) -> None:
log_url( log_url(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS, enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="einschalten_titles.log", log_filename="einschalten_titles.log",
url=f"{context}:count={len(items)}", url=f"{context}:count={len(items)}",
kind="TITLE", kind="TITLE",
@@ -182,6 +202,7 @@ def _log_titles(items: list[MovieItem], *, context: str) -> None:
log_url( log_url(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS, enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="einschalten_titles.log", log_filename="einschalten_titles.log",
url=f"{context}:id={item.id} title={item.title}", url=f"{context}:id={item.id} title={item.title}",
kind="TITLE", kind="TITLE",
@@ -194,11 +215,22 @@ def _log_response_html(url: str, body: str) -> None:
dump_response_html( dump_response_html(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_DUMP_HTML, enabled_setting_id=GLOBAL_SETTING_DUMP_HTML,
plugin_setting_id=SETTING_DUMP_HTML,
url=url, url=url,
body=body, body=body,
filename_prefix="einschalten_response", filename_prefix="einschalten_response",
) )
def _log_error(message: str) -> None:
log_error(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_ERRORS,
plugin_setting_id=SETTING_LOG_ERRORS,
log_filename="einschalten_errors.log",
message=message,
)
def _u_matches(value: Any, expected_path: str) -> bool: def _u_matches(value: Any, expected_path: str) -> bool:
raw = (value or "").strip() raw = (value or "").strip()
if not raw: if not raw:
@@ -474,7 +506,7 @@ def _parse_ng_state_genres(payload: Dict[str, Any]) -> Dict[str, int]:
class EinschaltenPlugin(BasisPlugin): class EinschaltenPlugin(BasisPlugin):
"""Metadata-Plugin für eine autorisierte Quelle.""" """Metadata-Plugin für eine autorisierte Quelle."""
name = "einschalten" name = "Einschalten"
def __init__(self) -> None: def __init__(self) -> None:
self.is_available = REQUESTS_AVAILABLE self.is_available = REQUESTS_AVAILABLE
@@ -501,21 +533,21 @@ class EinschaltenPlugin(BasisPlugin):
base = self._get_base_url() base = self._get_base_url()
if not base: if not base:
return "" return ""
path = _get_setting_text(SETTING_INDEX_PATH, default=DEFAULT_INDEX_PATH).strip() or "/" path = DEFAULT_INDEX_PATH
return urljoin(base + "/", path.lstrip("/")) return urljoin(base + "/", path.lstrip("/"))
def _new_titles_url(self) -> str: def _new_titles_url(self) -> str:
base = self._get_base_url() base = self._get_base_url()
if not base: if not base:
return "" return ""
path = _get_setting_text(SETTING_NEW_TITLES_PATH, default=DEFAULT_NEW_TITLES_PATH).strip() or "/movies/new" path = DEFAULT_NEW_TITLES_PATH
return urljoin(base + "/", path.lstrip("/")) return urljoin(base + "/", path.lstrip("/"))
def _genres_url(self) -> str: def _genres_url(self) -> str:
base = self._get_base_url() base = self._get_base_url()
if not base: if not base:
return "" return ""
path = _get_setting_text(SETTING_GENRES_PATH, default=DEFAULT_GENRES_PATH).strip() or "/genres" path = DEFAULT_GENRES_PATH
return urljoin(base + "/", path.lstrip("/")) return urljoin(base + "/", path.lstrip("/"))
def _api_genres_url(self) -> str: def _api_genres_url(self) -> str:
@@ -528,7 +560,7 @@ class EinschaltenPlugin(BasisPlugin):
base = self._get_base_url() base = self._get_base_url()
if not base: if not base:
return "" return ""
path = _get_setting_text(SETTING_SEARCH_PATH, default=DEFAULT_SEARCH_PATH).strip() or "/search" path = DEFAULT_SEARCH_PATH
url = urljoin(base + "/", path.lstrip("/")) url = urljoin(base + "/", path.lstrip("/"))
return f"{url}?{urlencode({'query': query})}" return f"{url}?{urlencode({'query': query})}"
@@ -570,8 +602,6 @@ class EinschaltenPlugin(BasisPlugin):
base = self._get_base_url() base = self._get_base_url()
if not base: if not base:
return "" return ""
template = _get_setting_text(SETTING_WATCH_PATH_TEMPLATE, default=DEFAULT_WATCH_PATH_TEMPLATE).strip()
if not template:
template = DEFAULT_WATCH_PATH_TEMPLATE template = DEFAULT_WATCH_PATH_TEMPLATE
try: try:
path = template.format(id=int(movie_id)) path = template.format(id=int(movie_id))
@@ -624,7 +654,8 @@ class EinschaltenPlugin(BasisPlugin):
_log_response_html(resp.url or url, resp.text) _log_response_html(resp.url or url, resp.text)
self._detail_html_by_id[movie_id] = resp.text or "" self._detail_html_by_id[movie_id] = resp.text or ""
return resp.text or "" return resp.text or ""
except Exception: except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
return "" return ""
def _fetch_watch_payload(self, movie_id: int) -> dict[str, object]: def _fetch_watch_payload(self, movie_id: int) -> dict[str, object]:
@@ -645,7 +676,8 @@ class EinschaltenPlugin(BasisPlugin):
_log_response_html(resp.url or url, resp.text) _log_response_html(resp.url or url, resp.text)
data = resp.json() data = resp.json()
return dict(data) if isinstance(data, dict) else {} return dict(data) if isinstance(data, dict) else {}
except Exception: except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
return {} return {}
def _watch_stream_url(self, movie_id: int) -> str: def _watch_stream_url(self, movie_id: int) -> str:
@@ -996,21 +1028,17 @@ class EinschaltenPlugin(BasisPlugin):
movie_id = self._ensure_title_id(title) movie_id = self._ensure_title_id(title)
if movie_id is not None: if movie_id is not None:
self._fetch_movie_detail(movie_id) self._fetch_movie_detail(movie_id)
if _get_setting_bool(SETTING_ENABLE_PLAYBACK, default=False):
# Playback: expose a single "Stream" folder (inside: 1 playable item = Filmtitel). # Playback: expose a single "Stream" folder (inside: 1 playable item = Filmtitel).
return ["Stream"] return ["Stream"]
return ["Details"]
def episodes_for(self, title: str, season: str) -> List[str]: def episodes_for(self, title: str, season: str) -> List[str]:
season = (season or "").strip() season = (season or "").strip()
if season.casefold() == "stream" and _get_setting_bool(SETTING_ENABLE_PLAYBACK, default=False): if season.casefold() == "stream":
title = (title or "").strip() title = (title or "").strip()
return [title] if title else [] return [title] if title else []
return [] return []
def stream_link_for(self, title: str, season: str, episode: str) -> Optional[str]: def stream_link_for(self, title: str, season: str, episode: str) -> Optional[str]:
if not _get_setting_bool(SETTING_ENABLE_PLAYBACK, default=False):
return None
title = (title or "").strip() title = (title or "").strip()
season = (season or "").strip() season = (season or "").strip()
episode = (episode or "").strip() episode = (episode or "").strip()

View File

@@ -0,0 +1,668 @@
"""Filmpalast Integration (movie-style provider).
Hinweis:
- Der Parser ist bewusst defensiv und arbeitet mit mehreren Fallback-Selektoren,
da Filmpalast-Layouts je Domain variieren koennen.
"""
from __future__ import annotations
from dataclasses import dataclass
import re
from urllib.parse import quote, urlencode
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias
try: # pragma: no cover - optional dependency
import requests
from bs4 import BeautifulSoup # type: ignore[import-not-found]
except ImportError as exc: # pragma: no cover - optional dependency
requests = None
BeautifulSoup = None
REQUESTS_AVAILABLE = False
REQUESTS_IMPORT_ERROR = exc
else:
REQUESTS_AVAILABLE = True
REQUESTS_IMPORT_ERROR = None
from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, get_setting_string, log_error, log_url, notify_url
from http_session_pool import get_requests_session
if TYPE_CHECKING: # pragma: no cover
from requests import Session as RequestsSession
from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found]
else: # pragma: no cover
RequestsSession: TypeAlias = Any
BeautifulSoupT: TypeAlias = Any
# Kodi add-on id used by the shared settings/logging helpers.
ADDON_ID = "plugin.video.viewit"
# Settings key holding the user-configurable Filmpalast base URL.
SETTING_BASE_URL = "filmpalast_base_url"
DEFAULT_BASE_URL = "https://filmpalast.to"
# HTTP timeout (seconds) for every request made by this plugin.
DEFAULT_TIMEOUT = 20
# Hoster names tried in this order when picking a stream link.
DEFAULT_PREFERRED_HOSTERS = ["voe", "vidoza", "streamtape", "doodstream", "mixdrop"]
# Synthetic pseudo-URL prefix marking a series entry (series have no single detail page).
SERIES_HINT_PREFIX = "series://filmpalast/"
# Matches "S<season>E<episode>" markers embedded in result titles (e.g. "S01 E03").
SEASON_EPISODE_RE = re.compile(r"\bS\s*(\d{1,2})\s*E\s*(\d{1,3})\b", re.IGNORECASE)
# Add-on-wide debug toggles ...
GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
GLOBAL_SETTING_LOG_ERRORS = "debug_log_errors"
# ... and their Filmpalast-specific overrides.
SETTING_LOG_URLS = "log_urls_filmpalast"
SETTING_DUMP_HTML = "dump_html_filmpalast"
SETTING_SHOW_URL_INFO = "show_url_info_filmpalast"
SETTING_LOG_ERRORS = "log_errors_filmpalast"
# Browser-like request headers (German Accept-Language matches the site).
HEADERS = {
    "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
    "Accept-Language": "de-DE,de;q=0.9,en;q=0.8",
    "Connection": "keep-alive",
}
@dataclass(frozen=True)
class SearchHit:
    """A single search result: display title plus absolute detail-page URL."""

    title: str
    url: str
@dataclass(frozen=True)
class EpisodeEntry:
    """One series episode parsed from a search-hit title carrying an SxxExx marker."""

    # Season number taken from the SxxExx marker.
    season: int
    # Episode number taken from the SxxExx marker.
    episode: int
    # Remaining title text after the marker (may be "").
    suffix: str
    # Absolute detail-page URL of this episode.
    url: str
def _get_base_url() -> str:
    """Return the configured base URL (default if unset), without a trailing slash."""
    configured = get_setting_string(ADDON_ID, SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip()
    return (configured or DEFAULT_BASE_URL).rstrip("/")
def _absolute_url(url: str) -> str:
url = (url or "").strip()
if not url:
return ""
if url.startswith("http://") or url.startswith("https://"):
return url
if url.startswith("//"):
return f"https:{url}"
if url.startswith("/"):
return f"{_get_base_url()}{url}"
return f"{_get_base_url()}/{url.lstrip('/')}"
def _normalize_search_text(value: str) -> str:
value = (value or "").casefold()
value = re.sub(r"[^a-z0-9]+", " ", value)
value = re.sub(r"\s+", " ", value).strip()
return value
def _matches_query(query: str, *, title: str) -> bool:
    """Whole-phrase match: True when the normalized query occurs inside the title."""
    needle = _normalize_search_text(query)
    if not needle:
        return False
    haystack = f" {_normalize_search_text(title)} "
    return f" {needle} " in haystack
def _is_probably_content_url(url: str) -> bool:
lower = (url or "").casefold()
if not lower:
return False
block_markers = (
"/genre/",
"/kategorie/",
"/category/",
"/tag/",
"/login",
"/register",
"/kontakt",
"/impressum",
"/datenschutz",
"/dmca",
"/agb",
"javascript:",
"#",
)
if any(marker in lower for marker in block_markers):
return False
allow_markers = ("/stream/", "/film/", "/movie/", "/serien/", "/serie/", "/title/")
return any(marker in lower for marker in allow_markers)
def _log_url_event(url: str, *, kind: str = "VISIT") -> None:
    """Append *url* to the Filmpalast URL log (honours global and plugin toggles)."""
    log_url(
        ADDON_ID,
        enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
        plugin_setting_id=SETTING_LOG_URLS,
        log_filename="filmpalast_urls.log",
        url=url,
        kind=kind,
    )
def _log_visit(url: str) -> None:
    """Log a page visit and optionally surface it as an on-screen notification."""
    _log_url_event(url, kind="VISIT")
    notify_url(
        ADDON_ID,
        heading="Filmpalast",
        url=url,
        enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO,
        plugin_setting_id=SETTING_SHOW_URL_INFO,
    )
def _log_response_html(url: str, body: str) -> None:
    """Dump a fetched HTML body to disk when the dump-HTML debug setting is on."""
    dump_response_html(
        ADDON_ID,
        enabled_setting_id=GLOBAL_SETTING_DUMP_HTML,
        plugin_setting_id=SETTING_DUMP_HTML,
        url=url,
        body=body,
        filename_prefix="filmpalast_response",
    )
def _log_error_message(message: str) -> None:
    """Write *message* to the Filmpalast error log when error logging is enabled."""
    log_error(
        ADDON_ID,
        enabled_setting_id=GLOBAL_SETTING_LOG_ERRORS,
        plugin_setting_id=SETTING_LOG_ERRORS,
        log_filename="filmpalast_errors.log",
        message=message,
    )
def _is_series_hint_url(value: str) -> bool:
    """True when *value* is a synthetic series marker rather than a real detail URL."""
    return (value or "").startswith(SERIES_HINT_PREFIX)
def _series_hint_value(title: str) -> str:
    """Build the synthetic series-hint pseudo-URL for *title* (URL-encoded)."""
    encoded = quote((title or "").strip(), safe="")
    if not encoded:
        return SERIES_HINT_PREFIX
    return SERIES_HINT_PREFIX + encoded
def _extract_number(value: str) -> Optional[int]:
match = re.search(r"(\d+)", value or "")
if not match:
return None
try:
return int(match.group(1))
except Exception:
return None
def _strip_series_alias(title: str) -> str:
return re.sub(r"\s*\(serie\)\s*$", "", title or "", flags=re.IGNORECASE).strip()
def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> BeautifulSoupT:
    """Fetch *url* and return the parsed BeautifulSoup document.

    Raises RuntimeError when requests/bs4 are unavailable; any HTTP failure is
    logged and re-raised. Redirects and response bodies are logged according to
    the debug settings.
    """
    if requests is None or BeautifulSoup is None:
        raise RuntimeError("requests/bs4 sind nicht verfuegbar.")
    _log_visit(url)
    # Reuse the pooled session unless the caller supplied its own.
    sess = session or get_requests_session("filmpalast", headers=HEADERS)
    try:
        response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
        response.raise_for_status()
    except Exception as exc:
        _log_error_message(f"GET {url} failed: {exc}")
        raise
    if response.url and response.url != url:
        _log_url_event(response.url, kind="REDIRECT")
    _log_response_html(url, response.text)
    return BeautifulSoup(response.text, "html.parser")
class FilmpalastPlugin(BasisPlugin):
    """Movie/series provider plugin backed by filmpalast.to."""

    name = "Filmpalast"

    def __init__(self) -> None:
        # title -> detail URL (or a series-hint pseudo URL).
        self._title_to_url: Dict[str, str] = {}
        # series title -> season number -> episode number -> EpisodeEntry.
        self._series_entries: Dict[str, Dict[int, Dict[int, EpisodeEntry]]] = {}
        # detail URL -> {hoster name: play URL}.
        self._hoster_cache: Dict[str, Dict[str, str]] = {}
        self._requests_available = REQUESTS_AVAILABLE
        self._default_preferred_hosters: List[str] = list(DEFAULT_PREFERRED_HOSTERS)
        self._preferred_hosters: List[str] = list(self._default_preferred_hosters)
        self.is_available = True
        self.unavailable_reason: Optional[str] = None
        if not self._requests_available:  # pragma: no cover - optional dependency
            self.is_available = False
            self.unavailable_reason = (
                "requests/bs4 fehlen. Installiere 'requests' und 'beautifulsoup4'."
            )
            if REQUESTS_IMPORT_ERROR:
                print(f"FilmpalastPlugin Importfehler: {REQUESTS_IMPORT_ERROR}")
def _lookup_title_url(self, title: str) -> str:
    """Resolve a cached URL for *title*: exact match first, then case-insensitive."""
    cleaned = (title or "").strip()
    if not cleaned:
        return ""
    exact = self._title_to_url.get(cleaned)
    if exact:
        return exact
    folded = cleaned.casefold()
    for known_title, known_url in self._title_to_url.items():
        if known_url and known_title.casefold() == folded:
            return known_url
    return ""
def _series_key_for_title(self, title: str) -> str:
    """Return the canonical key under which *title* is cached as a series, or ""."""
    cleaned = (title or "").strip()
    if not cleaned:
        return ""
    if cleaned in self._series_entries:
        return cleaned
    folded = cleaned.casefold()
    for known_key in self._series_entries:
        if known_key.casefold() == folded:
            return known_key
    return ""
def _has_series_entries(self, title: str) -> bool:
    """True when episode entries are already cached for *title* (any casing)."""
    return self._series_key_for_title(title) != ""
def _episode_entry_from_hit(self, hit: SearchHit) -> Optional[Tuple[str, EpisodeEntry]]:
    """Split a search hit into (series title, EpisodeEntry).

    Returns None when the hit title carries no SxxExx marker, i.e. it is a
    plain movie title.
    """
    title = (hit.title or "").strip()
    if not title:
        return None
    marker = SEASON_EPISODE_RE.search(title)
    if not marker:
        return None
    try:
        season_number = int(marker.group(1))
        episode_number = int(marker.group(2))
    except Exception:
        return None
    # Text before the marker is the series name; strip separator punctuation.
    series_title = re.sub(r"\s+", " ", title[: marker.start()] or "").strip(" -|:;,_")
    if not series_title:
        return None
    # Text after the marker becomes the episode suffix (often the episode name).
    suffix = re.sub(r"\s+", " ", title[marker.end() :] or "").strip(" -|:;,_")
    entry = EpisodeEntry(season=season_number, episode=episode_number, suffix=suffix, url=hit.url)
    return (series_title, entry)
def _add_series_entry(self, series_title: str, entry: EpisodeEntry) -> None:
    """Insert *entry* into the nested series cache; the first URL per episode wins."""
    if not series_title or not entry.url:
        return
    season_map = self._series_entries.setdefault(series_title, {})
    episode_map = season_map.setdefault(entry.season, {})
    episode_map.setdefault(entry.episode, entry)
def _ensure_series_entries_for_title(self, title: str) -> str:
    """Ensure episode entries for *title* are cached and return the cache key.

    Runs a search when nothing is cached yet, collecting every SxxExx hit whose
    series name matches the query. Returns "" when the title cannot be resolved
    as a series.
    """
    series_key = self._series_key_for_title(title)
    if series_key:
        return series_key
    original_title = (title or "").strip()
    # Search without a trailing " (Serie)" alias suffix.
    lookup_title = _strip_series_alias(original_title)
    if not lookup_title:
        return ""
    if not self._requests_available:
        return ""
    wanted = _normalize_search_text(lookup_title)
    hits = self._search_hits(lookup_title)
    for hit in hits:
        parsed = self._episode_entry_from_hit(hit)
        if not parsed:
            continue
        series_title, entry = parsed
        # Only keep episodes whose series name matches the query exactly.
        if wanted and _normalize_search_text(series_title) != wanted:
            continue
        self._add_series_entry(series_title, entry)
        self._title_to_url.setdefault(series_title, _series_hint_value(series_title))
    resolved = self._series_key_for_title(original_title) or self._series_key_for_title(lookup_title)
    if resolved and original_title and original_title != resolved:
        # Alias the original (possibly suffixed) title to the resolved entry.
        self._series_entries[original_title] = self._series_entries[resolved]
        self._title_to_url.setdefault(original_title, _series_hint_value(resolved))
        return original_title
    return resolved
def _detail_url_for_selection(self, title: str, season: str, episode: str) -> str:
    """Resolve the detail-page URL for a (title, season, episode) selection."""
    series_key = self._series_key_for_title(title) or self._ensure_series_entries_for_title(title)
    if not series_key:
        # Movie path: a single detail page per title.
        return self._ensure_title_url(title)
    season_number = _extract_number(season)
    episode_number = _extract_number(episode)
    if season_number is None or episode_number is None:
        return ""
    entry = self._series_entries.get(series_key, {}).get(season_number, {}).get(episode_number)
    return entry.url if entry else ""
def _search_hits(self, query: str) -> List[SearchHit]:
    """Run the site search for *query* and return deduplicated SearchHit objects.

    Network failures are logged and yield an empty result instead of raising.
    """
    query = (query or "").strip()
    if not query:
        return []
    if not self._requests_available or requests is None:
        return []
    session = get_requests_session("filmpalast", headers=HEADERS)
    # Single search endpoint today; kept as a list so fallbacks can be added.
    search_requests = [(_absolute_url(f"/search/title/{quote(query)}"), None)]
    hits: List[SearchHit] = []
    seen_titles: set[str] = set()
    seen_urls: set[str] = set()
    for base_url, params in search_requests:
        try:
            request_url = base_url if not params else f"{base_url}?{urlencode(params)}"
            _log_url_event(request_url, kind="GET")
            _log_visit(request_url)
            response = session.get(base_url, params=params, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
            response.raise_for_status()
            if response.url and response.url != request_url:
                _log_url_event(response.url, kind="REDIRECT")
            _log_response_html(request_url, response.text)
            soup = BeautifulSoup(response.text, "html.parser")
        except Exception as exc:
            _log_error_message(f"search request failed ({base_url}): {exc}")
            continue
        # Primary layout: result articles with linked headings.
        anchors = soup.select("article.liste h2 a[href], article.liste h3 a[href]")
        if not anchors:
            # Fallback: any anchor pointing at a /stream/ page.
            anchors = soup.select("a[href*='/stream/'][title], a[href*='/stream/']")
        for anchor in anchors:
            href = (anchor.get("href") or "").strip()
            if not href:
                continue
            # Normalize: drop fragment/query and trailing slash before dedup.
            url = _absolute_url(href).split("#", 1)[0].split("?", 1)[0].rstrip("/")
            if not _is_probably_content_url(url):
                continue
            title = (anchor.get("title") or anchor.get_text(" ", strip=True)).strip()
            title = (title or "").strip()
            if not title:
                continue
            # Skip generic button captions that are not real titles.
            if title.casefold() in {"details/play", "play", "details"}:
                continue
            if not _matches_query(query, title=title):
                continue
            title_key = title.casefold()
            url_key = url.casefold()
            if title_key in seen_titles or url_key in seen_urls:
                continue
            seen_titles.add(title_key)
            seen_urls.add(url_key)
            _log_url_event(url, kind="PARSE")
            hits.append(SearchHit(title=title, url=url))
        if hits:
            break
    return hits
async def search_titles(self, query: str) -> List[str]:
    """Search Filmpalast and return a case-insensitively sorted title list.

    Rebuilds all per-search caches. Episode hits (SxxExx) are grouped into
    series entries; when a series name collides with a movie title the series
    is exposed under an " (Serie)" alias.
    """
    hits = self._search_hits(query)
    # Fresh search invalidates every previous mapping.
    self._title_to_url = {}
    self._series_entries = {}
    self._hoster_cache.clear()
    movie_titles: List[str] = []
    series_titles_seen: set[str] = set()
    for hit in hits:
        parsed = self._episode_entry_from_hit(hit)
        if parsed:
            series_title, entry = parsed
            self._add_series_entry(series_title, entry)
            if series_title.casefold() not in series_titles_seen:
                self._title_to_url[series_title] = _series_hint_value(series_title)
                series_titles_seen.add(series_title.casefold())
            continue
        title = (hit.title or "").strip()
        if not title:
            continue
        movie_titles.append(title)
        self._title_to_url[title] = hit.url
    titles: List[str] = list(movie_titles)
    movie_keys = {entry.casefold() for entry in movie_titles}
    for series_title in sorted(self._series_entries.keys(), key=lambda value: value.casefold()):
        if series_title.casefold() in movie_keys:
            # Name clash with a movie: list the series under an alias.
            alias = f"{series_title} (Serie)"
            self._title_to_url[alias] = self._title_to_url.get(series_title, _series_hint_value(series_title))
            self._series_entries[alias] = self._series_entries[series_title]
            titles.append(alias)
        else:
            titles.append(series_title)
    titles.sort(key=lambda value: value.casefold())
    return titles
def _ensure_title_url(self, title: str) -> str:
    """Return the movie detail URL for *title*, searching on demand.

    Returns "" for series (only a hint pseudo-URL is cached) and for titles
    that cannot be resolved.
    """
    title = (title or "").strip()
    if not title:
        return ""
    direct = self._lookup_title_url(title)
    if direct and _is_series_hint_url(direct):
        # Series have no single detail page.
        return ""
    if direct:
        self._title_to_url[title] = direct
        return direct
    if self._has_series_entries(title) or self._ensure_series_entries_for_title(title):
        self._title_to_url[title] = _series_hint_value(title)
        return ""
    wanted = title.casefold()
    hits = self._search_hits(title)
    for hit in hits:
        if self._episode_entry_from_hit(hit):
            # Skip episode hits; we want the plain movie page.
            continue
        if hit.title.casefold() == wanted and hit.url:
            self._title_to_url[title] = hit.url
            return hit.url
    return ""
def remember_series_url(self, title: str, series_url: str) -> None:
    """Cache an externally discovered detail URL and drop stale hoster data."""
    cleaned_title = (title or "").strip()
    cleaned_url = (series_url or "").strip()
    if cleaned_title and cleaned_url:
        self._title_to_url[cleaned_title] = cleaned_url
        self._hoster_cache.clear()
def series_url_for_title(self, title: str) -> str:
    """Return the cached URL (or series hint) for *title*; "" when unknown."""
    cleaned = (title or "").strip()
    if not cleaned:
        return ""
    cached = self._lookup_title_url(cleaned)
    if cached:
        return cached
    series_key = self._series_key_for_title(cleaned)
    return _series_hint_value(series_key) if series_key else ""
def is_movie(self, title: str) -> bool:
    """Heuristically decide whether *title* names a movie rather than a series."""
    cleaned = (title or "").strip()
    if not cleaned:
        return False
    known = self._lookup_title_url(cleaned)
    if known:
        # A cached real URL means movie; a series-hint pseudo URL means series.
        return not _is_series_hint_url(known)
    if SEASON_EPISODE_RE.search(cleaned):
        return False
    if self._has_series_entries(cleaned) or self._ensure_series_entries_for_title(cleaned):
        return False
    return True
@staticmethod
def _normalize_hoster_name(name: str) -> str:
    """Trim a hoster display name and collapse inner whitespace runs."""
    trimmed = (name or "").strip()
    if not trimmed:
        return ""
    return re.sub(r"\s+", " ", trimmed)
def _extract_hoster_links(self, soup: BeautifulSoupT) -> Dict[str, str]:
    """Collect {hoster name: play URL} pairs from a parsed detail page.

    Tries the primary layout first (one UL per hoster), then falls back to
    bare play buttons inside the stream area.
    """
    hosters: Dict[str, str] = {}
    if not soup:
        return hosters
    # Primary layout: each hoster in its own UL with hostName + play link.
    for block in soup.select("ul.currentStreamLinks"):
        host_name_node = block.select_one("li.hostBg .hostName")
        host_name = self._normalize_hoster_name(host_name_node.get_text(" ", strip=True) if host_name_node else "")
        play_anchor = block.select_one("li.streamPlayBtn a[href], a.button.iconPlay[href]")
        href = (play_anchor.get("href") if play_anchor else "") or ""
        play_url = _absolute_url(href).strip()
        if not play_url:
            continue
        if not host_name:
            # No hostName node: fall back to the anchor text.
            host_name = self._normalize_hoster_name(play_anchor.get_text(" ", strip=True) if play_anchor else "")
        if not host_name:
            host_name = "Unbekannt"
        if host_name not in hosters:
            hosters[host_name] = play_url
    # Fallback: direct play buttons inside the stream area.
    if not hosters:
        for anchor in soup.select("#grap-stream-list a.button.iconPlay[href], .streamLinksWrapper a.button.iconPlay[href]"):
            href = (anchor.get("href") or "").strip()
            play_url = _absolute_url(href).strip()
            if not play_url:
                continue
            text_name = self._normalize_hoster_name(anchor.get_text(" ", strip=True))
            host_name = text_name if text_name and text_name.casefold() not in {"play", "details play"} else "Unbekannt"
            if host_name in hosters:
                # Disambiguate duplicate names instead of overwriting.
                host_name = f"{host_name} #{len(hosters) + 1}"
            hosters[host_name] = play_url
    return hosters
def _hosters_for_detail_url(self, detail_url: str) -> Dict[str, str]:
    """Fetch (or reuse cached) hoster links for a detail-page URL.

    Returns a fresh copy so callers cannot mutate the cache; failures yield {}.
    """
    cleaned = (detail_url or "").strip()
    if not cleaned:
        return {}
    cached = self._hoster_cache.get(cleaned)
    if cached is not None:
        return dict(cached)
    if not self._requests_available:
        return {}
    try:
        soup = _get_soup(cleaned, session=get_requests_session("filmpalast", headers=HEADERS))
    except Exception:
        return {}
    hosters = self._extract_hoster_links(soup)
    for play_url in hosters.values():
        _log_url_event(play_url, kind="PARSE")
    self._hoster_cache[cleaned] = dict(hosters)
    return dict(hosters)
def seasons_for(self, title: str) -> List[str]:
    """List season labels for a series, or ["Film"] for a resolvable movie."""
    cleaned = (title or "").strip()
    if not cleaned:
        return []
    series_key = self._series_key_for_title(cleaned) or self._ensure_series_entries_for_title(cleaned)
    if series_key:
        season_numbers = sorted(self._series_entries.get(series_key, {}))
        return [f"Staffel {number}" for number in season_numbers]
    return ["Film"] if self._ensure_title_url(cleaned) else []
def episodes_for(self, title: str, season: str) -> List[str]:
    """List episode labels for a season, or ["Stream"] for a movie title."""
    cleaned = (title or "").strip()
    series_key = self._series_key_for_title(cleaned) or self._ensure_series_entries_for_title(cleaned)
    if not series_key:
        return ["Stream"] if self._ensure_title_url(cleaned) else []
    season_number = _extract_number(season)
    if season_number is None:
        return []
    episode_map = self._series_entries.get(series_key, {}).get(season_number, {})
    labels: List[str] = []
    for episode_number in sorted(episode_map):
        entry = episode_map[episode_number]
        if entry.suffix:
            labels.append(f"Episode {episode_number} - {entry.suffix}")
        else:
            labels.append(f"Episode {episode_number}")
    return labels
def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]:
    """Return the hoster names offered for the selected title/season/episode."""
    page_url = self._detail_url_for_selection(title, season, episode)
    # Iterating the mapping yields its keys in insertion order.
    return list(self._hosters_for_detail_url(page_url))
def stream_link_for(self, title: str, season: str, episode: str) -> Optional[str]:
    """Pick the best stream URL for the given selection.

    Preferred hosters win first (case-insensitive substring match on the
    hoster name or its URL); otherwise the first known hoster is used.
    When no hoster links are known, the detail page itself is scanned for
    iframe/player candidates, and the detail URL is returned as a last
    resort.  Returns ``None`` only when no detail URL can be determined.
    """
    detail_url = self._detail_url_for_selection(title, season, episode)
    if not detail_url:
        return None
    hosters = self._hosters_for_detail_url(detail_url)
    if hosters:
        for preferred in self._preferred_hosters:
            needle = (preferred or "").strip().casefold()
            if not needle:
                continue
            for name, url in hosters.items():
                if needle in name.casefold() or needle in url.casefold():
                    _log_url_event(url, kind="FOUND")
                    return url
        fallback = next(iter(hosters.values()))
        _log_url_event(fallback, kind="FOUND")
        return fallback
    if not self._requests_available:
        return detail_url
    try:
        page = _get_soup(detail_url, session=get_requests_session("filmpalast", headers=HEADERS))
    except Exception:
        return detail_url
    # Collect iframe sources plus anchors that look like player links.
    candidates: List[str] = []
    for iframe in page.select("iframe[src]"):
        src = (iframe.get("src") or "").strip()
        if src:
            candidates.append(_absolute_url(src))
    for anchor in page.select("a[href]"):
        href = (anchor.get("href") or "").strip()
        if not href:
            continue
        lowered = href.casefold()
        if any(marker in lowered for marker in ("watch", "stream", "player")):
            candidates.append(_absolute_url(href))
    # Keep first occurrences only (case-insensitive) and use the winner.
    unique: List[str] = []
    seen: set[str] = set()
    for candidate in candidates:
        fingerprint = candidate.casefold()
        if fingerprint not in seen:
            seen.add(fingerprint)
            unique.append(candidate)
    if unique:
        _log_url_event(unique[0], kind="FOUND")
        return unique[0]
    return detail_url
def set_preferred_hosters(self, hosters: List[str]) -> None:
    """Replace the hoster preference order.

    Entries are stripped and lower-cased; blanks are dropped.  An input
    that normalizes to nothing keeps the current preference list.
    """
    cleaned: List[str] = []
    for hoster in hosters:
        text = str(hoster).strip()
        if text:
            cleaned.append(text.lower())
    if cleaned:
        self._preferred_hosters = cleaned
def reset_preferred_hosters(self) -> None:
    """Restore the built-in hoster preference order (as an independent copy)."""
    self._preferred_hosters = self._default_preferred_hosters[:]
def resolve_stream_link(self, link: str) -> Optional[str]:
    """Resolve a hoster link to a playable URL.

    First follows HTTP redirects (best effort), then hands the result to
    the optional ResolveURL backend.  Returns the resolved media URL, the
    redirect target, or the input link unchanged; ``None`` only for an
    empty input.
    """
    if not link:
        return None
    target = link
    if self._requests_available:
        # Best effort: follow redirects to the final location; keep the
        # original link on any network/HTTP failure.
        try:
            sess = get_requests_session("filmpalast", headers=HEADERS)
            reply = sess.get(link, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True)
            reply.raise_for_status()
            target = (reply.url or link).strip() or link
        except Exception:
            target = link
    # ResolveURL is optional; silently skip it when unavailable.
    try:
        from resolveurl_backend import resolve as resolver
    except Exception:
        resolver = None
    if callable(resolver):
        media_url = resolver(target)
        if media_url:
            _log_url_event("ResolveURL", kind="HOSTER_RESOLVER")
            _log_url_event(media_url, kind="MEDIA")
            return media_url
    if target:
        _log_url_event(target, kind="FINAL")
        return target
    return None

View File

@@ -10,9 +10,13 @@ from __future__ import annotations
from dataclasses import dataclass, field from dataclasses import dataclass, field
from datetime import datetime from datetime import datetime
from html import unescape
import json
import hashlib import hashlib
import os import os
import re import re
import time
import unicodedata
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias
try: # pragma: no cover - optional dependency try: # pragma: no cover - optional dependency
@@ -37,7 +41,7 @@ except ImportError: # pragma: no cover - allow running outside Kodi
xbmcgui = None xbmcgui = None
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url from plugin_helpers import dump_response_html, get_setting_bool, get_setting_string, log_error, log_url, notify_url
from http_session_pool import get_requests_session from http_session_pool import get_requests_session
from regex_patterns import SEASON_EPISODE_TAG, SEASON_EPISODE_URL from regex_patterns import SEASON_EPISODE_TAG, SEASON_EPISODE_URL
@@ -49,22 +53,28 @@ else: # pragma: no cover
BeautifulSoupT: TypeAlias = Any BeautifulSoupT: TypeAlias = Any
BASE_URL = "https://s.to" SETTING_BASE_URL = "serienstream_base_url"
SERIES_BASE_URL = f"{BASE_URL}/serie/stream" DEFAULT_BASE_URL = "https://s.to"
POPULAR_SERIES_URL = f"{BASE_URL}/beliebte-serien"
LATEST_EPISODES_URL = f"{BASE_URL}"
DEFAULT_PREFERRED_HOSTERS = ["voe"] DEFAULT_PREFERRED_HOSTERS = ["voe"]
DEFAULT_TIMEOUT = 20 DEFAULT_TIMEOUT = 20
ADDON_ID = "plugin.video.viewit" ADDON_ID = "plugin.video.viewit"
GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info" GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
GLOBAL_SETTING_LOG_ERRORS = "debug_log_errors"
SETTING_LOG_URLS = "log_urls_serienstream"
SETTING_DUMP_HTML = "dump_html_serienstream"
SETTING_SHOW_URL_INFO = "show_url_info_serienstream"
SETTING_LOG_ERRORS = "log_errors_serienstream"
HEADERS = { HEADERS = {
"User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)", "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Language": "de-DE,de;q=0.9,en;q=0.8", "Accept-Language": "de-DE,de;q=0.9,en;q=0.8",
"Connection": "keep-alive", "Connection": "keep-alive",
} }
SESSION_CACHE_TTL_SECONDS = 300
SESSION_CACHE_PREFIX = "viewit.serienstream"
SESSION_CACHE_MAX_TITLE_URLS = 800
@dataclass @dataclass
@@ -101,15 +111,96 @@ class SeasonInfo:
episodes: List[EpisodeInfo] episodes: List[EpisodeInfo]
def _get_base_url() -> str:
base = get_setting_string(ADDON_ID, SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip()
if not base:
base = DEFAULT_BASE_URL
return base.rstrip("/")
def _series_base_url() -> str:
return f"{_get_base_url()}/serie/stream"
def _popular_series_url() -> str:
return f"{_get_base_url()}/beliebte-serien"
def _latest_episodes_url() -> str:
return f"{_get_base_url()}"
def _absolute_url(href: str) -> str: def _absolute_url(href: str) -> str:
return f"{BASE_URL}{href}" if href.startswith("/") else href return f"{_get_base_url()}{href}" if href.startswith("/") else href
def _session_window() -> Any:
if xbmcgui is None:
return None
try:
return xbmcgui.Window(10000)
except Exception:
return None
def _session_cache_key(name: str) -> str:
base_hash = hashlib.sha1(_get_base_url().encode("utf-8")).hexdigest()[:12]
return f"{SESSION_CACHE_PREFIX}.{base_hash}.{name}"
def _session_cache_get(name: str) -> Any:
window = _session_window()
if window is None:
return None
raw = ""
try:
raw = window.getProperty(_session_cache_key(name)) or ""
except Exception:
return None
if not raw:
return None
try:
payload = json.loads(raw)
except Exception:
return None
if not isinstance(payload, dict):
return None
expires_at = payload.get("expires_at")
data = payload.get("data")
try:
if float(expires_at or 0) <= time.time():
return None
except Exception:
return None
return data
def _session_cache_set(name: str, data: Any, *, ttl_seconds: int = SESSION_CACHE_TTL_SECONDS) -> None:
window = _session_window()
if window is None:
return
payload = {
"expires_at": float(time.time() + max(1, int(ttl_seconds))),
"data": data,
}
try:
raw = json.dumps(payload, ensure_ascii=False, separators=(",", ":"))
except Exception:
return
# Kodi-Properties sind kein Dauer-Storage; begrenzen, damit UI stabil bleibt.
if len(raw) > 240_000:
return
try:
window.setProperty(_session_cache_key(name), raw)
except Exception:
return
def _normalize_series_url(identifier: str) -> str: def _normalize_series_url(identifier: str) -> str:
if identifier.startswith("http://") or identifier.startswith("https://"): if identifier.startswith("http://") or identifier.startswith("https://"):
return identifier.rstrip("/") return identifier.rstrip("/")
slug = identifier.strip("/") slug = identifier.strip("/")
return f"{SERIES_BASE_URL}/{slug}" return f"{_series_base_url()}/{slug}"
def _series_root_url(url: str) -> str: def _series_root_url(url: str) -> str:
@@ -147,16 +238,60 @@ def _normalize_search_text(value: str) -> str:
return value return value
def _matches_query(query: str, *, title: str) -> bool:
normalized_query = _normalize_search_text(query)
if not normalized_query:
return False
haystack = f" {_normalize_search_text(title)} "
return f" {normalized_query} " in haystack
def _is_episode_tba(title: str, original_title: str) -> bool:
combined = f"{title} {original_title}".casefold()
markers = ("tba", "demnächst", "demnaechst", "coming soon", "to be announced")
return any(marker in combined for marker in markers)
def _row_is_upcoming(row: BeautifulSoupT) -> bool:
classes = row.get("class") or []
if isinstance(classes, str):
classes = classes.split()
if "upcoming" in classes:
return True
badge = row.select_one(".badge-upcoming")
if badge and (badge.get_text(" ", strip=True) or "").strip():
return True
watch_cell = row.select_one(".episode-watch-cell")
if watch_cell:
text = watch_cell.get_text(" ", strip=True).casefold()
if "tba" in text:
return True
return False
def _get_setting_bool(setting_id: str, *, default: bool = False) -> bool: def _get_setting_bool(setting_id: str, *, default: bool = False) -> bool:
return get_setting_bool(ADDON_ID, setting_id, default=default) return get_setting_bool(ADDON_ID, setting_id, default=default)
def _notify_url(url: str) -> None: def _notify_url(url: str) -> None:
notify_url(ADDON_ID, heading="Serienstream", url=url, enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO) notify_url(
ADDON_ID,
heading="Serienstream",
url=url,
enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO,
plugin_setting_id=SETTING_SHOW_URL_INFO,
)
def _log_url(url: str, *, kind: str = "VISIT") -> None: def _log_url(url: str, *, kind: str = "VISIT") -> None:
log_url(ADDON_ID, enabled_setting_id=GLOBAL_SETTING_LOG_URLS, log_filename="serienstream_urls.log", url=url, kind=kind) log_url(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="serienstream_urls.log",
url=url,
kind=kind,
)
def _log_parsed_url(url: str) -> None: def _log_parsed_url(url: str) -> None:
@@ -167,12 +302,23 @@ def _log_response_html(url: str, body: str) -> None:
dump_response_html( dump_response_html(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_DUMP_HTML, enabled_setting_id=GLOBAL_SETTING_DUMP_HTML,
plugin_setting_id=SETTING_DUMP_HTML,
url=url, url=url,
body=body, body=body,
filename_prefix="s_to_response", filename_prefix="s_to_response",
) )
def _log_error(message: str) -> None:
log_error(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_ERRORS,
plugin_setting_id=SETTING_LOG_ERRORS,
log_filename="serienstream_errors.log",
message=message,
)
def _ensure_requests() -> None: def _ensure_requests() -> None:
if requests is None or BeautifulSoup is None: if requests is None or BeautifulSoup is None:
raise RuntimeError("requests/bs4 sind nicht verfuegbar.") raise RuntimeError("requests/bs4 sind nicht verfuegbar.")
@@ -196,8 +342,12 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = session or get_requests_session("serienstream", headers=HEADERS) sess = session or get_requests_session("serienstream", headers=HEADERS)
try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
raise
if response.url and response.url != url: if response.url and response.url != url:
_log_url(response.url, kind="REDIRECT") _log_url(response.url, kind="REDIRECT")
_log_response_html(url, response.text) _log_response_html(url, response.text)
@@ -206,34 +356,62 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
return BeautifulSoup(response.text, "html.parser") return BeautifulSoup(response.text, "html.parser")
def _get_soup_simple(url: str) -> BeautifulSoupT: def _get_html_simple(url: str) -> str:
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = get_requests_session("serienstream", headers=HEADERS) sess = get_requests_session("serienstream", headers=HEADERS)
try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
raise
if response.url and response.url != url: if response.url and response.url != url:
_log_url(response.url, kind="REDIRECT") _log_url(response.url, kind="REDIRECT")
_log_response_html(url, response.text) body = response.text
if _looks_like_cloudflare_challenge(response.text): _log_response_html(url, body)
if _looks_like_cloudflare_challenge(body):
raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.")
return BeautifulSoup(response.text, "html.parser") return body
def _get_soup_simple(url: str) -> BeautifulSoupT:
body = _get_html_simple(url)
return BeautifulSoup(body, "html.parser")
def _extract_genre_names_from_html(body: str) -> List[str]:
names: List[str] = []
seen: set[str] = set()
pattern = re.compile(
r"<div[^>]*class=[\"'][^\"']*background-1[^\"']*[\"'][^>]*>.*?<h3[^>]*>(.*?)</h3>",
re.IGNORECASE | re.DOTALL,
)
for match in pattern.finditer(body or ""):
text = re.sub(r"<[^>]+>", " ", match.group(1) or "")
text = unescape(re.sub(r"\s+", " ", text)).strip()
if not text:
continue
key = text.casefold()
if key in seen:
continue
seen.add(key)
names.append(text)
return names
def search_series(query: str) -> List[SeriesResult]: def search_series(query: str) -> List[SeriesResult]:
"""Sucht Serien im (/serien)-Katalog (Genre-liste) nach Titel/Alt-Titel.""" """Sucht Serien im (/serien)-Katalog (Genre-liste) nach Titel/Alt-Titel."""
_ensure_requests() _ensure_requests()
normalized_query = _normalize_search_text(query) if not _normalize_search_text(query):
if not normalized_query:
return [] return []
# Direkter Abruf wie in fetch_serien.py. # Direkter Abruf wie in fetch_serien.py.
catalog_url = f"{BASE_URL}/serien?by=genre" catalog_url = f"{_get_base_url()}/serien?by=genre"
soup = _get_soup_simple(catalog_url) soup = _get_soup_simple(catalog_url)
results: List[SeriesResult] = [] results: List[SeriesResult] = []
for series in parse_series_catalog(soup).values(): for series in parse_series_catalog(soup).values():
for entry in series: for entry in series:
haystack = _normalize_search_text(entry.title) if entry.title and _matches_query(query, title=entry.title):
if entry.title and normalized_query in haystack:
results.append(entry) results.append(entry)
return results return results
@@ -349,6 +527,8 @@ def _extract_episodes(soup: BeautifulSoupT) -> List[EpisodeInfo]:
# Neues Layout (Stand: 2026-01): Episoden-Tabelle mit Zeilen und onclick-URL. # Neues Layout (Stand: 2026-01): Episoden-Tabelle mit Zeilen und onclick-URL.
rows = soup.select("table.episode-table tbody tr.episode-row") rows = soup.select("table.episode-table tbody tr.episode-row")
for index, row in enumerate(rows): for index, row in enumerate(rows):
if _row_is_upcoming(row):
continue
onclick = (row.get("onclick") or "").strip() onclick = (row.get("onclick") or "").strip()
url = "" url = ""
if onclick: if onclick:
@@ -376,6 +556,8 @@ def _extract_episodes(soup: BeautifulSoupT) -> List[EpisodeInfo]:
original_title = (original_tag.get_text(strip=True) if original_tag else "").strip() original_title = (original_tag.get_text(strip=True) if original_tag else "").strip()
if not title: if not title:
title = f"Episode {number}" title = f"Episode {number}"
if _is_episode_tba(title, original_title):
continue
hosters: List[str] = [] hosters: List[str] = []
for img in row.select(".episode-watch-cell img"): for img in row.select(".episode-watch-cell img"):
@@ -424,7 +606,7 @@ def fetch_episode_stream_link(
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren. # Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
@@ -453,7 +635,7 @@ def fetch_episode_hoster_names(episode_url: str) -> List[str]:
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren. # Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
@@ -503,10 +685,10 @@ def _extract_latest_episodes(soup: BeautifulSoupT) -> List[LatestEpisode]:
episode_text = (anchor.select_one(".ep-episode").get_text(strip=True) if anchor.select_one(".ep-episode") else "").strip() episode_text = (anchor.select_one(".ep-episode").get_text(strip=True) if anchor.select_one(".ep-episode") else "").strip()
season_number: Optional[int] = None season_number: Optional[int] = None
episode_number: Optional[int] = None episode_number: Optional[int] = None
match = re.search(r"S\\s*(\\d+)", season_text, re.IGNORECASE) match = re.search(r"S\s*(\d+)", season_text, re.IGNORECASE)
if match: if match:
season_number = int(match.group(1)) season_number = int(match.group(1))
match = re.search(r"E\\s*(\\d+)", episode_text, re.IGNORECASE) match = re.search(r"E\s*(\d+)", episode_text, re.IGNORECASE)
if match: if match:
episode_number = int(match.group(1)) episode_number = int(match.group(1))
if season_number is None or episode_number is None: if season_number is None or episode_number is None:
@@ -546,7 +728,7 @@ def resolve_redirect(target_url: str) -> Optional[str]:
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren. # Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
response = session.get( response = session.get(
@@ -563,17 +745,14 @@ def resolve_redirect(target_url: str) -> Optional[str]:
def scrape_series_detail( def scrape_series_detail(
series_identifier: str, series_identifier: str,
max_seasons: Optional[int] = None, max_seasons: Optional[int] = None,
*,
load_episodes: bool = True,
) -> List[SeasonInfo]: ) -> List[SeasonInfo]:
_ensure_requests() _ensure_requests()
series_url = _series_root_url(_normalize_series_url(series_identifier)) series_url = _series_root_url(_normalize_series_url(series_identifier))
_log_url(series_url, kind="SERIES") _log_url(series_url, kind="SERIES")
_notify_url(series_url) _notify_url(series_url)
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight ist optional; manche Umgebungen/Provider leiten die Startseite um.
try:
_get_soup(BASE_URL, session=session)
except Exception:
pass
soup = _get_soup(series_url, session=session) soup = _get_soup(series_url, session=session)
base_series_url = _series_root_url(_extract_canonical_url(soup, series_url)) base_series_url = _series_root_url(_extract_canonical_url(soup, series_url))
@@ -592,6 +771,8 @@ def scrape_series_detail(
season_links = season_links[:max_seasons] season_links = season_links[:max_seasons]
seasons: List[SeasonInfo] = [] seasons: List[SeasonInfo] = []
for number, url in season_links: for number, url in season_links:
episodes: List[EpisodeInfo] = []
if load_episodes:
season_soup = _get_soup(url, session=session) season_soup = _get_soup(url, session=session)
episodes = _extract_episodes(season_soup) episodes = _extract_episodes(season_soup)
seasons.append(SeasonInfo(number=number, url=url, episodes=episodes)) seasons.append(SeasonInfo(number=number, url=url, episodes=episodes))
@@ -602,14 +783,20 @@ def scrape_series_detail(
class SerienstreamPlugin(BasisPlugin): class SerienstreamPlugin(BasisPlugin):
"""Downloader-Plugin, das Serien von s.to ueber requests/bs4 bereitstellt.""" """Downloader-Plugin, das Serien von s.to ueber requests/bs4 bereitstellt."""
name = "Serienstream (s.to)" name = "Serienstream"
POPULAR_GENRE_LABEL = "⭐ Beliebte Serien" POPULAR_GENRE_LABEL = "⭐ Beliebte Serien"
def __init__(self) -> None: def __init__(self) -> None:
self._series_results: Dict[str, SeriesResult] = {} self._series_results: Dict[str, SeriesResult] = {}
self._title_url_cache: Dict[str, str] = self._load_title_url_cache()
self._genre_names_cache: Optional[List[str]] = None
self._season_cache: Dict[str, List[SeasonInfo]] = {} self._season_cache: Dict[str, List[SeasonInfo]] = {}
self._season_links_cache: Dict[str, List[SeasonInfo]] = {}
self._episode_label_cache: Dict[Tuple[str, str], Dict[str, EpisodeInfo]] = {} self._episode_label_cache: Dict[Tuple[str, str], Dict[str, EpisodeInfo]] = {}
self._catalog_cache: Optional[Dict[str, List[SeriesResult]]] = None self._catalog_cache: Optional[Dict[str, List[SeriesResult]]] = None
self._genre_group_cache: Dict[str, Dict[str, List[str]]] = {}
self._genre_page_titles_cache: Dict[Tuple[str, int], List[str]] = {}
self._genre_page_count_cache: Dict[str, int] = {}
self._popular_cache: Optional[List[SeriesResult]] = None self._popular_cache: Optional[List[SeriesResult]] = None
self._requests_available = REQUESTS_AVAILABLE self._requests_available = REQUESTS_AVAILABLE
self._default_preferred_hosters: List[str] = list(DEFAULT_PREFERRED_HOSTERS) self._default_preferred_hosters: List[str] = list(DEFAULT_PREFERRED_HOSTERS)
@@ -632,21 +819,171 @@ class SerienstreamPlugin(BasisPlugin):
print(f"Importfehler: {REQUESTS_IMPORT_ERROR}") print(f"Importfehler: {REQUESTS_IMPORT_ERROR}")
return return
def _load_title_url_cache(self) -> Dict[str, str]:
raw = _session_cache_get("title_urls")
if not isinstance(raw, dict):
return {}
result: Dict[str, str] = {}
for key, value in raw.items():
key_text = str(key or "").strip().casefold()
url_text = str(value or "").strip()
if not key_text or not url_text:
continue
result[key_text] = url_text
return result
def _save_title_url_cache(self) -> None:
if not self._title_url_cache:
return
# Begrenzt die Session-Daten auf die jüngsten Einträge.
while len(self._title_url_cache) > SESSION_CACHE_MAX_TITLE_URLS:
self._title_url_cache.pop(next(iter(self._title_url_cache)))
_session_cache_set("title_urls", self._title_url_cache)
def _remember_series_result(self, title: str, url: str, description: str = "") -> None:
title = (title or "").strip()
url = (url or "").strip()
if not title:
return
if url:
self._series_results[title] = SeriesResult(title=title, description=description, url=url)
cache_key = title.casefold()
if self._title_url_cache.get(cache_key) != url:
self._title_url_cache[cache_key] = url
self._save_title_url_cache()
return
current = self._series_results.get(title)
if current is None:
self._series_results[title] = SeriesResult(title=title, description=description, url="")
@staticmethod
def _season_links_cache_name(series_url: str) -> str:
digest = hashlib.sha1((series_url or "").encode("utf-8")).hexdigest()[:20]
return f"season_links.{digest}"
@staticmethod
def _season_episodes_cache_name(season_url: str) -> str:
digest = hashlib.sha1((season_url or "").encode("utf-8")).hexdigest()[:20]
return f"season_episodes.{digest}"
def _load_session_season_links(self, series_url: str) -> Optional[List[SeasonInfo]]:
raw = _session_cache_get(self._season_links_cache_name(series_url))
if not isinstance(raw, list):
return None
seasons: List[SeasonInfo] = []
for item in raw:
if not isinstance(item, dict):
continue
try:
number = int(item.get("number"))
except Exception:
continue
url = str(item.get("url") or "").strip()
if number <= 0 or not url:
continue
seasons.append(SeasonInfo(number=number, url=url, episodes=[]))
if not seasons:
return None
seasons.sort(key=lambda s: s.number)
return seasons
def _save_session_season_links(self, series_url: str, seasons: List[SeasonInfo]) -> None:
payload = [{"number": int(season.number), "url": season.url} for season in seasons if season.url]
if payload:
_session_cache_set(self._season_links_cache_name(series_url), payload)
def _load_session_season_episodes(self, season_url: str) -> Optional[List[EpisodeInfo]]:
raw = _session_cache_get(self._season_episodes_cache_name(season_url))
if not isinstance(raw, list):
return None
episodes: List[EpisodeInfo] = []
for item in raw:
if not isinstance(item, dict):
continue
try:
number = int(item.get("number"))
except Exception:
continue
title = str(item.get("title") or "").strip()
original_title = str(item.get("original_title") or "").strip()
url = str(item.get("url") or "").strip()
season_label = str(item.get("season_label") or "").strip()
languages = [str(lang).strip() for lang in list(item.get("languages") or []) if str(lang).strip()]
hosters = [str(host).strip() for host in list(item.get("hosters") or []) if str(host).strip()]
if number <= 0:
continue
episodes.append(
EpisodeInfo(
number=number,
title=title or f"Episode {number}",
original_title=original_title,
url=url,
season_label=season_label,
languages=languages,
hosters=hosters,
)
)
if not episodes:
return None
episodes.sort(key=lambda item: item.number)
return episodes
def _save_session_season_episodes(self, season_url: str, episodes: List[EpisodeInfo]) -> None:
payload = []
for item in episodes:
payload.append(
{
"number": int(item.number),
"title": item.title,
"original_title": item.original_title,
"url": item.url,
"season_label": item.season_label,
"languages": list(item.languages or []),
"hosters": list(item.hosters or []),
}
)
if payload:
_session_cache_set(self._season_episodes_cache_name(season_url), payload)
def _ensure_catalog(self) -> Dict[str, List[SeriesResult]]: def _ensure_catalog(self) -> Dict[str, List[SeriesResult]]:
if self._catalog_cache is not None: if self._catalog_cache is not None:
return self._catalog_cache return self._catalog_cache
# Stand: 2026-01 liefert `?by=genre` konsistente Gruppen für `genres()`. # Stand: 2026-01 liefert `?by=genre` konsistente Gruppen für `genres()`.
catalog_url = f"{BASE_URL}/serien?by=genre" catalog_url = f"{_get_base_url()}/serien?by=genre"
soup = _get_soup_simple(catalog_url) soup = _get_soup_simple(catalog_url)
self._catalog_cache = parse_series_catalog(soup) self._catalog_cache = parse_series_catalog(soup)
_session_cache_set("genres", sorted(self._catalog_cache.keys(), key=str.casefold))
return self._catalog_cache return self._catalog_cache
def _ensure_genre_names(self) -> List[str]:
if self._genre_names_cache is not None:
return list(self._genre_names_cache)
cached = _session_cache_get("genres")
if isinstance(cached, list):
genres = [str(value).strip() for value in cached if str(value).strip()]
if genres:
self._genre_names_cache = sorted(set(genres), key=str.casefold)
return list(self._genre_names_cache)
catalog_url = f"{_get_base_url()}/serien?by=genre"
try:
body = _get_html_simple(catalog_url)
genres = _extract_genre_names_from_html(body)
except Exception:
genres = []
if not genres:
catalog = self._ensure_catalog()
genres = sorted(catalog.keys(), key=str.casefold)
else:
genres = sorted(set(genres), key=str.casefold)
self._genre_names_cache = list(genres)
_session_cache_set("genres", self._genre_names_cache)
return list(self._genre_names_cache)
def genres(self) -> List[str]: def genres(self) -> List[str]:
"""Optional: Liefert alle Genres aus dem Serien-Katalog.""" """Optional: Liefert alle Genres aus dem Serien-Katalog."""
if not self._requests_available: if not self._requests_available:
return [] return []
catalog = self._ensure_catalog() return self._ensure_genre_names()
return sorted(catalog.keys(), key=str.casefold)
def capabilities(self) -> set[str]: def capabilities(self) -> set[str]:
"""Meldet unterstützte Features für Router-Menüs.""" """Meldet unterstützte Features für Router-Menüs."""
@@ -657,7 +994,8 @@ class SerienstreamPlugin(BasisPlugin):
if not self._requests_available: if not self._requests_available:
return [] return []
entries = self._ensure_popular() entries = self._ensure_popular()
self._series_results.update({entry.title: entry for entry in entries if entry.title}) for entry in entries:
self._remember_series_result(entry.title, entry.url, entry.description)
return [entry.title for entry in entries if entry.title] return [entry.title for entry in entries if entry.title]
def titles_for_genre(self, genre: str) -> List[str]: def titles_for_genre(self, genre: str) -> List[str]:
@@ -671,14 +1009,172 @@ class SerienstreamPlugin(BasisPlugin):
return self.popular_series() return self.popular_series()
catalog = self._ensure_catalog() catalog = self._ensure_catalog()
entries = catalog.get(genre, []) entries = catalog.get(genre, [])
self._series_results.update({entry.title: entry for entry in entries if entry.title}) for entry in entries:
self._remember_series_result(entry.title, entry.url, entry.description)
return [entry.title for entry in entries if entry.title] return [entry.title for entry in entries if entry.title]
@staticmethod
def _title_group_key(title: str) -> str:
raw = (title or "").strip()
if not raw:
return "#"
for char in raw:
if char.isdigit():
return "0-9"
if char.isalpha():
normalized = char.casefold()
if normalized == "ä":
normalized = "a"
elif normalized == "ö":
normalized = "o"
elif normalized == "ü":
normalized = "u"
elif normalized == "ß":
normalized = "s"
return normalized.upper()
return "#"
@classmethod
def _group_matches(cls, group_code: str, title: str) -> bool:
key = cls._title_group_key(title)
if group_code == "0-9":
return key == "0-9"
if key == "0-9" or key == "#":
return False
if group_code == "A-E":
return "A" <= key <= "E"
if group_code == "F-J":
return "F" <= key <= "J"
if group_code == "K-O":
return "K" <= key <= "O"
if group_code == "P-T":
return "P" <= key <= "T"
if group_code == "U-Z":
return "U" <= key <= "Z"
return False
def _ensure_genre_group_cache(self, genre: str) -> Dict[str, List[str]]:
cached = self._genre_group_cache.get(genre)
if cached is not None:
return cached
titles = self.titles_for_genre(genre)
grouped: Dict[str, List[str]] = {}
for title in titles:
for code in ("A-E", "F-J", "K-O", "P-T", "U-Z", "0-9"):
if self._group_matches(code, title):
grouped.setdefault(code, []).append(title)
break
for code in grouped:
grouped[code].sort(key=str.casefold)
self._genre_group_cache[genre] = grouped
return grouped
@staticmethod
def _genre_slug(genre: str) -> str:
value = (genre or "").strip().casefold()
value = value.replace("&", " und ")
value = unicodedata.normalize("NFKD", value)
value = "".join(ch for ch in value if not unicodedata.combining(ch))
value = re.sub(r"[^a-z0-9]+", "-", value).strip("-")
return value
def _fetch_genre_page_titles(self, genre: str, page: int) -> Tuple[List[str], int]:
    """Fetch one paginated genre listing.

    Returns (titles on *page*, total number of pages). Results are memoized
    per (slug, page); every discovered series URL is remembered so later
    detail lookups can resolve the title.
    """
    slug = self._genre_slug(genre)
    if not slug:
        return [], 1
    cache_key = (slug, page)
    cached_titles = self._genre_page_titles_cache.get(cache_key)
    known_pages = self._genre_page_count_cache.get(slug)
    if cached_titles is not None and known_pages is not None:
        return list(cached_titles), int(known_pages)
    url = f"{_get_base_url()}/genre/{slug}"
    if page > 1:
        url = f"{url}?page={int(page)}"
    soup = _get_soup_simple(url)
    titles: List[str] = []
    seen_keys: set[str] = set()
    for anchor in soup.select("a.show-card[href]"):
        href = (anchor.get("href") or "").strip()
        # Normalize to the bare series URL (no fragment/query, no trailing slash).
        series_url = _absolute_url(href).split("#", 1)[0].split("?", 1)[0].rstrip("/")
        if "/serie/" not in series_url:
            continue
        img = anchor.select_one("img[alt]")
        title = ((img.get("alt") if img else "") or "").strip()
        if not title:
            continue
        dedupe_key = title.casefold()
        if dedupe_key in seen_keys:
            continue
        seen_keys.add(dedupe_key)
        self._remember_series_result(title, series_url)
        titles.append(title)
    # Derive the total page count from the pagination links (?page=N).
    max_page = 1
    for anchor in soup.select("a[href*='?page=']"):
        match = re.search(r"[?&]page=(\d+)", (anchor.get("href") or "").strip())
        if not match:
            continue
        try:
            max_page = max(max_page, int(match.group(1)))
        except Exception:
            continue
    self._genre_page_titles_cache[cache_key] = list(titles)
    self._genre_page_count_cache[slug] = max_page
    return list(titles), max_page
def titles_for_genre_group_page(self, genre: str, group_code: str, page: int = 1, page_size: int = 10) -> List[str]:
    """Return the titles of *genre* that fall into *group_code*, paginated.

    Scrapes the live genre pages first; on any error it falls back to the
    locally cached catalog grouping.
    """
    genre = (genre or "").strip()
    group_code = (group_code or "").strip()
    page = max(1, int(page or 1))
    page_size = max(1, int(page_size or 10))
    # Collect one extra entry so "has more" can be detected cheaply.
    needed = page * page_size + 1
    matched: List[str] = []
    try:
        _, max_pages = self._fetch_genre_page_titles(genre, 1)
        for page_index in range(1, max_pages + 1):
            page_titles, _ = self._fetch_genre_page_titles(genre, page_index)
            for title in page_titles:
                if self._group_matches(group_code, title):
                    matched.append(title)
            # BUGFIX: stop fetching further genre pages once enough matches
            # exist -- previously only the inner loop was broken, so every
            # remaining page was still downloaded.
            if len(matched) >= needed:
                break
        start = (page - 1) * page_size
        return list(matched[start:start + page_size])
    except Exception:
        grouped = self._ensure_genre_group_cache(genre)
        titles = grouped.get(group_code, [])
        start = (page - 1) * page_size
        return list(titles[start:start + page_size])
def genre_group_has_more(self, genre: str, group_code: str, page: int = 1, page_size: int = 10) -> bool:
    """Return True if more titles exist for the group beyond *page*."""
    genre = (genre or "").strip()
    group_code = (group_code or "").strip()
    page = max(1, int(page or 1))
    page_size = max(1, int(page_size or 10))
    # One entry beyond the current page is enough to prove "has more".
    threshold = page * page_size + 1
    try:
        seen = 0
        _, max_pages = self._fetch_genre_page_titles(genre, 1)
        for page_index in range(1, max_pages + 1):
            page_titles, _ = self._fetch_genre_page_titles(genre, page_index)
            for title in page_titles:
                if not self._group_matches(group_code, title):
                    continue
                seen += 1
                if seen >= threshold:
                    return True
        return False
    except Exception:
        grouped = self._ensure_genre_group_cache(genre)
        return len(grouped.get(group_code, [])) > page * page_size
def _ensure_popular(self) -> List[SeriesResult]: def _ensure_popular(self) -> List[SeriesResult]:
"""Laedt und cached die Liste der beliebten Serien aus `/beliebte-serien`.""" """Laedt und cached die Liste der beliebten Serien aus `/beliebte-serien`."""
if self._popular_cache is not None: if self._popular_cache is not None:
return list(self._popular_cache) return list(self._popular_cache)
soup = _get_soup_simple(POPULAR_SERIES_URL) soup = _get_soup_simple(_popular_series_url())
results: List[SeriesResult] = [] results: List[SeriesResult] = []
seen: set[str] = set() seen: set[str] = set()
@@ -703,7 +1199,7 @@ class SerienstreamPlugin(BasisPlugin):
if not title or title in seen: if not title or title in seen:
continue continue
url = _absolute_url(href).split("#", 1)[0].split("?", 1)[0].rstrip("/") url = _absolute_url(href).split("#", 1)[0].split("?", 1)[0].rstrip("/")
url = re.sub(r"/staffel-\\d+(?:/.*)?$", "", url).rstrip("/") url = re.sub(r"/staffel-\d+(?:/.*)?$", "", url).rstrip("/")
if not url: if not url:
continue continue
_log_parsed_url(url) _log_parsed_url(url)
@@ -749,19 +1245,104 @@ class SerienstreamPlugin(BasisPlugin):
self._episode_label(info): info for info in season_info.episodes self._episode_label(info): info for info in season_info.episodes
} }
def _ensure_season_links(self, title: str) -> List[SeasonInfo]:
    """Resolve the list of seasons (links only, no episodes) for *title*.

    Lookup order: in-memory cache -> known series result / title-URL cache ->
    full catalog scan. Season links are additionally persisted via the
    session cache so repeated calls avoid re-scraping the detail page.

    Raises:
        RuntimeError: if the series detail page cannot be fetched/parsed.
    """
    cached = self._season_links_cache.get(title)
    if cached is not None:
        return list(cached)
    series = self._series_results.get(title)
    if not series:
        # Fall back to the title->URL cache and synthesize a result entry.
        cached_url = self._title_url_cache.get(title.casefold().strip(), "")
        if cached_url:
            series = SeriesResult(title=title, description="", url=cached_url)
            self._series_results[title] = series
    if not series:
        # Last resort: case-insensitive scan over the full catalog.
        catalog = self._ensure_catalog()
        lookup_key = title.casefold().strip()
        for entries in catalog.values():
            for entry in entries:
                if entry.title.casefold().strip() == lookup_key:
                    series = entry
                    self._remember_series_result(entry.title, entry.url, entry.description)
                    break
            if series:
                break
    if not series:
        return []
    # Prefer season links persisted earlier in this session.
    session_links = self._load_session_season_links(series.url)
    if session_links:
        self._season_links_cache[title] = list(session_links)
        return list(session_links)
    try:
        # load_episodes=False keeps this cheap; episodes are fetched lazily.
        seasons = scrape_series_detail(series.url, load_episodes=False)
    except Exception as exc:  # pragma: no cover - defensive logging
        raise RuntimeError(f"Serienstream-Staffeln konnten nicht geladen werden: {exc}") from exc
    self._season_links_cache[title] = list(seasons)
    self._save_session_season_links(series.url, seasons)
    return list(seasons)
def remember_series_url(self, title: str, series_url: str) -> None:
    """Store a known title -> detail-URL mapping for later lookups."""
    cleaned_title = (title or "").strip()
    cleaned_url = (series_url or "").strip()
    if cleaned_title and cleaned_url:
        self._remember_series_result(cleaned_title, cleaned_url)
def series_url_for_title(self, title: str) -> str:
    """Return the known detail URL for *title*, or "" when unknown."""
    wanted = (title or "").strip()
    if not wanted:
        return ""
    direct = self._series_results.get(wanted)
    if direct and direct.url:
        return direct.url
    lookup_key = wanted.casefold().strip()
    cached_url = self._title_url_cache.get(lookup_key, "")
    if cached_url:
        return cached_url
    # Fall back to a case-insensitive scan over all known results.
    for entry in self._series_results.values():
        if entry.url and entry.title.casefold().strip() == lookup_key:
            return entry.url
    return ""
def _ensure_season_episodes(self, title: str, season_number: int) -> Optional[SeasonInfo]:
    """Return the SeasonInfo (including episodes) for *title*/*season_number*.

    Episodes are served from the in-memory season cache, the persisted
    session cache, or finally scraped from the season page. Returns None
    when the season is unknown.

    Raises:
        RuntimeError: if the season page cannot be fetched/parsed.
    """
    seasons = self._season_cache.get(title) or []
    for season in seasons:
        if season.number == season_number and season.episodes:
            return season
    links = self._ensure_season_links(title)
    target = next((season for season in links if season.number == season_number), None)
    if not target:
        return None
    cached_episodes = self._load_session_season_episodes(target.url)
    if cached_episodes:
        season_info = SeasonInfo(number=target.number, url=target.url, episodes=list(cached_episodes))
        scraped = False
    else:
        try:
            season_soup = _get_soup(target.url, session=get_requests_session("serienstream", headers=HEADERS))
            season_info = SeasonInfo(number=target.number, url=target.url, episodes=_extract_episodes(season_soup))
        except Exception as exc:  # pragma: no cover - defensive logging
            raise RuntimeError(f"Serienstream-Episoden konnten nicht geladen werden: {exc}") from exc
        scraped = True
    # CONSISTENCY: previously this cache update was duplicated in both
    # branches; it now runs once for cached and scraped episodes alike.
    updated = [season for season in seasons if season.number != season_number]
    updated.append(season_info)
    updated.sort(key=lambda item: item.number)
    self._season_cache[title] = updated
    if scraped:
        # Persist freshly scraped episodes for the rest of the session.
        self._save_session_season_episodes(target.url, season_info.episodes)
    return season_info
def _lookup_episode(self, title: str, season_label: str, episode_label: str) -> Optional[EpisodeInfo]: def _lookup_episode(self, title: str, season_label: str, episode_label: str) -> Optional[EpisodeInfo]:
cache_key = (title, season_label) cache_key = (title, season_label)
cached = self._episode_label_cache.get(cache_key) cached = self._episode_label_cache.get(cache_key)
if cached: if cached:
return cached.get(episode_label) return cached.get(episode_label)
seasons = self._ensure_seasons(title)
number = self._parse_season_number(season_label) number = self._parse_season_number(season_label)
if number is None: if number is None:
return None return None
season_info = self._ensure_season_episodes(title, number)
for season_info in seasons: if season_info:
if season_info.number == number:
self._cache_episode_labels(title, season_label, season_info) self._cache_episode_labels(title, season_label, season_info)
return self._episode_label_cache.get(cache_key, {}).get(episode_label) return self._episode_label_cache.get(cache_key, {}).get(episode_label)
return None return None
@@ -771,6 +1352,7 @@ class SerienstreamPlugin(BasisPlugin):
if not query: if not query:
self._series_results.clear() self._series_results.clear()
self._season_cache.clear() self._season_cache.clear()
self._season_links_cache.clear()
self._episode_label_cache.clear() self._episode_label_cache.clear()
self._catalog_cache = None self._catalog_cache = None
return [] return []
@@ -786,8 +1368,11 @@ class SerienstreamPlugin(BasisPlugin):
self._episode_label_cache.clear() self._episode_label_cache.clear()
self._catalog_cache = None self._catalog_cache = None
raise RuntimeError(f"Serienstream-Suche fehlgeschlagen: {exc}") from exc raise RuntimeError(f"Serienstream-Suche fehlgeschlagen: {exc}") from exc
self._series_results = {result.title: result for result in results} self._series_results = {}
for result in results:
self._remember_series_result(result.title, result.url, result.description)
self._season_cache.clear() self._season_cache.clear()
self._season_links_cache.clear()
self._episode_label_cache.clear() self._episode_label_cache.clear()
return [result.title for result in results] return [result.title for result in results]
@@ -814,33 +1399,27 @@ class SerienstreamPlugin(BasisPlugin):
for entry in entries: for entry in entries:
if entry.title.casefold().strip() == lookup_key: if entry.title.casefold().strip() == lookup_key:
series = entry series = entry
self._series_results[entry.title] = entry self._remember_series_result(entry.title, entry.url, entry.description)
break break
if series: if series:
break break
if not series: if not series:
return [] return []
try: seasons = self._ensure_season_links(title)
seasons = scrape_series_detail(series.url)
except Exception as exc: # pragma: no cover - defensive logging
raise RuntimeError(f"Serienstream-Staffeln konnten nicht geladen werden: {exc}") from exc
self._clear_episode_cache_for_title(title) self._clear_episode_cache_for_title(title)
self._season_cache[title] = seasons self._season_cache[title] = list(seasons)
return seasons return list(seasons)
def seasons_for(self, title: str) -> List[str]: def seasons_for(self, title: str) -> List[str]:
seasons = self._ensure_seasons(title) seasons = self._ensure_seasons(title)
# Serienstream liefert gelegentlich Staffeln ohne Episoden (z.B. Parsing-/Layoutwechsel). return [self._season_label(season.number) for season in seasons]
# Diese sollen im UI nicht als auswählbarer Menüpunkt erscheinen.
return [self._season_label(season.number) for season in seasons if season.episodes]
def episodes_for(self, title: str, season: str) -> List[str]: def episodes_for(self, title: str, season: str) -> List[str]:
seasons = self._ensure_seasons(title)
number = self._parse_season_number(season) number = self._parse_season_number(season)
if number is None: if number is None:
return [] return []
for season_info in seasons: season_info = self._ensure_season_episodes(title, number)
if season_info.number == number: if season_info:
labels = [self._episode_label(info) for info in season_info.episodes] labels = [self._episode_label(info) for info in season_info.episodes]
self._cache_episode_labels(title, season, season_info) self._cache_episode_labels(title, season, season_info)
return labels return labels
@@ -894,7 +1473,7 @@ class SerienstreamPlugin(BasisPlugin):
if cached is not None: if cached is not None:
return list(cached) return list(cached)
url = LATEST_EPISODES_URL url = _latest_episodes_url()
if page > 1: if page > 1:
url = f"{url}?page={page}" url = f"{url}?page={page}"
soup = _get_soup_simple(url) soup = _get_soup_simple(url)

View File

@@ -44,7 +44,7 @@ except ImportError: # pragma: no cover - allow running outside Kodi
xbmcgui = None xbmcgui = None
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url from plugin_helpers import dump_response_html, get_setting_bool, log_error, log_url, notify_url
from regex_patterns import DIGITS from regex_patterns import DIGITS
if TYPE_CHECKING: # pragma: no cover if TYPE_CHECKING: # pragma: no cover
@@ -61,6 +61,11 @@ DEFAULT_BASE_URL = "https://www.meineseite"
GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info" GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
GLOBAL_SETTING_LOG_ERRORS = "debug_log_errors"
SETTING_LOG_URLS = "log_urls_topstreamfilm"
SETTING_DUMP_HTML = "dump_html_topstreamfilm"
SETTING_SHOW_URL_INFO = "show_url_info_topstreamfilm"
SETTING_LOG_ERRORS = "log_errors_topstreamfilm"
SETTING_GENRE_MAX_PAGES = "topstream_genre_max_pages" SETTING_GENRE_MAX_PAGES = "topstream_genre_max_pages"
DEFAULT_TIMEOUT = 20 DEFAULT_TIMEOUT = 20
DEFAULT_PREFERRED_HOSTERS = ["supervideo", "dropload", "voe"] DEFAULT_PREFERRED_HOSTERS = ["supervideo", "dropload", "voe"]
@@ -101,10 +106,8 @@ def _matches_query(query: str, *, title: str, description: str) -> bool:
normalized_query = _normalize_search_text(query) normalized_query = _normalize_search_text(query)
if not normalized_query: if not normalized_query:
return False return False
haystack = _normalize_search_text(title) haystack = f" {_normalize_search_text(title)} "
if not haystack: return f" {normalized_query} " in haystack
return False
return normalized_query in haystack
def _strip_der_film_suffix(title: str) -> str: def _strip_der_film_suffix(title: str) -> str:
@@ -119,7 +122,7 @@ def _strip_der_film_suffix(title: str) -> str:
class TopstreamfilmPlugin(BasisPlugin): class TopstreamfilmPlugin(BasisPlugin):
"""Integration fuer eine HTML-basierte Suchseite.""" """Integration fuer eine HTML-basierte Suchseite."""
name = "TopStreamFilm" name = "Topstreamfilm"
def __init__(self) -> None: def __init__(self) -> None:
self._session: RequestsSession | None = None self._session: RequestsSession | None = None
@@ -348,20 +351,43 @@ class TopstreamfilmPlugin(BasisPlugin):
return default return default
def _notify_url(self, url: str) -> None: def _notify_url(self, url: str) -> None:
notify_url(ADDON_ID, heading=self.name, url=url, enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO) notify_url(
ADDON_ID,
heading=self.name,
url=url,
enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO,
plugin_setting_id=SETTING_SHOW_URL_INFO,
)
def _log_url(self, url: str, *, kind: str = "VISIT") -> None: def _log_url(self, url: str, *, kind: str = "VISIT") -> None:
log_url(ADDON_ID, enabled_setting_id=GLOBAL_SETTING_LOG_URLS, log_filename="topstream_urls.log", url=url, kind=kind) log_url(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="topstream_urls.log",
url=url,
kind=kind,
)
def _log_response_html(self, url: str, body: str) -> None: def _log_response_html(self, url: str, body: str) -> None:
dump_response_html( dump_response_html(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_DUMP_HTML, enabled_setting_id=GLOBAL_SETTING_DUMP_HTML,
plugin_setting_id=SETTING_DUMP_HTML,
url=url, url=url,
body=body, body=body,
filename_prefix="topstream_response", filename_prefix="topstream_response",
) )
def _log_error(self, message: str) -> None:
    """Append *message* to this plugin's error log file.

    Delegates to the shared helper; whether anything is written is gated by
    the global and the plugin-specific error-logging settings.
    """
    log_error(
        ADDON_ID,
        enabled_setting_id=GLOBAL_SETTING_LOG_ERRORS,
        plugin_setting_id=SETTING_LOG_ERRORS,
        log_filename="topstream_errors.log",
        message=message,
    )
def capabilities(self) -> set[str]: def capabilities(self) -> set[str]:
return {"genres", "popular_series"} return {"genres", "popular_series"}
@@ -557,8 +583,12 @@ class TopstreamfilmPlugin(BasisPlugin):
session = self._get_session() session = self._get_session()
self._log_url(url, kind="VISIT") self._log_url(url, kind="VISIT")
self._notify_url(url) self._notify_url(url)
try:
response = session.get(url, timeout=DEFAULT_TIMEOUT) response = session.get(url, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc:
self._log_error(f"GET {url} failed: {exc}")
raise
self._log_url(response.url, kind="OK") self._log_url(response.url, kind="OK")
self._log_response_html(response.url, response.text) self._log_response_html(response.url, response.text)
return BeautifulSoup(response.text, "html.parser") return BeautifulSoup(response.text, "html.parser")
@@ -803,12 +833,16 @@ class TopstreamfilmPlugin(BasisPlugin):
request_url = f"{url}?{urlencode(params)}" request_url = f"{url}?{urlencode(params)}"
self._log_url(request_url, kind="GET") self._log_url(request_url, kind="GET")
self._notify_url(request_url) self._notify_url(request_url)
try:
response = session.get( response = session.get(
url, url,
params=params, params=params,
timeout=DEFAULT_TIMEOUT, timeout=DEFAULT_TIMEOUT,
) )
response.raise_for_status() response.raise_for_status()
except Exception as exc:
self._log_error(f"GET {request_url} failed: {exc}")
raise
self._log_url(response.url, kind="OK") self._log_url(response.url, kind="OK")
self._log_response_html(response.url, response.text) self._log_response_html(response.url, response.text)

View File

@@ -1,24 +1,52 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<settings> <settings>
<category label="Allgemein"> <category label="Logging">
<setting id="debug_log_urls" type="bool" label="Debug: URL-Log aktivieren (global)" default="false" /> <setting id="debug_log_urls" type="bool" label="URL-Logging aktivieren (global)" default="false" />
<setting id="debug_dump_html" type="bool" label="Debug: HTML-Antworten speichern (global)" default="false" /> <setting id="debug_dump_html" type="bool" label="HTML-Dumps aktivieren (global)" default="false" />
<setting id="debug_show_url_info" type="bool" label="Debug: Aufgerufene URL anzeigen (global)" default="false" /> <setting id="debug_show_url_info" type="bool" label="URL-Info anzeigen (global)" default="false" />
<setting id="debug_log_errors" type="bool" label="Fehler-Logging aktivieren (global)" default="false" />
<setting id="log_max_mb" type="number" label="URL-Log: max. Datei-Größe (MB)" default="5" />
<setting id="log_max_files" type="number" label="URL-Log: max. Rotationen" default="3" />
<setting id="dump_max_files" type="number" label="HTML-Dumps: max. Dateien pro Plugin" default="200" />
<setting id="log_urls_serienstream" type="bool" label="Serienstream: URL-Logging" default="false" />
<setting id="dump_html_serienstream" type="bool" label="Serienstream: HTML-Dumps" default="false" />
<setting id="show_url_info_serienstream" type="bool" label="Serienstream: URL-Info anzeigen" default="false" />
<setting id="log_errors_serienstream" type="bool" label="Serienstream: Fehler loggen" default="false" />
<setting id="log_urls_aniworld" type="bool" label="Aniworld: URL-Logging" default="false" />
<setting id="dump_html_aniworld" type="bool" label="Aniworld: HTML-Dumps" default="false" />
<setting id="show_url_info_aniworld" type="bool" label="Aniworld: URL-Info anzeigen" default="false" />
<setting id="log_errors_aniworld" type="bool" label="Aniworld: Fehler loggen" default="false" />
<setting id="log_urls_topstreamfilm" type="bool" label="Topstreamfilm: URL-Logging" default="false" />
<setting id="dump_html_topstreamfilm" type="bool" label="Topstreamfilm: HTML-Dumps" default="false" />
<setting id="show_url_info_topstreamfilm" type="bool" label="Topstreamfilm: URL-Info anzeigen" default="false" />
<setting id="log_errors_topstreamfilm" type="bool" label="Topstreamfilm: Fehler loggen" default="false" />
<setting id="log_urls_einschalten" type="bool" label="Einschalten: URL-Logging" default="false" />
<setting id="dump_html_einschalten" type="bool" label="Einschalten: HTML-Dumps" default="false" />
<setting id="show_url_info_einschalten" type="bool" label="Einschalten: URL-Info anzeigen" default="false" />
<setting id="log_errors_einschalten" type="bool" label="Einschalten: Fehler loggen" default="false" />
<setting id="log_urls_filmpalast" type="bool" label="Filmpalast: URL-Logging" default="false" />
<setting id="dump_html_filmpalast" type="bool" label="Filmpalast: HTML-Dumps" default="false" />
<setting id="show_url_info_filmpalast" type="bool" label="Filmpalast: URL-Info anzeigen" default="false" />
<setting id="log_errors_filmpalast" type="bool" label="Filmpalast: Fehler loggen" default="false" />
</category> </category>
<category label="TopStream"> <category label="TopStream">
<setting id="topstream_base_url" type="text" label="Basis-URL (z.B. https://www.meineseite)" default="https://www.meineseite" /> <setting id="topstream_base_url" type="text" label="Domain (BASE_URL)" default="https://topstreamfilm.live" />
<setting id="topstream_genre_max_pages" type="number" label="Genres: max. Seiten laden (Pagination)" default="20" /> <setting id="topstream_genre_max_pages" type="number" label="Genres: max. Seiten laden (Pagination)" default="20" />
</category> </category>
<category label="SerienStream">
<setting id="serienstream_base_url" type="text" label="Domain (BASE_URL)" default="https://s.to" />
</category>
<category label="AniWorld">
<setting id="aniworld_base_url" type="text" label="Domain (BASE_URL)" default="https://aniworld.to" />
</category>
<category label="Einschalten"> <category label="Einschalten">
<setting id="einschalten_base_url" type="text" label="Basis-URL (nur eigene/autorisiert betriebene Quelle)" default="" /> <setting id="einschalten_base_url" type="text" label="Domain (BASE_URL)" default="https://einschalten.in" />
<setting id="einschalten_index_path" type="text" label="Index-Pfad (z.B. /)" default="/" /> </category>
<setting id="einschalten_new_titles_path" type="text" label="Neue-Titel-Pfad (z.B. /movies/new)" default="/movies/new" /> <category label="Filmpalast">
<setting id="einschalten_search_path" type="text" label="Suche-Pfad (z.B. /search)" default="/search" /> <setting id="filmpalast_base_url" type="text" label="Domain (BASE_URL)" default="https://filmpalast.to" />
<setting id="einschalten_genres_path" type="text" label="Genres-Pfad (z.B. /genres)" default="/genres" />
<setting id="einschalten_enable_playback" type="bool" label="Wiedergabe aktivieren (nur autorisierte Quellen)" default="false" />
<setting id="einschalten_watch_path_template" type="text" label="Watch-Pfad-Template (z.B. /api/movies/{id}/watch)" default="/api/movies/{id}/watch" />
</category> </category>
<category label="TMDB"> <category label="TMDB">
<setting id="tmdb_enabled" type="bool" label="TMDB aktivieren" default="true" />
<setting id="tmdb_api_key" type="text" label="TMDB API Key" default="" /> <setting id="tmdb_api_key" type="text" label="TMDB API Key" default="" />
<setting id="tmdb_language" type="text" label="TMDB Sprache (z.B. de-DE)" default="de-DE" /> <setting id="tmdb_language" type="text" label="TMDB Sprache (z.B. de-DE)" default="de-DE" />
<setting id="tmdb_prefetch_concurrency" type="number" label="TMDB: Parallelität (Prefetch, 1-20)" default="6" /> <setting id="tmdb_prefetch_concurrency" type="number" label="TMDB: Parallelität (Prefetch, 1-20)" default="6" />

54
docs/DEFAULT_ROUTER.md Normal file
View File

@@ -0,0 +1,54 @@
# ViewIT Hauptlogik (`addon/default.py`)
Dieses Dokument beschreibt den Einstiegspunkt des Addons und die zentrale Steuerlogik.
## Aufgabe der Datei
`addon/default.py` ist der Router des Addons. Er:
- lädt die PluginModule dynamisch,
- stellt die KodiNavigation bereit,
- übersetzt UIAktionen in PluginAufrufe,
- startet die Wiedergabe und verwaltet Playstate/Resume.
## Ablauf (high level)
1. **Plugin-Discovery**: Lädt alle `addon/plugins/*.py` (ohne `_`-Präfix) und instanziiert Klassen, die von `BasisPlugin` erben.
2. **Navigation**: Baut Kodi-Listen (Serien/Staffeln/Episoden) auf Basis der Plugin-Antworten.
3. **Playback**: Holt Stream-Links aus dem Plugin und startet die Wiedergabe.
4. **Playstate**: Speichert Resume-Daten lokal (`playstate.json`) und setzt `playcount`/Resume-Infos.
## Routing & Aktionen
Die Datei arbeitet mit URLParametern (KodiPluginStandard). Typische Aktionen:
- `search` → Suche über ein Plugin
- `seasons` → Staffeln für einen Titel
- `episodes` → Episoden für eine Staffel
- `play` → StreamLink auflösen und abspielen
Die genaue Aktion wird aus den QueryParametern gelesen und an das entsprechende Plugin delegiert.
## Playstate (Resume/Watched)
- **Speicherort**: `playstate.json` im AddonProfilordner.
- **Key**: Kombination aus PluginName, Titel, Staffel, Episode.
- **Verwendung**:
- `playcount` wird gesetzt, wenn „gesehen“ markiert ist.
- `resume_position`/`resume_total` werden gesetzt, wenn vorhanden.
## Wichtige Hilfsfunktionen
- **PluginLoader**: findet & instanziiert Plugins.
- **UIHelper**: setzt ContentType, baut Verzeichnisseinträge.
- **PlaystateHelper**: `_load_playstate`, `_save_playstate`, `_apply_playstate_to_info`.
## Fehlerbehandlung
- PluginImportfehler werden isoliert behandelt, damit das Addon nicht komplett ausfällt.
- NetzwerkFehler werden in Plugins abgefangen, `default.py` sollte nur saubere Fehlermeldungen weitergeben.
## Debugging
- Globale DebugSettings werden über `addon/resources/settings.xml` gesteuert.
- Plugins loggen URLs/HTML optional (siehe jeweilige PluginDoku).
## Änderungen & Erweiterungen
Für neue Aktionen:
1. Neue Aktion im Router registrieren.
2. UIEinträge passend anlegen.
3. Entsprechende PluginMethode definieren oder erweitern.
## Hinweis zur Erstellung
Teile dieser Dokumentation wurden KIgestützt erstellt und bei Bedarf manuell angepasst.

109
docs/PLUGIN_DEVELOPMENT.md Normal file
View File

@@ -0,0 +1,109 @@
# ViewIT Entwicklerdoku Plugins (`addon/plugins/*_plugin.py`)
Diese Doku beschreibt, wie Plugins im ViewITAddon aufgebaut sind und wie neue ProviderIntegrationen entwickelt werden.
## Grundlagen
- Jedes Plugin ist eine einzelne Datei unter `addon/plugins/`.
- Dateinamen **ohne** `_`-Präfix werden automatisch geladen.
- Jede Datei enthält eine Klasse, die von `BasisPlugin` erbt.
## PflichtMethoden (BasisPlugin)
Jedes Plugin muss diese Methoden implementieren:
- `async search_titles(query: str) -> list[str]`
- `seasons_for(title: str) -> list[str]`
- `episodes_for(title: str, season: str) -> list[str]`
## Vertrag Plugin ↔ Hauptlogik (`default.py`)
Die Hauptlogik ruft Plugin-Methoden auf und verarbeitet ausschließlich deren Rückgaben.
Wesentliche Rückgaben an die Hauptlogik:
- `search_titles(...)` → Liste von Titel-Strings für die Trefferliste
- `seasons_for(...)` → Liste von Staffel-Labels
- `episodes_for(...)` → Liste von Episoden-Labels
- `stream_link_for(...)` → Hoster-/Player-Link (nicht zwingend finale Media-URL)
- `resolve_stream_link(...)` → finale/spielbare URL nach Redirect/Resolver
- Optional `available_hosters_for(...)` → auswählbare Hoster-Namen im Dialog
- Optional `series_url_for_title(...)` → stabile Detail-URL pro Titel für Folgeaufrufe
- Optional `remember_series_url(...)` → Übernahme einer bereits bekannten Detail-URL
Standard für Film-Provider (ohne echte Staffeln):
- `seasons_for(title)` gibt `["Film"]` zurück
- `episodes_for(title, "Film")` gibt `["Stream"]` zurück
## Optionale Features (Capabilities)
Über `capabilities()` kann das Plugin zusätzliche Funktionen anbieten:
- `popular_series``popular_series()`
- `genres``genres()` + `titles_for_genre(genre)`
- `latest_episodes``latest_episodes(page=1)`
## Empfohlene Struktur
- Konstanten für URLs/Endpoints (BASE_URL, Pfade, Templates)
- `requests` + `bs4` optional (fehlt beides, Plugin sollte sauber deaktivieren)
- HelperFunktionen für Parsing und Normalisierung
- Caches für Such, Staffel und EpisodenDaten
## Suche (aktuelle Policy)
- **Nur TitelMatches**
- **Wortbasierter Match** nach Normalisierung (Lowercase + NichtAlnum → Leerzeichen)
- Keine Teilwort-Treffer innerhalb eines Wortes (Beispiel: `hund` matcht nicht `thunder`)
- Keine Beschreibung/Plot/Meta für Matches
## Namensgebung
- PluginKlassenname: `XxxPlugin`
- Anzeigename (Property `name`): **mit Großbuchstaben beginnen** (z.B. `Serienstream`, `Einschalten`)
## Settings pro Plugin
Standard: `*_base_url` (Domain / BASE_URL)
- Beispiele:
- `serienstream_base_url`
- `aniworld_base_url`
- `einschalten_base_url`
- `topstream_base_url`
- `filmpalast_base_url`
## Playback
- `stream_link_for(...)` implementieren (liefert bevorzugten Hoster-Link).
- `available_hosters_for(...)` bereitstellen, wenn die Seite mehrere Hoster anbietet.
- `resolve_stream_link(...)` nach einheitlichem Flow umsetzen:
1. Redirects auflösen (falls vorhanden)
2. ResolveURL (`resolveurl_backend.resolve`) versuchen
3. Bei Fehlschlag auf den besten verfügbaren Link zurückfallen
- Optional `set_preferred_hosters(...)` unterstützen, damit die Hoster-Auswahl aus der Hauptlogik direkt greift.
## StandardFlow (empfohlen)
1. **Suche**: nur Titel liefern und Titel→Detail-URL mappen.
2. **Navigation**: `series_url_for_title`/`remember_series_url` unterstützen, damit URLs zwischen Aufrufen stabil bleiben.
3. **Auswahl Hoster**: Hoster-Namen aus der Detailseite extrahieren und anbieten.
4. **Playback**: Hoster-Link liefern, danach konsistent über `resolve_stream_link` finalisieren.
5. **Fallbacks**: bei Layout-Unterschieden defensiv parsen und Logging aktivierbar halten.
## Debugging
Global gesteuert über Settings:
- `debug_log_urls`
- `debug_dump_html`
- `debug_show_url_info`
Plugins sollten die Helper aus `addon/plugin_helpers.py` nutzen:
- `log_url(...)`
- `dump_response_html(...)`
- `notify_url(...)`
## Template
`addon/plugins/_template_plugin.py` dient als Startpunkt für neue Provider.
## Build & Test
- ZIP bauen: `./scripts/build_kodi_zip.sh`
- AddonOrdner: `./scripts/build_install_addon.sh`
## BeispielCheckliste
- [ ] `name` korrekt gesetzt
- [ ] `*_base_url` in Settings vorhanden
- [ ] Suche matcht nur Titel und wortbasiert
- [ ] `stream_link_for` + `resolve_stream_link` folgen dem Standard-Flow
- [ ] Optional: `available_hosters_for` + `set_preferred_hosters` vorhanden
- [ ] Optional: `series_url_for_title` + `remember_series_url` vorhanden
- [ ] Fehlerbehandlung und Timeouts vorhanden
- [ ] Optional: Caches für Performance
## Hinweis zur Erstellung
Teile dieser Dokumentation wurden KIgestützt erstellt und bei Bedarf manuell angepasst.

View File

@@ -6,12 +6,17 @@ Dieses Dokument beschreibt, wie das Plugin-System von **ViewIt** funktioniert un
ViewIt lädt Provider-Integrationen dynamisch aus `addon/plugins/*.py`. Jede Datei enthält eine Klasse, die von `BasisPlugin` erbt. Beim Start werden alle Plugins instanziiert und nur aktiv genutzt, wenn sie verfügbar sind. ViewIt lädt Provider-Integrationen dynamisch aus `addon/plugins/*.py`. Jede Datei enthält eine Klasse, die von `BasisPlugin` erbt. Beim Start werden alle Plugins instanziiert und nur aktiv genutzt, wenn sie verfügbar sind.
Weitere Details:
- `docs/DEFAULT_ROUTER.md` (Hauptlogik in `addon/default.py`)
- `docs/PLUGIN_DEVELOPMENT.md` (Entwicklerdoku für Plugins)
### Aktuelle Plugins ### Aktuelle Plugins
- `serienstream_plugin.py` Serienstream (s.to) - `serienstream_plugin.py` Serienstream (s.to)
- `topstreamfilm_plugin.py` Topstreamfilm - `topstreamfilm_plugin.py` Topstreamfilm
- `einschalten_plugin.py` Einschalten - `einschalten_plugin.py` Einschalten
- `aniworld_plugin.py` Aniworld - `aniworld_plugin.py` Aniworld
- `filmpalast_plugin.py` Filmpalast
- `_template_plugin.py` Vorlage für neue Plugins - `_template_plugin.py` Vorlage für neue Plugins
### Plugin-Discovery (Ladeprozess) ### Plugin-Discovery (Ladeprozess)

20
pyproject.toml Normal file
View File

@@ -0,0 +1,20 @@
[tool.pytest.ini_options]
addopts = "-q --cov=addon --cov-report=term-missing"
python_files = ["test_*.py"]
norecursedirs = ["scripts"]
markers = [
"live: real HTTP requests (set LIVE_TESTS=1 to run)",
"perf: performance benchmarks",
]
[tool.coverage.run]
source = ["addon"]
branch = true
omit = [
"*/__pycache__/*",
"addon/resources/*",
]
[tool.coverage.report]
show_missing = true
skip_empty = true

2
requirements-dev.txt Normal file
View File

@@ -0,0 +1,2 @@
pytest>=9,<10
pytest-cov>=5,<8