nightly: playback fast-path, Windows asyncio fix, v0.1.56

2026-02-19 14:10:09 +01:00
parent 307df97d74
commit cbc18eb911
8 changed files with 382 additions and 44 deletions

View File

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<addon id="plugin.video.viewit" name="ViewIt" version="0.1.54" provider-name="ViewIt">
+<addon id="plugin.video.viewit" name="ViewIt" version="0.1.56" provider-name="ViewIt">
     <requires>
         <import addon="xbmc.python" version="3.0.0" />
         <import addon="script.module.requests" />

View File

@@ -16,11 +16,26 @@ import json
 import os
 import re
 import sys
+import threading
 import xml.etree.ElementTree as ET
 from pathlib import Path
 from types import ModuleType
 from urllib.parse import parse_qs, urlencode


+def _ensure_windows_selector_policy() -> None:
+    """Force a selector event loop on Windows (thread-compatible in Kodi)."""
+    if not sys.platform.startswith("win"):
+        return
+    try:
+        current = asyncio.get_event_loop_policy()
+        if current.__class__.__name__ == "WindowsSelectorEventLoopPolicy":
+            return
+        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
+    except Exception:
+        # Fallback: if the policy is unavailable, keep running with the default policy.
+        return
+
+
 try:  # pragma: no cover - Kodi runtime
     import xbmc  # type: ignore[import-not-found]
     import xbmcaddon  # type: ignore[import-not-found]
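
Background for the hunk above: since Python 3.8 the default event-loop policy on Windows is the proactor-based one, and the addon drives coroutines from worker threads inside Kodi, which is what the selector-policy swap is for. A minimal standalone sketch of the same pattern (the _probe coroutine and main harness are illustrative, not part of the addon):

import asyncio
import sys
import threading

def ensure_windows_selector_policy() -> None:
    # Same idea as the addon helper: only swap the policy on Windows.
    if sys.platform.startswith("win"):
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

async def _probe() -> str:
    # Stand-in coroutine; the addon runs plugin searches here instead.
    await asyncio.sleep(0)
    return "ok"

def main() -> None:
    ensure_windows_selector_policy()
    box: dict[str, str] = {}
    # asyncio.run() creates a fresh loop in whatever thread calls it.
    worker = threading.Thread(target=lambda: box.update(value=asyncio.run(_probe())))
    worker.start()
    worker.join()
    print(box.get("value"))  # "ok"

if __name__ == "__main__":
    main()
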
@@ -1195,7 +1210,16 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None:
     try:
         with _progress_dialog("Suche läuft", f"{plugin_name} (1/1) starte…") as progress:
             canceled = progress(5, f"{plugin_name} (1/1) Suche…")
-            results = _run_async(plugin.search_titles(query))
+            search_coro = plugin.search_titles(query)
+            try:
+                results = _run_async(search_coro)
+            except Exception:
+                if inspect.iscoroutine(search_coro):
+                    try:
+                        search_coro.close()
+                    except Exception:
+                        pass
+                raise
             results = [str(t).strip() for t in (results or []) if t and str(t).strip()]
             results.sort(key=lambda value: value.casefold())
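
The close-on-failure block above exists so that a coroutine that was created but never scheduled does not trigger Python's "coroutine ... was never awaited" RuntimeWarning when _run_async raises early. The hygiene pattern in isolation (names are illustrative):

import asyncio
import inspect

async def search_titles(query: str) -> list[str]:
    await asyncio.sleep(0)
    return [query]

def run_or_discard(coro):
    try:
        return asyncio.run(coro)
    except Exception:
        # If the runner failed before awaiting, close the coroutine so
        # Python does not warn about it being garbage-collected un-awaited.
        # Closing an already-finished coroutine is a harmless no-op.
        if inspect.iscoroutine(coro):
            try:
                coro.close()
            except Exception:
                pass
        raise

print(run_or_discard(search_titles("demo")))  # ['demo']
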
@@ -1347,18 +1371,35 @@ def _discover_plugins() -> dict[str, BasisPlugin]:
 def _run_async(coro):
     """Runs a coroutine even if Kodi already has an event loop."""
-    try:
-        loop = asyncio.get_event_loop()
-    except RuntimeError:
-        loop = None
-    if loop and loop.is_running():
-        temp_loop = asyncio.new_event_loop()
-        try:
-            return temp_loop.run_until_complete(coro)
-        finally:
-            temp_loop.close()
-    return asyncio.run(coro)
+    _ensure_windows_selector_policy()
+
+    def _run_with_asyncio_run():
+        return asyncio.run(coro)
+
+    try:
+        running_loop = asyncio.get_running_loop()
+    except RuntimeError:
+        running_loop = None
+    if running_loop and running_loop.is_running():
+        result_box: dict[str, object] = {}
+        error_box: dict[str, BaseException] = {}
+
+        def _worker() -> None:
+            try:
+                result_box["value"] = _run_with_asyncio_run()
+            except BaseException as exc:  # pragma: no cover - defensive
+                error_box["error"] = exc
+
+        worker = threading.Thread(target=_worker, name="viewit-async-runner")
+        worker.start()
+        worker.join()
+        if "error" in error_box:
+            raise error_box["error"]
+        return result_box.get("value")
+    return _run_with_asyncio_run()


 def _series_url_params(plugin: BasisPlugin, title: str) -> dict[str, str]:
     getter = getattr(plugin, "series_url_for_title", None)
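
Taken together, the reworked _run_async has two paths: asyncio.run() directly when the calling thread owns no loop, and a short-lived worker thread otherwise, since asyncio.run() refuses to start inside a running loop. A condensed standalone sketch of the pattern (the run_sync name is mine):

import asyncio
import threading
from typing import Any

def run_sync(coro) -> Any:
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        return asyncio.run(coro)  # no loop in this thread: the simple path
    # A loop is already running here; drive the coroutine on its own fresh
    # loop in a worker thread and marshal the result (or exception) back.
    result: dict[str, Any] = {}
    error: dict[str, BaseException] = {}

    def _worker() -> None:
        try:
            result["value"] = asyncio.run(coro)
        except BaseException as exc:
            error["error"] = exc

    thread = threading.Thread(target=_worker)
    thread.start()
    thread.join()
    if "error" in error:
        raise error["error"]
    return result.get("value")
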
@@ -1403,9 +1444,15 @@ def _show_search_results(query: str) -> None:
             canceled = progress(range_start, f"{plugin_name} ({plugin_index}/{total_plugins}) Suche…")
             if canceled:
                 break
+            search_coro = plugin.search_titles(query)
             try:
-                results = _run_async(plugin.search_titles(query))
+                results = _run_async(search_coro)
             except Exception as exc:
+                if inspect.iscoroutine(search_coro):
+                    try:
+                        search_coro.close()
+                    except Exception:
+                        pass
                 _log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING)
                 continue
             results = [str(t).strip() for t in (results or []) if t and str(t).strip()]
@@ -1687,6 +1734,8 @@ def _show_episodes(plugin_name: str, title: str, season: str, series_url: str =
     episodes = list(plugin.episodes_for(title, season))
     if episodes:
+        episode_url_getter = getattr(plugin, "episode_url_for", None)
+        supports_direct_episode_url = callable(getattr(plugin, "stream_link_for_url", None))
         use_source, show_tmdb, _prefer_source = _metadata_policy(
             plugin_name, plugin, allow_tmdb=_tmdb_enabled()
         )
@@ -1750,11 +1799,25 @@ def _show_episodes(plugin_name: str, title: str, season: str, series_url: str =
             merged_info = _apply_playstate_to_info(merged_info, _get_playstate(key))
             display_label = episode
+            play_params = {
+                "plugin": plugin_name,
+                "title": title,
+                "season": season,
+                "episode": episode,
+                "series_url": series_url,
+            }
+            if supports_direct_episode_url and callable(episode_url_getter):
+                try:
+                    episode_url = str(episode_url_getter(title, season, episode) or "").strip()
+                except Exception:
+                    episode_url = ""
+                if episode_url:
+                    play_params["url"] = episode_url
             _add_directory_item(
                 handle,
                 display_label,
                 "play_episode",
-                {"plugin": plugin_name, "title": title, "season": season, "episode": episode},
+                play_params,
                 is_folder=False,
                 info_labels=merged_info,
                 art=merged_art,
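
The point of play_params above is the optional "url" key: when the plugin can pre-resolve an episode's detail URL while the directory is being built, the later playback call can skip the title/season/episode lookup entirely. A sketch of how such params become a plugin:// route (all values invented):

from urllib.parse import urlencode

play_params = {
    "action": "play_episode",
    "plugin": "serienstream",
    "title": "Example Show",
    "season": "Staffel 1",
    "episode": "Episode 3",
    # Hypothetical pre-resolved detail URL; this is the fast path.
    "url": "https://s.to/serie/stream/example-show/staffel-1/episode-3",
}
route = "plugin://plugin.video.viewit/?" + urlencode(play_params)
print(route)
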
@@ -3155,8 +3218,34 @@ def _play_episode(
     season: str,
     episode: str,
     *,
+    episode_url: str = "",
+    series_url: str = "",
     resolve_handle: int | None = None,
 ) -> None:
+    episode_url = (episode_url or "").strip()
+    if episode_url:
+        _play_episode_url(
+            plugin_name,
+            title=title,
+            season_number=_extract_first_int(season) or 0,
+            episode_number=_extract_first_int(episode) or 0,
+            episode_url=episode_url,
+            season_label_override=season,
+            episode_label_override=episode,
+            resolve_handle=resolve_handle,
+        )
+        return
+    series_url = (series_url or "").strip()
+    if series_url:
+        plugin_for_url = _discover_plugins().get(plugin_name)
+        remember_series_url = getattr(plugin_for_url, "remember_series_url", None) if plugin_for_url is not None else None
+        if callable(remember_series_url):
+            try:
+                remember_series_url(title, series_url)
+            except Exception:
+                pass
     _log(f"Play anfordern: {plugin_name} / {title} / {season} / {episode}")
     plugin = _discover_plugins().get(plugin_name)
     if plugin is None:
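
The fast path feeds labels such as "Staffel 2" / "Episode 5" through _extract_first_int to recover numeric season and episode values. That helper is defined elsewhere in the file; a plausible sketch of its behavior (an assumption, not the committed implementation):

import re
from typing import Optional

def _extract_first_int(value: str) -> Optional[int]:
    # Hypothetical: "Staffel 2" -> 2, "Episode 10" -> 10, "Specials" -> None.
    match = re.search(r"\d+", value or "")
    return int(match.group(0)) if match else None

assert _extract_first_int("Staffel 2") == 2
assert _extract_first_int("Specials") is None
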
@@ -3233,10 +3322,14 @@ def _play_episode_url(
     season_number: int,
     episode_number: int,
     episode_url: str,
+    season_label_override: str = "",
+    episode_label_override: str = "",
     resolve_handle: int | None = None,
 ) -> None:
-    season_label = f"Staffel {season_number}" if season_number > 0 else ""
-    episode_label = f"Episode {episode_number}" if episode_number > 0 else ""
+    season_label = (season_label_override or "").strip() or (f"Staffel {season_number}" if season_number > 0 else "")
+    episode_label = (episode_label_override or "").strip() or (
+        f"Episode {episode_number}" if episode_number > 0 else ""
+    )
     _log(f"Play (URL) anfordern: {plugin_name} / {title} / {season_label} / {episode_label} / {episode_url}")
     plugin = _discover_plugins().get(plugin_name)
     if plugin is None:
@@ -3405,6 +3498,8 @@ def run() -> None:
             params.get("title", ""),
             params.get("season", ""),
             params.get("episode", ""),
+            episode_url=params.get("url", ""),
+            series_url=params.get("series_url", ""),
             resolve_handle=_get_handle(),
         )
     elif action == "play_movie":
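
For context: Kodi hands the route back to run() as a query string, so the router only ever sees flat string params, and params.get("url", "") keeps the fast path strictly optional. A minimal round trip (query string invented):

from urllib.parse import parse_qs

query = "action=play_episode&plugin=serienstream&title=Example&url=https%3A%2F%2Fs.to%2Fserie%2Fstream%2Fexample"
params = {key: values[0] for key, values in parse_qs(query).items()}
episode_url = params.get("url", "")        # "" when the directory item had no fast path
series_url = params.get("series_url", "")  # likewise optional
print(episode_url, repr(series_url))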

View File

@@ -9,7 +9,7 @@ Zum Verwenden:
 from __future__ import annotations

 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, List, Optional, TypeAlias
+from typing import TYPE_CHECKING, Any, List, Optional

 try:  # pragma: no cover - optional dependency
     import requests
@@ -34,8 +34,8 @@ if TYPE_CHECKING: # pragma: no cover
     from requests import Session as RequestsSession
     from bs4 import BeautifulSoup as BeautifulSoupT  # type: ignore[import-not-found]
 else:  # pragma: no cover
-    RequestsSession: TypeAlias = Any
-    BeautifulSoupT: TypeAlias = Any
+    RequestsSession = Any
+    BeautifulSoupT = Any

 ADDON_ID = "plugin.video.viewit"
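
The TypeAlias removal here, repeated in every plugin module below, looks like a compatibility fix: typing.TypeAlias only exists from Python 3.10 onward, while Kodi installs may ship older interpreters. The surviving pattern stays type-checker-friendly without the runtime import:

from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:  # pragma: no cover - only evaluated by type checkers
    from requests import Session as RequestsSession
else:  # pragma: no cover - at runtime a plain Any works on any Python version
    RequestsSession = Any

def make_session() -> "RequestsSession":
    import requests
    return requests.Session()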

View File

@@ -13,7 +13,7 @@ import hashlib
 import json
 import re
 import time
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple

 try:  # pragma: no cover - optional dependency
     import requests
@@ -43,8 +43,8 @@ if TYPE_CHECKING: # pragma: no cover
     from requests import Session as RequestsSession
     from bs4 import BeautifulSoup as BeautifulSoupT  # type: ignore[import-not-found]
 else:  # pragma: no cover
-    RequestsSession: TypeAlias = Any
-    BeautifulSoupT: TypeAlias = Any
+    RequestsSession = Any
+    BeautifulSoupT = Any

 SETTING_BASE_URL = "aniworld_base_url"
@@ -1213,6 +1213,18 @@ class AniworldPlugin(BasisPlugin):
             _log_url(link, kind="FOUND")
         return link

+    def episode_url_for(self, title: str, season: str, episode: str) -> str:
+        cache_key = (title, season)
+        cached = self._episode_label_cache.get(cache_key)
+        if cached:
+            info = cached.get(episode)
+            if info and info.url:
+                return info.url
+        episode_info = self._lookup_episode(title, season, episode)
+        if episode_info and episode_info.url:
+            return episode_info.url
+        return ""
+
     def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]:
         if not self._requests_available:
             raise RuntimeError("AniworldPlugin kann ohne requests/bs4 keine Hoster laden.")
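
episode_url_for consults self._episode_label_cache before falling back to the slower _lookup_episode. The lookup implies a two-level mapping keyed by (title, season) and then by episode label; a sketch under that assumption (the EpisodeInfo shape is inferred from the info.url access, not taken from the source):

from dataclasses import dataclass
from typing import Dict, Tuple

@dataclass
class EpisodeInfo:
    label: str
    url: str

# (title, season) -> episode label -> EpisodeInfo, as implied by the lookup above.
_episode_label_cache: Dict[Tuple[str, str], Dict[str, EpisodeInfo]] = {}

_episode_label_cache[("Example Show", "Staffel 1")] = {
    "Episode 1": EpisodeInfo("Episode 1", "https://aniworld.to/anime/stream/example/staffel-1/episode-1"),
}

info = _episode_label_cache.get(("Example Show", "Staffel 1"), {}).get("Episode 1")
print(info.url if info and info.url else "")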

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
 from dataclasses import dataclass
 import re
 from urllib.parse import quote
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypeAlias
+from typing import TYPE_CHECKING, Any, Dict, List, Optional

 try:  # pragma: no cover - optional dependency
     import requests
@@ -27,8 +27,8 @@ if TYPE_CHECKING: # pragma: no cover
     from requests import Session as RequestsSession
     from bs4 import BeautifulSoup as BeautifulSoupT  # type: ignore[import-not-found]
 else:  # pragma: no cover
-    RequestsSession: TypeAlias = Any
-    BeautifulSoupT: TypeAlias = Any
+    RequestsSession = Any
+    BeautifulSoupT = Any

 ADDON_ID = "plugin.video.viewit"

View File

@@ -11,7 +11,7 @@ from dataclasses import dataclass
 import re
 from urllib.parse import quote, urlencode
 from urllib.parse import urljoin
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple

 try:  # pragma: no cover - optional dependency
     import requests
@@ -33,8 +33,8 @@ if TYPE_CHECKING: # pragma: no cover
     from requests import Session as RequestsSession
     from bs4 import BeautifulSoup as BeautifulSoupT  # type: ignore[import-not-found]
 else:  # pragma: no cover
-    RequestsSession: TypeAlias = Any
-    BeautifulSoupT: TypeAlias = Any
+    RequestsSession = Any
+    BeautifulSoupT = Any

 ADDON_ID = "plugin.video.viewit"
@@ -820,11 +820,23 @@ class FilmpalastPlugin(BasisPlugin):
     def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]:
         detail_url = self._detail_url_for_selection(title, season, episode)
-        hosters = self._hosters_for_detail_url(detail_url)
-        return list(hosters.keys())
+        return self.available_hosters_for_url(detail_url)

     def stream_link_for(self, title: str, season: str, episode: str) -> Optional[str]:
         detail_url = self._detail_url_for_selection(title, season, episode)
+        return self.stream_link_for_url(detail_url)
+
+    def episode_url_for(self, title: str, season: str, episode: str) -> str:
+        detail_url = self._detail_url_for_selection(title, season, episode)
+        return (detail_url or "").strip()
+
+    def available_hosters_for_url(self, episode_url: str) -> List[str]:
+        detail_url = (episode_url or "").strip()
+        hosters = self._hosters_for_detail_url(detail_url)
+        return list(hosters.keys())
+
+    def stream_link_for_url(self, episode_url: str) -> Optional[str]:
+        detail_url = (episode_url or "").strip()
         if not detail_url:
             return None
         hosters = self._hosters_for_detail_url(detail_url)
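
After this refactor the selection-based methods are thin wrappers over the new *_for_url variants, which is what lets the playback fast path hand a cached detail URL straight back in. Hypothetical usage, assuming an already constructed FilmpalastPlugin instance:

# plugin: a FilmpalastPlugin, e.g. from _discover_plugins() (lookup key assumed)
detail_url = plugin.episode_url_for("Example Movie", "", "")   # resolve once at listing time
if detail_url:
    hosters = plugin.available_hosters_for_url(detail_url)     # fast path at playback time
    link = plugin.stream_link_for_url(detail_url)              # no re-resolution of the selection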

View File

@@ -17,7 +17,8 @@ import os
 import re
 import time
 import unicodedata
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
+from urllib.parse import quote

 try:  # pragma: no cover - optional dependency
     import requests
@@ -49,14 +50,15 @@ if TYPE_CHECKING: # pragma: no cover
     from requests import Session as RequestsSession
     from bs4 import BeautifulSoup as BeautifulSoupT  # type: ignore[import-not-found]
 else:  # pragma: no cover
-    RequestsSession: TypeAlias = Any
-    BeautifulSoupT: TypeAlias = Any
+    RequestsSession = Any
+    BeautifulSoupT = Any

 SETTING_BASE_URL = "serienstream_base_url"
 DEFAULT_BASE_URL = "https://s.to"
 DEFAULT_PREFERRED_HOSTERS = ["voe"]
 DEFAULT_TIMEOUT = 20
+SEARCH_TIMEOUT = 8
 ADDON_ID = "plugin.video.viewit"
 GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
 GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
@@ -75,6 +77,9 @@ HEADERS = {
 SESSION_CACHE_TTL_SECONDS = 300
 SESSION_CACHE_PREFIX = "viewit.serienstream"
 SESSION_CACHE_MAX_TITLE_URLS = 800
+CATALOG_SEARCH_TTL_SECONDS = 600
+CATALOG_SEARCH_CACHE_KEY = "catalog_index"
+_CATALOG_INDEX_MEMORY: tuple[float, List["SeriesResult"]] = (0.0, [])


 @dataclass
@@ -451,20 +456,222 @@ def _extract_genre_names_from_html(body: str) -> List[str]:
         return names


+def _strip_tags(value: str) -> str:
+    return re.sub(r"<[^>]+>", " ", value or "")
+
+
+def _search_series_api(query: str) -> List[SeriesResult]:
+    query = (query or "").strip()
+    if not query:
+        return []
+    _ensure_requests()
+    sess = get_requests_session("serienstream", headers=HEADERS)
+    terms = [query]
+    if " " in query:
+        # Fallback: individual tokens often yield better hits from the API.
+        terms.extend([token for token in query.split() if token])
+    seen_urls: set[str] = set()
+    for term in terms:
+        try:
+            response = sess.get(
+                f"{_get_base_url()}/api/search/suggest",
+                params={"term": term},
+                headers=HEADERS,
+                timeout=SEARCH_TIMEOUT,
+            )
+            response.raise_for_status()
+        except Exception:
+            continue
+        try:
+            payload = response.json()
+        except Exception:
+            continue
+        shows = payload.get("shows") if isinstance(payload, dict) else None
+        if not isinstance(shows, list):
+            continue
+        results: List[SeriesResult] = []
+        for item in shows:
+            if not isinstance(item, dict):
+                continue
+            title = (item.get("name") or "").strip()
+            href = (item.get("url") or "").strip()
+            if not title or not href:
+                continue
+            url_abs = _absolute_url(href)
+            if not url_abs or url_abs in seen_urls:
+                continue
+            if "/staffel-" in url_abs or "/episode-" in url_abs:
+                continue
+            seen_urls.add(url_abs)
+            results.append(SeriesResult(title=title, description="", url=url_abs))
+        if not results:
+            continue
+        filtered = [entry for entry in results if _matches_query(query, title=entry.title)]
+        if filtered:
+            return filtered
+        # If only a token search was possible, at least return its results.
+        if term != query:
+            return results
+    return []
+
+
+def _search_series_server(query: str) -> List[SeriesResult]:
+    if not query:
+        return []
+    api_results = _search_series_api(query)
+    if api_results:
+        return api_results
+    base = _get_base_url()
+    search_url = f"{base}/search?q={quote(query)}"
+    alt_url = f"{base}/suche?q={quote(query)}"
+    for url in (search_url, alt_url):
+        try:
+            body = _get_html_simple(url)
+        except Exception:
+            continue
+        if not body:
+            continue
+        soup = BeautifulSoup(body, "html.parser")
+        root = soup.select_one(".search-results-list")
+        if root is None:
+            continue
+        seen_urls: set[str] = set()
+        results: List[SeriesResult] = []
+        for card in root.select(".cover-card"):
+            anchor = card.select_one("a[href*='/serie/']")
+            if not anchor:
+                continue
+            href = (anchor.get("href") or "").strip()
+            url_abs = _absolute_url(href)
+            if not url_abs or url_abs in seen_urls:
+                continue
+            if "/staffel-" in url_abs or "/episode-" in url_abs:
+                continue
+            title_tag = card.select_one(".show-title") or card.select_one("h3") or card.select_one("h4")
+            title = (title_tag.get_text(" ", strip=True) if title_tag else anchor.get_text(" ", strip=True)).strip()
+            if not title:
+                continue
+            seen_urls.add(url_abs)
+            results.append(SeriesResult(title=title, description="", url=url_abs))
+        if results:
+            return results
+    return []
+
+
+def _extract_catalog_index_from_html(body: str) -> List[SeriesResult]:
+    items: List[SeriesResult] = []
+    if not body:
+        return items
+    seen_urls: set[str] = set()
+    item_re = re.compile(
+        r"<li[^>]*class=[\"'][^\"']*series-item[^\"']*[\"'][^>]*>(.*?)</li>",
+        re.IGNORECASE | re.DOTALL,
+    )
+    anchor_re = re.compile(r"<a[^>]+href=[\"']([^\"']+)[\"'][^>]*>(.*?)</a>", re.IGNORECASE | re.DOTALL)
+    data_search_re = re.compile(r"data-search=[\"']([^\"']*)[\"']", re.IGNORECASE)
+    for match in item_re.finditer(body):
+        block = match.group(0)
+        inner = match.group(1) or ""
+        anchor_match = anchor_re.search(inner)
+        if not anchor_match:
+            continue
+        href = (anchor_match.group(1) or "").strip()
+        url = _absolute_url(href)
+        if not url or "/serie/" not in url or "/staffel-" in url or "/episode-" in url:
+            continue
+        if url in seen_urls:
+            continue
+        seen_urls.add(url)
+        title_raw = anchor_match.group(2) or ""
+        title = unescape(re.sub(r"\s+", " ", _strip_tags(title_raw))).strip()
+        if not title:
+            continue
+        search_match = data_search_re.search(block)
+        description = (search_match.group(1) or "").strip() if search_match else ""
+        items.append(SeriesResult(title=title, description=description, url=url))
+    return items
+
+
+def _catalog_index_from_soup(soup: BeautifulSoupT) -> List[SeriesResult]:
+    items: List[SeriesResult] = []
+    if not soup:
+        return items
+    seen_urls: set[str] = set()
+    for item in soup.select("li.series-item"):
+        anchor = item.find("a", href=True)
+        if not anchor:
+            continue
+        href = (anchor.get("href") or "").strip()
+        url = _absolute_url(href)
+        if not url or "/serie/" not in url or "/staffel-" in url or "/episode-" in url:
+            continue
+        if url in seen_urls:
+            continue
+        seen_urls.add(url)
+        title = (anchor.get_text(" ", strip=True) or "").strip()
+        if not title:
+            continue
+        description = (item.get("data-search") or "").strip()
+        items.append(SeriesResult(title=title, description=description, url=url))
+    return items
+
+
+def _load_catalog_index_from_cache() -> Optional[List[SeriesResult]]:
+    global _CATALOG_INDEX_MEMORY
+    expires_at, cached = _CATALOG_INDEX_MEMORY
+    if cached and expires_at > time.time():
+        return list(cached)
+    raw = _session_cache_get(CATALOG_SEARCH_CACHE_KEY)
+    if not isinstance(raw, list):
+        return None
+    items: List[SeriesResult] = []
+    for entry in raw:
+        if not isinstance(entry, list) or len(entry) < 2:
+            continue
+        title = str(entry[0] or "").strip()
+        url = str(entry[1] or "").strip()
+        description = str(entry[2] or "") if len(entry) > 2 else ""
+        if title and url:
+            items.append(SeriesResult(title=title, description=description, url=url))
+    if items:
+        _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(items))
+    return items or None
+
+
+def _store_catalog_index_in_cache(items: List[SeriesResult]) -> None:
+    global _CATALOG_INDEX_MEMORY
+    if not items:
+        return
+    _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(items))
+    payload: List[List[str]] = []
+    for entry in items:
+        if not entry.title or not entry.url:
+            continue
+        payload.append([entry.title, entry.url, entry.description])
+    _session_cache_set(CATALOG_SEARCH_CACHE_KEY, payload, ttl_seconds=CATALOG_SEARCH_TTL_SECONDS)
+
+
 def search_series(query: str) -> List[SeriesResult]:
-    """Searches the (/serien) catalog (genre list) for series by title/alt title."""
+    """Searches the (/serien) catalog for series by title. Uses a cache plus a single-pass filter."""
     _ensure_requests()
     if not _normalize_search_text(query):
         return []
-    # Direct fetch as in fetch_serien.py.
+    server_results = _search_series_server(query)
+    if server_results:
+        return [entry for entry in server_results if entry.title and _matches_query(query, title=entry.title)]
+    cached = _load_catalog_index_from_cache()
+    if cached is not None:
+        return [entry for entry in cached if entry.title and _matches_query(query, title=entry.title)]
     catalog_url = f"{_get_base_url()}/serien?by=genre"
-    soup = _get_soup_simple(catalog_url)
-    results: List[SeriesResult] = []
-    for series in parse_series_catalog(soup).values():
-        for entry in series:
-            if entry.title and _matches_query(query, title=entry.title):
-                results.append(entry)
-    return results
+    body = _get_html_simple(catalog_url)
+    items = _extract_catalog_index_from_html(body)
+    if not items:
+        soup = BeautifulSoup(body, "html.parser")
+        items = _catalog_index_from_soup(soup)
+    if items:
+        _store_catalog_index_in_cache(items)
+    return [entry for entry in items if entry.title and _matches_query(query, title=entry.title)]


 def parse_series_catalog(soup: BeautifulSoupT) -> Dict[str, List[SeriesResult]]:
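
The catalog index above is cached in two tiers: a module-level (expires_at, items) tuple for the current process, and the persistent session cache under CATALOG_SEARCH_CACHE_KEY, both with the same 600-second TTL. The pattern stripped to its essentials (standalone sketch without the Kodi session cache):

import time
from typing import List, Optional, Tuple

TTL_SECONDS = 600
_memory: Tuple[float, List[str]] = (0.0, [])

def load_cached() -> Optional[List[str]]:
    expires_at, items = _memory
    if items and expires_at > time.time():
        return list(items)  # copy, so callers cannot mutate the cache
    return None             # expired or empty: caller refetches and stores

def store(items: List[str]) -> None:
    global _memory
    if items:
        _memory = (time.time() + TTL_SECONDS, list(items))

store(["Example Show"])
print(load_cached())  # ['Example Show'] until the TTL lapses
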
@@ -1569,6 +1776,18 @@ class SerienstreamPlugin(BasisPlugin):
         except Exception as exc:  # pragma: no cover - defensive logging
             raise RuntimeError(f"Stream-Link konnte nicht geladen werden: {exc}") from exc

+    def episode_url_for(self, title: str, season: str, episode: str) -> str:
+        cache_key = (title, season)
+        cached = self._episode_label_cache.get(cache_key)
+        if cached:
+            info = cached.get(episode)
+            if info and info.url:
+                return info.url
+        episode_info = self._lookup_episode(title, season, episode)
+        if episode_info and episode_info.url:
+            return episode_info.url
+        return ""
+
     def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]:
         if not self._requests_available:
             raise RuntimeError("SerienstreamPlugin kann ohne requests/bs4 keine Hoster laden.")

View File

@@ -19,7 +19,7 @@ import hashlib
 import os
 import re
 import json
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypeAlias
+from typing import TYPE_CHECKING, Any, Dict, List, Optional
 from urllib.parse import urlencode, urljoin

 try:  # pragma: no cover - optional dependency
@@ -51,8 +51,8 @@ if TYPE_CHECKING: # pragma: no cover
     from requests import Session as RequestsSession
     from bs4 import BeautifulSoup as BeautifulSoupT  # type: ignore[import-not-found]
 else:  # pragma: no cover
-    RequestsSession: TypeAlias = Any
-    BeautifulSoupT: TypeAlias = Any
+    RequestsSession = Any
+    BeautifulSoupT = Any

 ADDON_ID = "plugin.video.viewit"