Compare commits

...

2 Commits

6 changed files with 114 additions and 172 deletions

View File

@@ -1,3 +1,11 @@
## 0.1.83.5-dev - 2026-03-15
- dev: SerienStream Suche via /suche?term=, Staffel 0 als Filme, Katalog-Suche entfernt
## 0.1.83.0-dev - 2026-03-15
- dev: Trakt Performance, Suchfilter Phrase-Match, Debug-Settings Expert-Level
## 0.1.82.5-dev - 2026-03-15
- dev: Update-Versionsvergleich numerisch korrigiert

View File

@@ -1,5 +1,5 @@
<?xml version='1.0' encoding='utf-8'?>
<addon id="plugin.video.viewit" name="ViewIt" version="0.1.83.0-dev" provider-name="ViewIt">
<addon id="plugin.video.viewit" name="ViewIt" version="0.1.84.0-dev" provider-name="ViewIt">
<requires>
<import addon="xbmc.python" version="3.0.0" />
<import addon="script.module.requests" />

View File

@@ -370,6 +370,40 @@ class TraktClient:
return []
return self._parse_history_items(payload)
def get_watched_shows(self, token: str) -> list[TraktItem]:
    """GET /users/me/watched/shows — one TraktItem per watched show.

    Each returned item carries the highest watched (season, episode)
    pair for that show; season 0 (specials) is ignored.  Returns an
    empty list on any non-200 response or malformed payload.
    """
    status, payload = self._get("/users/me/watched/shows", token=token)
    if status != 200 or not isinstance(payload, list):
        self._do_log(f"get_watched_shows: status={status}")
        return []
    shows: list[TraktItem] = []
    for raw in payload:
        if not isinstance(raw, dict):
            continue
        show_data = raw.get("show") or {}
        show_ids = self._parse_ids(show_data.get("ids") or {})
        show_title = str(show_data.get("title", "") or "")
        show_year = int(show_data.get("year", 0) or 0)
        # Track the highest watched (season, episode) pair; tuple
        # comparison is exactly the lexicographic order we need.
        best = (0, 0)
        for season_entry in (raw.get("seasons") or []):
            season_no = int(season_entry.get("number") or 0)
            if season_no == 0:  # skip specials
                continue
            for episode_entry in (season_entry.get("episodes") or []):
                episode_no = int(episode_entry.get("number") or 0)
                if (season_no, episode_no) > best:
                    best = (season_no, episode_no)
        if show_title:
            shows.append(TraktItem(
                title=show_title, year=show_year, media_type="episode",
                ids=show_ids, season=best[0], episode=best[1],
            ))
    self._do_log(f"get_watched_shows: {len(shows)} Serien")
    return shows
# -------------------------------------------------------------------
# Calendar
# -------------------------------------------------------------------

View File

@@ -2095,8 +2095,17 @@ def _run_async(coro):
"""Fuehrt eine Coroutine aus, auch wenn Kodi bereits einen Event-Loop hat."""
_ensure_windows_selector_policy()
def _run_with_asyncio_run():
return asyncio.run(coro)
def _run_without_asyncio_run():
# asyncio.run() wuerde cancel_all_tasks() aufrufen, was auf Android TV
# wegen eines kaputten _weakrefset.py-Builds zu NameError: 'len' fuehrt.
loop = asyncio.new_event_loop()
try:
return loop.run_until_complete(coro)
finally:
try:
loop.close()
except Exception:
pass
try:
running_loop = asyncio.get_running_loop()
@@ -2109,7 +2118,7 @@ def _run_async(coro):
def _worker() -> None:
try:
result_box["value"] = _run_with_asyncio_run()
result_box["value"] = _run_without_asyncio_run()
except BaseException as exc: # pragma: no cover - defensive
error_box["error"] = exc
@@ -2120,7 +2129,7 @@ def _run_async(coro):
raise error_box["error"]
return result_box.get("value")
return _run_with_asyncio_run()
return _run_without_asyncio_run()
def _series_url_params(plugin: BasisPlugin, title: str) -> dict[str, str]:
@@ -4414,13 +4423,15 @@ def _play_episode(
preferred_setter([selected_hoster])
try:
link = plugin.stream_link_for(title, season, episode)
with _busy_dialog("Stream wird gesucht..."):
link = plugin.stream_link_for(title, season, episode)
if not link:
_log("Kein Stream gefunden.", xbmc.LOGWARNING)
xbmcgui.Dialog().notification("Wiedergabe", "Kein Stream gefunden.", xbmcgui.NOTIFICATION_INFO, 3000)
return
_log(f"Stream-Link: {link}", xbmc.LOGDEBUG)
final_link = _resolve_stream_with_retry(plugin, link)
with _busy_dialog("Stream wird aufgelöst..."):
final_link = _resolve_stream_with_retry(plugin, link)
if not final_link:
return
finally:
@@ -4815,11 +4826,33 @@ def _show_tag_titles_page(plugin_name: str, tag: str, page: int = 1) -> None:
xbmcplugin.endOfDirectory(handle)
return
titles = [str(t).strip() for t in titles if t and str(t).strip()]
for title in titles:
_add_directory_item(handle, title, "seasons",
{"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=True)
if titles:
use_source, show_tmdb, prefer_source = _metadata_policy(
plugin_name, plugin, allow_tmdb=_tmdb_list_enabled()
)
plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {}
show_plot = _get_setting_bool("tmdb_show_plot", default=True)
show_art = _get_setting_bool("tmdb_show_art", default=True)
tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {}
tmdb_titles = list(titles) if show_tmdb else []
if show_tmdb and prefer_source and use_source:
tmdb_titles = [
t for t in titles
if _needs_tmdb((plugin_meta.get(t) or ({},))[0], (plugin_meta.get(t) or ({}, {}))[1],
want_plot=show_plot, want_art=show_art)
]
if show_tmdb and tmdb_titles:
with _busy_dialog("Schlagwort-Liste wird geladen..."):
tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles)
for title in titles:
tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, [])
meta = plugin_meta.get(title)
info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta)
info_labels = dict(info_labels or {})
info_labels.setdefault("mediatype", "tvshow")
_add_directory_item(handle, title, "seasons",
{"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)},
is_folder=True, info_labels=info_labels, art=art, cast=cast)
_add_directory_item(handle, "Naechste Seite", "tag_titles_page",
{"plugin": plugin_name, "tag": tag, "page": str(page + 1)}, is_folder=True)
xbmcplugin.endOfDirectory(handle)
@@ -4929,7 +4962,7 @@ def _show_trakt_watchlist(media_type: str = "") -> None:
_add_directory_item(handle, label, "search", {"query": item.title}, is_folder=True, info_labels=info_labels, art=art)
if not items:
xbmcgui.Dialog().notification("Trakt", "Watchlist ist leer.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle)
xbmcplugin.endOfDirectory(handle, cacheToDisc=False)
def _show_trakt_history(page: int = 1) -> None:
@@ -4999,7 +5032,7 @@ def _show_trakt_history(page: int = 1) -> None:
_add_directory_item(handle, "Naechste Seite >>", "trakt_history", {"page": str(page + 1)}, is_folder=True)
if not items and page == 1:
xbmcgui.Dialog().notification("Trakt", "Keine History vorhanden.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle)
xbmcplugin.endOfDirectory(handle, cacheToDisc=False)
def _show_trakt_upcoming() -> None:
@@ -5110,7 +5143,7 @@ def _show_trakt_upcoming() -> None:
_add_directory_item(handle, label, action, params, is_folder=True, info_labels=info_labels, art=art)
xbmcplugin.endOfDirectory(handle)
xbmcplugin.endOfDirectory(handle, cacheToDisc=False)
def _show_trakt_continue_watching() -> None:
@@ -5127,21 +5160,17 @@ def _show_trakt_continue_watching() -> None:
_set_content(handle, "episodes")
try:
history = client.get_history(token, media_type="episodes", limit=100)
watched = client.get_watched_shows(token)
except Exception as exc:
_log(f"Trakt History fehlgeschlagen: {exc}", xbmc.LOGWARNING)
xbmcgui.Dialog().notification("Trakt", "History konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000)
_log(f"Trakt Watched fehlgeschlagen: {exc}", xbmc.LOGWARNING)
xbmcgui.Dialog().notification("Trakt", "Watched-Liste konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle)
return
# Pro Serie nur den zuletzt gesehenen Eintrag behalten (History ist absteigend sortiert)
seen: dict[str, object] = {}
for item in history:
if item.title and item.title not in seen:
seen[item.title] = item
seen: dict[str, object] = {item.title: item for item in watched if item.title}
if not seen:
xbmcgui.Dialog().notification("Trakt", "Keine History vorhanden.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcgui.Dialog().notification("Trakt", "Keine gesehenen Serien vorhanden.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle)
return
@@ -5169,7 +5198,7 @@ def _show_trakt_continue_watching() -> None:
_, art, _ = tmdb_prefetched.get(last.title, ({}, {}, []))
_add_directory_item(handle, display_label, "search", {"query": last.title}, is_folder=True, info_labels=info_labels, art=art)
xbmcplugin.endOfDirectory(handle)
xbmcplugin.endOfDirectory(handle, cacheToDisc=False)
# ---------------------------------------------------------------------------

View File

@@ -57,7 +57,6 @@ else: # pragma: no cover
SETTING_BASE_URL = "serienstream_base_url"
SETTING_CATALOG_SEARCH = "serienstream_catalog_search"
DEFAULT_BASE_URL = "https://s.to"
DEFAULT_PREFERRED_HOSTERS = ["voe"]
DEFAULT_TIMEOUT = 20
@@ -80,10 +79,7 @@ HEADERS = {
SESSION_CACHE_TTL_SECONDS = 300
SESSION_CACHE_PREFIX = "viewit.serienstream"
SESSION_CACHE_MAX_TITLE_URLS = 800
CATALOG_SEARCH_TTL_SECONDS = 600
CATALOG_SEARCH_CACHE_KEY = "catalog_index"
GENRE_LIST_PAGE_SIZE = 20
_CATALOG_INDEX_MEMORY: tuple[float, list["SeriesResult"]] = (0.0, [])
ProgressCallback = Optional[Callable[[str, int | None], Any]]
@@ -575,8 +571,8 @@ def _search_series_server(query: str) -> list[SeriesResult]:
if not query:
return []
base = _get_base_url()
search_url = f"{base}/search?q={quote(query)}"
alt_url = f"{base}/suche?q={quote(query)}"
search_url = f"{base}/suche?term={quote(query)}"
alt_url = f"{base}/search?term={quote(query)}"
for url in (search_url, alt_url):
try:
body = _get_html_simple(url)
@@ -606,158 +602,30 @@ def _search_series_server(query: str) -> list[SeriesResult]:
continue
seen_urls.add(url_abs)
results.append(SeriesResult(title=title, description="", url=url_abs))
filtered = [r for r in results if _matches_query(query, title=r.title)]
if filtered:
return filtered
if results:
return results
api_results = _search_series_api(query)
if api_results:
return api_results
return []
def _extract_catalog_index_from_html(body: str, *, progress_callback: ProgressCallback = None) -> list[SeriesResult]:
    """Regex fallback parser: pull series entries out of raw catalog HTML.

    Scans every ``<li class="...series-item...">`` block, keeps the first
    anchor's target and text, and reads the optional ``data-search``
    attribute as description.  Non-series links (season/episode URLs)
    and duplicate URLs are dropped.  Emits a progress tick for the first
    item and then every 200 items.
    """
    if not body:
        return []
    item_re = re.compile(
        r"<li[^>]*class=[\"'][^\"']*series-item[^\"']*[\"'][^>]*>(.*?)</li>",
        re.IGNORECASE | re.DOTALL,
    )
    anchor_re = re.compile(r"<a[^>]+href=[\"']([^\"']+)[\"'][^>]*>(.*?)</a>", re.IGNORECASE | re.DOTALL)
    data_search_re = re.compile(r"data-search=[\"']([^\"']*)[\"']", re.IGNORECASE)
    results: list[SeriesResult] = []
    known_urls: set[str] = set()
    for idx, match in enumerate(item_re.finditer(body), start=1):
        if idx == 1 or idx % 200 == 0:
            _emit_progress(progress_callback, f"Katalog parsen {idx}", 62)
        block = match.group(0)
        anchor = anchor_re.search(match.group(1) or "")
        if anchor is None:
            continue
        url = _absolute_url((anchor.group(1) or "").strip())
        if not url or "/serie/" not in url or "/staffel-" in url or "/episode-" in url or url in known_urls:
            continue
        known_urls.add(url)
        title = unescape(re.sub(r"\s+", " ", _strip_tags(anchor.group(2) or ""))).strip()
        if not title:
            continue
        search_match = data_search_re.search(block)
        description = (search_match.group(1) or "").strip() if search_match else ""
        results.append(SeriesResult(title=title, description=description, url=url))
    return results
def _catalog_index_from_soup(soup: BeautifulSoupT) -> list[SeriesResult]:
    """Collect series entries from a parsed catalog page.

    Walks every ``li.series-item`` node, taking the first anchor's href
    (resolved to an absolute URL) and visible text as title, plus the
    node's ``data-search`` attribute as description.  Season/episode
    links and duplicate URLs are skipped.
    """
    results: list[SeriesResult] = []
    if not soup:
        return results
    known_urls: set[str] = set()
    for node in soup.select("li.series-item"):
        anchor = node.find("a", href=True)
        if not anchor:
            continue
        url = _absolute_url((anchor.get("href") or "").strip())
        if not url or "/serie/" not in url or "/staffel-" in url or "/episode-" in url or url in known_urls:
            continue
        known_urls.add(url)
        title = (anchor.get_text(" ", strip=True) or "").strip()
        if not title:
            continue
        results.append(SeriesResult(
            title=title,
            description=(node.get("data-search") or "").strip(),
            url=url,
        ))
    return results
def _load_catalog_index_from_cache() -> Optional[list[SeriesResult]]:
    """Return the cached catalog index, or None when nothing usable is stored.

    Checks the in-process memory cache first (cheap), then the persisted
    session cache; a usable hit from the session cache repopulates the
    memory cache for subsequent calls.
    """
    global _CATALOG_INDEX_MEMORY
    valid_until, remembered = _CATALOG_INDEX_MEMORY
    if remembered and valid_until > time.time():
        return list(remembered)
    raw = _session_cache_get(CATALOG_SEARCH_CACHE_KEY)
    if not isinstance(raw, list):
        return None
    restored: list[SeriesResult] = []
    for row in raw:
        # Rows are [title, url, description?, cover?]; anything else is junk.
        if not isinstance(row, list) or len(row) < 2:
            continue
        title = str(row[0] or "").strip()
        url = str(row[1] or "").strip()
        if not (title and url):
            continue
        restored.append(SeriesResult(
            title=title,
            description=str(row[2] or "") if len(row) > 2 else "",
            url=url,
            cover=str(row[3] or "").strip() if len(row) > 3 else "",
        ))
    if restored:
        _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(restored))
        return restored
    return None
def _store_catalog_index_in_cache(items: list[SeriesResult]) -> None:
    """Persist the catalog index in both the memory and the session cache.

    No-op for an empty list; entries without title or URL are not persisted.
    """
    global _CATALOG_INDEX_MEMORY
    if not items:
        return
    _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(items))
    rows = [
        [entry.title, entry.url, entry.description, entry.cover]
        for entry in items
        if entry.title and entry.url
    ]
    _session_cache_set(CATALOG_SEARCH_CACHE_KEY, rows, ttl_seconds=CATALOG_SEARCH_TTL_SECONDS)
def search_series(query: str, *, progress_callback: ProgressCallback = None) -> list[SeriesResult]:
"""Sucht Serien. Katalog-Suche (vollstaendig) oder API-Suche (max 10) je nach Setting."""
"""Sucht Serien. Server-Suche (/suche?term=) zuerst, API als Fallback."""
_ensure_requests()
if not _normalize_search_text(query):
return []
use_catalog = _get_setting_bool(SETTING_CATALOG_SEARCH, default=True)
if use_catalog:
_emit_progress(progress_callback, "Pruefe Such-Cache", 15)
cached = _load_catalog_index_from_cache()
if cached is not None:
matched_from_cache = [entry for entry in cached if entry.title and _matches_query(query, title=entry.title)]
_emit_progress(progress_callback, f"Cache-Treffer: {len(cached)}", 35)
if matched_from_cache:
return matched_from_cache
_emit_progress(progress_callback, "Lade Katalogseite", 42)
catalog_url = f"{_get_base_url()}/serien?by=genre"
items: list[SeriesResult] = []
try:
soup = _get_soup_simple(catalog_url)
items = _catalog_index_from_soup(soup)
except Exception:
body = _get_html_simple(catalog_url)
items = _extract_catalog_index_from_html(body, progress_callback=progress_callback)
if not items:
_emit_progress(progress_callback, "Fallback-Parser", 58)
soup = BeautifulSoup(body, "html.parser")
items = _catalog_index_from_soup(soup)
if items:
_store_catalog_index_in_cache(items)
_emit_progress(progress_callback, f"Filtere Treffer ({len(items)})", 70)
return [entry for entry in items if entry.title and _matches_query(query, title=entry.title)]
# API-Suche (primaer wenn Katalog deaktiviert, Fallback wenn Katalog leer)
_emit_progress(progress_callback, "API-Suche", 60)
api_results = _search_series_api(query)
if api_results:
_emit_progress(progress_callback, f"API-Treffer: {len(api_results)}", 80)
return api_results
_emit_progress(progress_callback, "Server-Suche", 85)
# 1. Server-Suche (schnell, vollstaendig, direkte HTML-Suche)
_emit_progress(progress_callback, "Suche", 20)
server_results = _search_series_server(query)
if server_results:
_emit_progress(progress_callback, f"Server-Treffer: {len(server_results)}", 95)
return [entry for entry in server_results if entry.title and _matches_query(query, title=entry.title)]
return []
return server_results
# 2. API-Suche (Fallback, max 10 Ergebnisse)
_emit_progress(progress_callback, "API-Suche", 60)
return _search_series_api(query)
def parse_series_catalog(soup: BeautifulSoupT) -> dict[str, list[SeriesResult]]:
@@ -1252,7 +1120,7 @@ class SerienstreamPlugin(BasisPlugin):
except Exception:
continue
url = str(item.get("url") or "").strip()
if number <= 0 or not url:
if number < 0 or not url:
continue
seasons.append(SeasonInfo(number=number, url=url, episodes=[]))
if not seasons:
@@ -1794,6 +1662,8 @@ class SerienstreamPlugin(BasisPlugin):
@staticmethod
def _season_label(number: int) -> str:
if number == 0:
return "Filme"
return f"Staffel {number}"
@staticmethod
@@ -1808,6 +1678,8 @@ class SerienstreamPlugin(BasisPlugin):
@staticmethod
def _parse_season_number(label: str) -> int | None:
if (label or "").strip().casefold() == "filme":
return 0
digits = "".join(ch for ch in label if ch.isdigit())
if not digits:
return None

View File

@@ -2,8 +2,7 @@
<settings>
<category label="Quellen">
<setting id="serienstream_base_url" type="text" label="SerienStream Basis-URL" default="https://s.to" />
<setting id="serienstream_catalog_search" type="bool" label="SerienStream: Katalog-Suche (mehr Ergebnisse, langsamer)" default="true" />
<setting id="aniworld_base_url" type="text" label="AniWorld Basis-URL" default="https://aniworld.to" />
<setting id="aniworld_base_url" type="text" label="AniWorld Basis-URL" default="https://aniworld.to" />
<setting id="topstream_base_url" type="text" label="TopStream Basis-URL" default="https://topstreamfilm.live" />
<setting id="einschalten_base_url" type="text" label="Einschalten Basis-URL" default="https://einschalten.in" />
<setting id="filmpalast_base_url" type="text" label="Filmpalast Basis-URL" default="https://filmpalast.to" />