dev: bump to 0.1.71-dev – neue Plugins (Moflix, KKiste, HDFilme, Netzkino), SerienStream A-Z, VidHide-Fix

This commit is contained in:
2026-03-04 22:29:49 +01:00
parent ff30548811
commit 58da715723
7 changed files with 2460 additions and 3 deletions

View File

@@ -1138,6 +1138,8 @@ class SerienstreamPlugin(BasisPlugin):
self._genre_page_entries_cache: dict[tuple[str, int], list[SeriesResult]] = {}
self._genre_page_has_more_cache: dict[tuple[str, int], bool] = {}
self._popular_cache: Optional[list[SeriesResult]] = None
self._alpha_letters_cache: Optional[list[str]] = None
self._alpha_page_count_cache: dict[str, int] = {}
self._requests_available = REQUESTS_AVAILABLE
self._default_preferred_hosters: list[str] = list(DEFAULT_PREFERRED_HOSTERS)
self._preferred_hosters: list[str] = list(self._default_preferred_hosters)
@@ -1370,7 +1372,7 @@ class SerienstreamPlugin(BasisPlugin):
def capabilities(self) -> set[str]:
    """Report the features this plugin supports, for building router menus.

    Returns:
        Set of feature flags; ``"alpha"`` advertises the A-Z catalogue
        support added alongside ``alpha_index``/``titles_for_alpha_page``.
    """
    # A leftover duplicate return (without "alpha") made the extended
    # feature set unreachable; the extended set is the intended one.
    return {"popular_series", "genres", "latest_episodes", "alpha"}
def popular_series(self) -> list[str]:
"""Liefert die Titel der beliebten Serien (Quelle: `/beliebte-serien`)."""
@@ -1396,6 +1398,70 @@ class SerienstreamPlugin(BasisPlugin):
self._remember_series_result(entry.title, entry.url, entry.description)
return [entry.title for entry in entries if entry.title]
def alpha_index(self) -> list[str]:
    """Liefert alle Buchstaben aus dem A-Z-Katalog (/serien)."""
    if not self._requests_available:
        return []
    cached = self._alpha_letters_cache
    if cached is not None:
        # Hand out a copy so callers cannot mutate the cache.
        return list(cached)
    try:
        page = _get_soup(_get_base_url() + "/serien")
    except Exception:
        # Best effort: network/parse failures simply yield no letters.
        return []
    found: list[str] = []
    for link in page.select("nav.alphabet-bar a.alphabet-link[href]"):
        text = link.get_text(strip=True)
        if text:
            found.append(text)
    self._alpha_letters_cache = found
    return list(found)
def alpha_page_count(self, letter: str) -> int:
    """Return the number of catalogue pages available for *letter*.

    Args:
        letter: A catalogue letter (e.g. ``"A"``); surrounding
            whitespace is stripped.

    Returns:
        The highest page number found in the pagination links, or 1
        when the letter is empty, the backend is unavailable, the
        request fails, or no pagination is present.
    """
    letter = (letter or "").strip()
    if not letter:
        return 1
    if letter in self._alpha_page_count_cache:
        return self._alpha_page_count_cache[letter]
    # Consistency fix: the sibling methods (alpha_index,
    # titles_for_alpha_page) guard against a missing requests backend
    # before touching the network; this one did not.
    if not self._requests_available:
        return 1
    try:
        soup = _get_soup(_get_base_url() + f"/katalog/{letter}")
    except Exception:
        # Deliberately not cached, so a later call can retry the fetch.
        return 1
    page_nums: list[int] = []
    for a in soup.select(".pagination a[href]"):
        m = re.search(r"page=(\d+)", a.get("href", ""))
        if m:
            page_nums.append(int(m.group(1)))
    count = max(page_nums) if page_nums else 1
    self._alpha_page_count_cache[letter] = count
    return count
def titles_for_alpha_page(self, letter: str, page: int = 1) -> list[str]:
    """Liefert Serientitel für den angegebenen Buchstaben und Seitenindex."""
    letter = (letter or "").strip()
    if not letter or not self._requests_available:
        return []
    page = max(1, int(page or 1))
    url = _get_base_url() + f"/katalog/{letter}"
    if page > 1:
        url += f"?page={page}"
    try:
        soup = _get_soup(url)
    except Exception:
        # Network/parse failure: best effort, return nothing.
        return []
    collected: list[str] = []
    known_hrefs: set[str] = set()
    for anchor in soup.select("a[href*='/serie/']"):
        href = (anchor.get("href") or "").strip()
        label = anchor.get_text(strip=True)
        if not href or not label or href in known_hrefs:
            continue
        known_hrefs.add(href)
        # Resolve relative links against the configured base URL.
        absolute = href if href.startswith("http") else _get_base_url() + href
        self._remember_series_result(label, absolute, "")
        collected.append(label)
    return collected
@staticmethod
def _title_group_key(title: str) -> str:
raw = (title or "").strip()