Standardize plugin base URLs

This commit is contained in:
2026-02-01 18:25:22 +01:00
parent 4e0b0ffd1a
commit cd2e8e2b15
4 changed files with 86 additions and 36 deletions

View File

@@ -12,6 +12,7 @@ from contextlib import contextmanager
from datetime import datetime from datetime import datetime
import importlib.util import importlib.util
import inspect import inspect
import json
import os import os
import re import re
import sys import sys

View File

@@ -29,7 +29,7 @@ except ImportError: # pragma: no cover - allow running outside Kodi
xbmcaddon = None xbmcaddon = None
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url from plugin_helpers import dump_response_html, get_setting_bool, get_setting_string, log_url, notify_url
from http_session_pool import get_requests_session from http_session_pool import get_requests_session
from regex_patterns import DIGITS, SEASON_EPISODE_TAG, SEASON_EPISODE_URL, STAFFEL_NUM_IN_URL from regex_patterns import DIGITS, SEASON_EPISODE_TAG, SEASON_EPISODE_URL, STAFFEL_NUM_IN_URL
@@ -41,13 +41,8 @@ else: # pragma: no cover
BeautifulSoupT: TypeAlias = Any BeautifulSoupT: TypeAlias = Any
BASE_URL = "https://aniworld.to" SETTING_BASE_URL = "aniworld_base_url"
ANIME_BASE_URL = f"{BASE_URL}/anime/stream" DEFAULT_BASE_URL = "https://aniworld.to"
POPULAR_ANIMES_URL = f"{BASE_URL}/beliebte-animes"
GENRES_URL = f"{BASE_URL}/animes"
LATEST_EPISODES_URL = f"{BASE_URL}/neue-episoden"
SEARCH_URL = f"{BASE_URL}/search?q={{query}}"
SEARCH_API_URL = f"{BASE_URL}/ajax/search"
DEFAULT_PREFERRED_HOSTERS = ["voe"] DEFAULT_PREFERRED_HOSTERS = ["voe"]
DEFAULT_TIMEOUT = 20 DEFAULT_TIMEOUT = 20
ADDON_ID = "plugin.video.viewit" ADDON_ID = "plugin.video.viewit"
@@ -93,8 +88,39 @@ class SeasonInfo:
episodes: List[EpisodeInfo] episodes: List[EpisodeInfo]
def _get_base_url() -> str:
    """Return the user-configured AniWorld base URL without a trailing slash.

    Falls back to ``DEFAULT_BASE_URL`` when the setting is empty or blank.
    """
    configured = get_setting_string(ADDON_ID, SETTING_BASE_URL, default=DEFAULT_BASE_URL)
    # An all-whitespace setting counts as unset.
    effective = configured.strip() or DEFAULT_BASE_URL
    return effective.rstrip("/")
def _anime_base_url() -> str:
    """Return the base URL for anime stream pages (``<base>/anime/stream``)."""
    return _get_base_url() + "/anime/stream"
def _popular_animes_url() -> str:
    """Return the URL of the popular-animes listing (``<base>/beliebte-animes``)."""
    return _get_base_url() + "/beliebte-animes"
def _genres_url() -> str:
    """Return the URL of the genre overview page (``<base>/animes``)."""
    return _get_base_url() + "/animes"
def _latest_episodes_url() -> str:
    """Return the URL of the latest-episodes page (``<base>/neue-episoden``)."""
    return _get_base_url() + "/neue-episoden"
def _search_url(query: str) -> str:
    """Return the HTML search URL for *query*.

    NOTE(review): *query* is interpolated verbatim — callers appear to
    URL-quote it beforehand; confirm at call sites.
    """
    return "{}/search?q={}".format(_get_base_url(), query)
def _search_api_url() -> str:
    """Return the AJAX search endpoint URL (``<base>/ajax/search``)."""
    return _get_base_url() + "/ajax/search"
def _absolute_url(href: str) -> str: def _absolute_url(href: str) -> str:
return f"{BASE_URL}{href}" if href.startswith("/") else href return f"{_get_base_url()}{href}" if href.startswith("/") else href
def _log_url(url: str, *, kind: str = "VISIT") -> None: def _log_url(url: str, *, kind: str = "VISIT") -> None:
@@ -360,7 +386,7 @@ def scrape_anime_detail(anime_identifier: str, max_seasons: Optional[int] = None
_log_url(anime_url, kind="ANIME") _log_url(anime_url, kind="ANIME")
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
soup = _get_soup(anime_url, session=session) soup = _get_soup(anime_url, session=session)
@@ -394,7 +420,7 @@ def resolve_redirect(target_url: str) -> Optional[str]:
normalized_url = _absolute_url(target_url) normalized_url = _absolute_url(target_url)
_log_visit(normalized_url) _log_visit(normalized_url)
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True) response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True)
if response.url: if response.url:
_log_url(response.url, kind="RESOLVED") _log_url(response.url, kind="RESOLVED")
@@ -405,7 +431,7 @@ def fetch_episode_hoster_names(episode_url: str) -> List[str]:
_ensure_requests() _ensure_requests()
normalized_url = _absolute_url(episode_url) normalized_url = _absolute_url(episode_url)
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
names: List[str] = [] names: List[str] = []
seen: set[str] = set() seen: set[str] = set()
@@ -440,7 +466,7 @@ def fetch_episode_stream_link(
normalized_url = _absolute_url(episode_url) normalized_url = _absolute_url(episode_url)
preferred = [hoster.lower() for hoster in (preferred_hosters or DEFAULT_PREFERRED_HOSTERS)] preferred = [hoster.lower() for hoster in (preferred_hosters or DEFAULT_PREFERRED_HOSTERS)]
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
candidates: List[Tuple[str, str]] = [] candidates: List[Tuple[str, str]] = []
for anchor in soup.select(".hosterSiteVideo a.watchEpisode"): for anchor in soup.select(".hosterSiteVideo a.watchEpisode"):
@@ -476,10 +502,10 @@ def search_animes(query: str) -> List[SeriesResult]:
return [] return []
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
try: try:
session.get(BASE_URL, headers=HEADERS, timeout=DEFAULT_TIMEOUT) session.get(_get_base_url(), headers=HEADERS, timeout=DEFAULT_TIMEOUT)
except Exception: except Exception:
pass pass
data = _post_json(SEARCH_API_URL, payload={"keyword": query}, session=session) data = _post_json(_search_api_url(), payload={"keyword": query}, session=session)
results: List[SeriesResult] = [] results: List[SeriesResult] = []
seen: set[str] = set() seen: set[str] = set()
if isinstance(data, list): if isinstance(data, list):
@@ -507,7 +533,7 @@ def search_animes(query: str) -> List[SeriesResult]:
results.append(SeriesResult(title=title, description=description, url=url)) results.append(SeriesResult(title=title, description=description, url=url))
return results return results
soup = _get_soup_simple(SEARCH_URL.format(query=requests.utils.quote(query))) soup = _get_soup_simple(_search_url(requests.utils.quote(query)))
for anchor in soup.select("a[href^='/anime/stream/'][href]"): for anchor in soup.select("a[href^='/anime/stream/'][href]"):
href = (anchor.get("href") or "").strip() href = (anchor.get("href") or "").strip()
if not href or "/staffel-" in href or "/episode-" in href: if not href or "/staffel-" in href or "/episode-" in href:
@@ -600,7 +626,7 @@ class AniworldPlugin(BasisPlugin):
def _ensure_popular(self) -> List[SeriesResult]: def _ensure_popular(self) -> List[SeriesResult]:
if self._popular_cache is not None: if self._popular_cache is not None:
return list(self._popular_cache) return list(self._popular_cache)
soup = _get_soup_simple(POPULAR_ANIMES_URL) soup = _get_soup_simple(_popular_animes_url())
results: List[SeriesResult] = [] results: List[SeriesResult] = []
seen: set[str] = set() seen: set[str] = set()
for anchor in soup.select("div.seriesListContainer a[href^='/anime/stream/']"): for anchor in soup.select("div.seriesListContainer a[href^='/anime/stream/']"):
@@ -646,7 +672,7 @@ class AniworldPlugin(BasisPlugin):
if cached is not None: if cached is not None:
return list(cached) return list(cached)
url = LATEST_EPISODES_URL url = _latest_episodes_url()
if page > 1: if page > 1:
url = f"{url}?page={page}" url = f"{url}?page={page}"
@@ -658,7 +684,7 @@ class AniworldPlugin(BasisPlugin):
def _ensure_genres(self) -> Dict[str, List[SeriesResult]]: def _ensure_genres(self) -> Dict[str, List[SeriesResult]]:
if self._genre_cache is not None: if self._genre_cache is not None:
return {key: list(value) for key, value in self._genre_cache.items()} return {key: list(value) for key, value in self._genre_cache.items()}
soup = _get_soup_simple(GENRES_URL) soup = _get_soup_simple(_genres_url())
results: Dict[str, List[SeriesResult]] = {} results: Dict[str, List[SeriesResult]] = {}
genre_blocks = soup.select("#seriesContainer div.genre") genre_blocks = soup.select("#seriesContainer div.genre")
if not genre_blocks: if not genre_blocks:

View File

@@ -37,7 +37,7 @@ except ImportError: # pragma: no cover - allow running outside Kodi
xbmcgui = None xbmcgui = None
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url from plugin_helpers import dump_response_html, get_setting_bool, get_setting_string, log_url, notify_url
from http_session_pool import get_requests_session from http_session_pool import get_requests_session
from regex_patterns import SEASON_EPISODE_TAG, SEASON_EPISODE_URL from regex_patterns import SEASON_EPISODE_TAG, SEASON_EPISODE_URL
@@ -49,10 +49,8 @@ else: # pragma: no cover
BeautifulSoupT: TypeAlias = Any BeautifulSoupT: TypeAlias = Any
BASE_URL = "https://s.to" SETTING_BASE_URL = "serienstream_base_url"
SERIES_BASE_URL = f"{BASE_URL}/serie/stream" DEFAULT_BASE_URL = "https://s.to"
POPULAR_SERIES_URL = f"{BASE_URL}/beliebte-serien"
LATEST_EPISODES_URL = f"{BASE_URL}"
DEFAULT_PREFERRED_HOSTERS = ["voe"] DEFAULT_PREFERRED_HOSTERS = ["voe"]
DEFAULT_TIMEOUT = 20 DEFAULT_TIMEOUT = 20
ADDON_ID = "plugin.video.viewit" ADDON_ID = "plugin.video.viewit"
@@ -101,15 +99,34 @@ class SeasonInfo:
episodes: List[EpisodeInfo] episodes: List[EpisodeInfo]
def _get_base_url() -> str:
    """Return the configured SerienStream base URL, trailing slash removed.

    An empty or whitespace-only setting value falls back to
    ``DEFAULT_BASE_URL``.
    """
    raw = get_setting_string(ADDON_ID, SETTING_BASE_URL, default=DEFAULT_BASE_URL)
    chosen = raw.strip() or DEFAULT_BASE_URL
    return chosen.rstrip("/")
def _series_base_url() -> str:
    """Return the base URL for series stream pages (``<base>/serie/stream``)."""
    return _get_base_url() + "/serie/stream"
def _popular_series_url() -> str:
    """Return the URL of the popular-series listing (``<base>/beliebte-serien``)."""
    return _get_base_url() + "/beliebte-serien"
def _latest_episodes_url() -> str:
    """Return the URL used for the latest-episodes listing.

    For SerienStream this is simply the site root; callers append
    ``?page=N`` for pagination.
    """
    # _get_base_url() already returns a str — no need to wrap it in an
    # f-string (the original `f"{_get_base_url()}"` was a no-op wrapper).
    return _get_base_url()
def _absolute_url(href: str) -> str: def _absolute_url(href: str) -> str:
return f"{BASE_URL}{href}" if href.startswith("/") else href return f"{_get_base_url()}{href}" if href.startswith("/") else href
def _normalize_series_url(identifier: str) -> str: def _normalize_series_url(identifier: str) -> str:
if identifier.startswith("http://") or identifier.startswith("https://"): if identifier.startswith("http://") or identifier.startswith("https://"):
return identifier.rstrip("/") return identifier.rstrip("/")
slug = identifier.strip("/") slug = identifier.strip("/")
return f"{SERIES_BASE_URL}/{slug}" return f"{_series_base_url()}/{slug}"
def _series_root_url(url: str) -> str: def _series_root_url(url: str) -> str:
@@ -227,7 +244,7 @@ def search_series(query: str) -> List[SeriesResult]:
if not normalized_query: if not normalized_query:
return [] return []
# Direkter Abruf wie in fetch_serien.py. # Direkter Abruf wie in fetch_serien.py.
catalog_url = f"{BASE_URL}/serien?by=genre" catalog_url = f"{_get_base_url()}/serien?by=genre"
soup = _get_soup_simple(catalog_url) soup = _get_soup_simple(catalog_url)
results: List[SeriesResult] = [] results: List[SeriesResult] = []
for series in parse_series_catalog(soup).values(): for series in parse_series_catalog(soup).values():
@@ -424,7 +441,7 @@ def fetch_episode_stream_link(
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren. # Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
@@ -453,7 +470,7 @@ def fetch_episode_hoster_names(episode_url: str) -> List[str]:
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren. # Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
@@ -546,7 +563,7 @@ def resolve_redirect(target_url: str) -> Optional[str]:
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren. # Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
response = session.get( response = session.get(
@@ -571,7 +588,7 @@ def scrape_series_detail(
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight ist optional; manche Umgebungen/Provider leiten die Startseite um. # Preflight ist optional; manche Umgebungen/Provider leiten die Startseite um.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
soup = _get_soup(series_url, session=session) soup = _get_soup(series_url, session=session)
@@ -636,7 +653,7 @@ class SerienstreamPlugin(BasisPlugin):
if self._catalog_cache is not None: if self._catalog_cache is not None:
return self._catalog_cache return self._catalog_cache
# Stand: 2026-01 liefert `?by=genre` konsistente Gruppen für `genres()`. # Stand: 2026-01 liefert `?by=genre` konsistente Gruppen für `genres()`.
catalog_url = f"{BASE_URL}/serien?by=genre" catalog_url = f"{_get_base_url()}/serien?by=genre"
soup = _get_soup_simple(catalog_url) soup = _get_soup_simple(catalog_url)
self._catalog_cache = parse_series_catalog(soup) self._catalog_cache = parse_series_catalog(soup)
return self._catalog_cache return self._catalog_cache
@@ -678,7 +695,7 @@ class SerienstreamPlugin(BasisPlugin):
"""Laedt und cached die Liste der beliebten Serien aus `/beliebte-serien`.""" """Laedt und cached die Liste der beliebten Serien aus `/beliebte-serien`."""
if self._popular_cache is not None: if self._popular_cache is not None:
return list(self._popular_cache) return list(self._popular_cache)
soup = _get_soup_simple(POPULAR_SERIES_URL) soup = _get_soup_simple(_popular_series_url())
results: List[SeriesResult] = [] results: List[SeriesResult] = []
seen: set[str] = set() seen: set[str] = set()
@@ -894,7 +911,7 @@ class SerienstreamPlugin(BasisPlugin):
if cached is not None: if cached is not None:
return list(cached) return list(cached)
url = LATEST_EPISODES_URL url = _latest_episodes_url()
if page > 1: if page > 1:
url = f"{url}?page={page}" url = f"{url}?page={page}"
soup = _get_soup_simple(url) soup = _get_soup_simple(url)

View File

@@ -6,11 +6,17 @@
<setting id="debug_show_url_info" type="bool" label="Debug: Aufgerufene URL anzeigen (global)" default="false" /> <setting id="debug_show_url_info" type="bool" label="Debug: Aufgerufene URL anzeigen (global)" default="false" />
</category> </category>
<category label="TopStream"> <category label="TopStream">
<setting id="topstream_base_url" type="text" label="Basis-URL (z.B. https://www.meineseite)" default="https://www.meineseite" /> <setting id="topstream_base_url" type="text" label="Domain (BASE_URL)" default="https://topstreamfilm.live" />
<setting id="topstream_genre_max_pages" type="number" label="Genres: max. Seiten laden (Pagination)" default="20" /> <setting id="topstream_genre_max_pages" type="number" label="Genres: max. Seiten laden (Pagination)" default="20" />
</category> </category>
<category label="SerienStream">
<setting id="serienstream_base_url" type="text" label="Domain (BASE_URL)" default="https://s.to" />
</category>
<category label="AniWorld">
<setting id="aniworld_base_url" type="text" label="Domain (BASE_URL)" default="https://aniworld.to" />
</category>
<category label="Einschalten"> <category label="Einschalten">
<setting id="einschalten_base_url" type="text" label="Basis-URL (nur eigene/autorisiert betriebene Quelle)" default="" /> <setting id="einschalten_base_url" type="text" label="Domain (BASE_URL)" default="https://einschalten.in" />
<setting id="einschalten_index_path" type="text" label="Index-Pfad (z.B. /)" default="/" /> <setting id="einschalten_index_path" type="text" label="Index-Pfad (z.B. /)" default="/" />
<setting id="einschalten_new_titles_path" type="text" label="Neue-Titel-Pfad (z.B. /movies/new)" default="/movies/new" /> <setting id="einschalten_new_titles_path" type="text" label="Neue-Titel-Pfad (z.B. /movies/new)" default="/movies/new" />
<setting id="einschalten_search_path" type="text" label="Suche-Pfad (z.B. /search)" default="/search" /> <setting id="einschalten_search_path" type="text" label="Suche-Pfad (z.B. /search)" default="/search" />