Compare commits

...

5 Commits

Author SHA1 Message Date
fe79cca818 Add TMDB master toggle and filter upcoming episodes 2026-02-01 20:05:28 +01:00
4d74755e20 Filter TBA episodes 2026-02-01 19:53:57 +01:00
9df80240c4 Improve logging and docs 2026-02-01 19:45:51 +01:00
da83ed02be Simplify plugin settings and names 2026-02-01 19:05:08 +01:00
cd2e8e2b15 Standardize plugin base URLs 2026-02-01 18:25:22 +01:00
11 changed files with 598 additions and 112 deletions

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<addon id="plugin.video.viewit" name="ViewIt" version="0.1.46" provider-name="ViewIt"> <addon id="plugin.video.viewit" name="ViewIt" version="0.1.47" provider-name="ViewIt">
<requires> <requires>
<import addon="xbmc.python" version="3.0.0" /> <import addon="xbmc.python" version="3.0.0" />
<import addon="script.module.requests" /> <import addon="script.module.requests" />

View File

@@ -12,6 +12,7 @@ from contextlib import contextmanager
from datetime import datetime from datetime import datetime
import importlib.util import importlib.util
import inspect import inspect
import json
import os import os
import re import re
import sys import sys
@@ -112,6 +113,10 @@ def _tmdb_prefetch_concurrency() -> int:
return max(1, min(20, value)) return max(1, min(20, value))
def _tmdb_enabled() -> bool:
return _get_setting_bool("tmdb_enabled", default=True)
def _log(message: str, level: int = xbmc.LOGINFO) -> None: def _log(message: str, level: int = xbmc.LOGINFO) -> None:
xbmc.log(f"[ViewIt] {message}", level) xbmc.log(f"[ViewIt] {message}", level)
@@ -547,6 +552,8 @@ def _tmdb_file_log(message: str) -> None:
def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]: def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]:
if not _tmdb_enabled():
return {}, {}, []
title_key = (title or "").strip().casefold() title_key = (title or "").strip().casefold()
language = _get_setting_string("tmdb_language").strip() or "de-DE" language = _get_setting_string("tmdb_language").strip() or "de-DE"
show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_plot = _get_setting_bool("tmdb_show_plot", default=True)
@@ -685,10 +692,14 @@ async def _tmdb_labels_and_art_bulk_async(
def _tmdb_labels_and_art_bulk( def _tmdb_labels_and_art_bulk(
titles: list[str], titles: list[str],
) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]]: ) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]]:
if not _tmdb_enabled():
return {}
return _run_async(_tmdb_labels_and_art_bulk_async(titles)) return _run_async(_tmdb_labels_and_art_bulk_async(titles))
def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label: str) -> tuple[dict[str, str], dict[str, str]]: def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label: str) -> tuple[dict[str, str], dict[str, str]]:
if not _tmdb_enabled():
return {"title": episode_label}, {}
title_key = (title or "").strip().casefold() title_key = (title or "").strip().casefold()
tmdb_id = _TMDB_ID_CACHE.get(title_key) tmdb_id = _TMDB_ID_CACHE.get(title_key)
if not tmdb_id: if not tmdb_id:
@@ -747,6 +758,8 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label
def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> list[TmdbCastMember]: def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> list[TmdbCastMember]:
if not _tmdb_enabled():
return []
show_episode_cast = _get_setting_bool("tmdb_show_episode_cast", default=False) show_episode_cast = _get_setting_bool("tmdb_show_episode_cast", default=False)
if not show_episode_cast: if not show_episode_cast:
return [] return []

View File

@@ -54,10 +54,39 @@ def get_setting_bool(addon_id: str, setting_id: str, *, default: bool = False) -
return default return default
def notify_url(addon_id: str, *, heading: str, url: str, enabled_setting_id: str) -> None: def get_setting_int(addon_id: str, setting_id: str, *, default: int = 0) -> int:
if xbmcaddon is None:
return default
try:
addon = xbmcaddon.Addon(addon_id)
getter = getattr(addon, "getSettingInt", None)
if getter is not None:
return int(getter(setting_id))
raw = addon.getSetting(setting_id)
return int(str(raw).strip())
except Exception:
return default
def _is_logging_enabled(addon_id: str, *, global_setting_id: str, plugin_setting_id: Optional[str]) -> bool:
if not get_setting_bool(addon_id, global_setting_id, default=False):
return False
if plugin_setting_id:
return get_setting_bool(addon_id, plugin_setting_id, default=False)
return True
def notify_url(
addon_id: str,
*,
heading: str,
url: str,
enabled_setting_id: str,
plugin_setting_id: Optional[str] = None,
) -> None:
if xbmcgui is None: if xbmcgui is None:
return return
if not get_setting_bool(addon_id, enabled_setting_id, default=False): if not _is_logging_enabled(addon_id, global_setting_id=enabled_setting_id, plugin_setting_id=plugin_setting_id):
return return
try: try:
xbmcgui.Dialog().notification(heading, url, xbmcgui.NOTIFICATION_INFO, 3000) xbmcgui.Dialog().notification(heading, url, xbmcgui.NOTIFICATION_INFO, 3000)
@@ -96,16 +125,92 @@ def _append_text_file(path: str, content: str) -> None:
return return
def log_url(addon_id: str, *, enabled_setting_id: str, log_filename: str, url: str, kind: str = "VISIT") -> None: def _rotate_log_file(path: str, *, max_bytes: int, max_files: int) -> None:
if not get_setting_bool(addon_id, enabled_setting_id, default=False): if max_bytes <= 0 or max_files <= 0:
return
try:
if not os.path.exists(path) or os.path.getsize(path) <= max_bytes:
return
except Exception:
return
try:
for index in range(max_files - 1, 0, -1):
older = f"{path}.{index}"
newer = f"{path}.{index + 1}"
if os.path.exists(older):
if index + 1 > max_files:
os.remove(older)
else:
os.replace(older, newer)
os.replace(path, f"{path}.1")
except Exception:
return
def _prune_dump_files(directory: str, *, prefix: str, max_files: int) -> None:
if not directory or max_files <= 0:
return
try:
entries = [
os.path.join(directory, name)
for name in os.listdir(directory)
if name.startswith(prefix) and name.endswith(".html")
]
if len(entries) <= max_files:
return
entries.sort(key=lambda path: os.path.getmtime(path))
for path in entries[: len(entries) - max_files]:
try:
os.remove(path)
except Exception:
pass
except Exception:
return
def log_url(
addon_id: str,
*,
enabled_setting_id: str,
log_filename: str,
url: str,
kind: str = "VISIT",
request_id: Optional[str] = None,
plugin_setting_id: Optional[str] = None,
max_mb_setting_id: str = "log_max_mb",
max_files_setting_id: str = "log_max_files",
) -> None:
if not _is_logging_enabled(addon_id, global_setting_id=enabled_setting_id, plugin_setting_id=plugin_setting_id):
return return
timestamp = datetime.utcnow().isoformat(timespec="seconds") + "Z" timestamp = datetime.utcnow().isoformat(timespec="seconds") + "Z"
line = f"{timestamp}\t{kind}\t{url}\n" request_part = f"\t{request_id}" if request_id else ""
line = f"{timestamp}\t{kind}{request_part}\t{url}\n"
log_dir = _profile_logs_dir(addon_id) log_dir = _profile_logs_dir(addon_id)
if log_dir: path = os.path.join(log_dir, log_filename) if log_dir else os.path.join(os.path.dirname(__file__), log_filename)
_append_text_file(os.path.join(log_dir, log_filename), line) max_mb = get_setting_int(addon_id, max_mb_setting_id, default=5)
return max_files = get_setting_int(addon_id, max_files_setting_id, default=3)
_append_text_file(os.path.join(os.path.dirname(__file__), log_filename), line) _rotate_log_file(path, max_bytes=max_mb * 1024 * 1024, max_files=max_files)
_append_text_file(path, line)
def log_error(
addon_id: str,
*,
enabled_setting_id: str,
log_filename: str,
message: str,
request_id: Optional[str] = None,
plugin_setting_id: Optional[str] = None,
) -> None:
log_url(
addon_id,
enabled_setting_id=enabled_setting_id,
plugin_setting_id=plugin_setting_id,
log_filename=log_filename,
url=message,
kind="ERROR",
request_id=request_id,
)
def dump_response_html( def dump_response_html(
@@ -115,14 +220,20 @@ def dump_response_html(
url: str, url: str,
body: str, body: str,
filename_prefix: str, filename_prefix: str,
request_id: Optional[str] = None,
plugin_setting_id: Optional[str] = None,
max_files_setting_id: str = "dump_max_files",
) -> None: ) -> None:
if not get_setting_bool(addon_id, enabled_setting_id, default=False): if not _is_logging_enabled(addon_id, global_setting_id=enabled_setting_id, plugin_setting_id=plugin_setting_id):
return return
timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S_%f") timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S_%f")
digest = hashlib.md5(url.encode("utf-8")).hexdigest() # nosec - filename only digest = hashlib.md5(url.encode("utf-8")).hexdigest() # nosec - filename only
filename = f"{filename_prefix}_{timestamp}_{digest}.html" filename = f"{filename_prefix}_{timestamp}_{digest}.html"
log_dir = _profile_logs_dir(addon_id) log_dir = _profile_logs_dir(addon_id)
path = os.path.join(log_dir, filename) if log_dir else os.path.join(os.path.dirname(__file__), filename) path = os.path.join(log_dir, filename) if log_dir else os.path.join(os.path.dirname(__file__), filename)
content = f"<!-- {url} -->\n{body or ''}" request_line = f" request_id={request_id}" if request_id else ""
content = f"<!-- {url}{request_line} -->\n{body or ''}"
if log_dir:
max_files = get_setting_int(addon_id, max_files_setting_id, default=200)
_prune_dump_files(log_dir, prefix=filename_prefix, max_files=max_files)
_append_text_file(path, content) _append_text_file(path, content)

View File

@@ -29,7 +29,7 @@ except ImportError: # pragma: no cover - allow running outside Kodi
xbmcaddon = None xbmcaddon = None
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url from plugin_helpers import dump_response_html, get_setting_bool, get_setting_string, log_error, log_url, notify_url
from http_session_pool import get_requests_session from http_session_pool import get_requests_session
from regex_patterns import DIGITS, SEASON_EPISODE_TAG, SEASON_EPISODE_URL, STAFFEL_NUM_IN_URL from regex_patterns import DIGITS, SEASON_EPISODE_TAG, SEASON_EPISODE_URL, STAFFEL_NUM_IN_URL
@@ -41,19 +41,19 @@ else: # pragma: no cover
BeautifulSoupT: TypeAlias = Any BeautifulSoupT: TypeAlias = Any
BASE_URL = "https://aniworld.to" SETTING_BASE_URL = "aniworld_base_url"
ANIME_BASE_URL = f"{BASE_URL}/anime/stream" DEFAULT_BASE_URL = "https://aniworld.to"
POPULAR_ANIMES_URL = f"{BASE_URL}/beliebte-animes"
GENRES_URL = f"{BASE_URL}/animes"
LATEST_EPISODES_URL = f"{BASE_URL}/neue-episoden"
SEARCH_URL = f"{BASE_URL}/search?q={{query}}"
SEARCH_API_URL = f"{BASE_URL}/ajax/search"
DEFAULT_PREFERRED_HOSTERS = ["voe"] DEFAULT_PREFERRED_HOSTERS = ["voe"]
DEFAULT_TIMEOUT = 20 DEFAULT_TIMEOUT = 20
ADDON_ID = "plugin.video.viewit" ADDON_ID = "plugin.video.viewit"
GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info" GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
GLOBAL_SETTING_LOG_ERRORS = "debug_log_errors"
SETTING_LOG_URLS = "log_urls_aniworld"
SETTING_DUMP_HTML = "dump_html_aniworld"
SETTING_SHOW_URL_INFO = "show_url_info_aniworld"
SETTING_LOG_ERRORS = "log_errors_aniworld"
HEADERS = { HEADERS = {
"User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)", "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
@@ -93,17 +93,61 @@ class SeasonInfo:
episodes: List[EpisodeInfo] episodes: List[EpisodeInfo]
def _get_base_url() -> str:
base = get_setting_string(ADDON_ID, SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip()
if not base:
base = DEFAULT_BASE_URL
return base.rstrip("/")
def _anime_base_url() -> str:
return f"{_get_base_url()}/anime/stream"
def _popular_animes_url() -> str:
return f"{_get_base_url()}/beliebte-animes"
def _genres_url() -> str:
return f"{_get_base_url()}/animes"
def _latest_episodes_url() -> str:
return f"{_get_base_url()}/neue-episoden"
def _search_url(query: str) -> str:
return f"{_get_base_url()}/search?q={query}"
def _search_api_url() -> str:
return f"{_get_base_url()}/ajax/search"
def _absolute_url(href: str) -> str: def _absolute_url(href: str) -> str:
return f"{BASE_URL}{href}" if href.startswith("/") else href return f"{_get_base_url()}{href}" if href.startswith("/") else href
def _log_url(url: str, *, kind: str = "VISIT") -> None: def _log_url(url: str, *, kind: str = "VISIT") -> None:
log_url(ADDON_ID, enabled_setting_id=GLOBAL_SETTING_LOG_URLS, log_filename="aniworld_urls.log", url=url, kind=kind) log_url(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="aniworld_urls.log",
url=url,
kind=kind,
)
def _log_visit(url: str) -> None: def _log_visit(url: str) -> None:
_log_url(url, kind="VISIT") _log_url(url, kind="VISIT")
notify_url(ADDON_ID, heading="AniWorld", url=url, enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO) notify_url(
ADDON_ID,
heading="AniWorld",
url=url,
enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO,
plugin_setting_id=SETTING_SHOW_URL_INFO,
)
def _log_parsed_url(url: str) -> None: def _log_parsed_url(url: str) -> None:
@@ -114,12 +158,23 @@ def _log_response_html(url: str, body: str) -> None:
dump_response_html( dump_response_html(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_DUMP_HTML, enabled_setting_id=GLOBAL_SETTING_DUMP_HTML,
plugin_setting_id=SETTING_DUMP_HTML,
url=url, url=url,
body=body, body=body,
filename_prefix="aniworld_response", filename_prefix="aniworld_response",
) )
def _log_error(message: str) -> None:
log_error(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_ERRORS,
plugin_setting_id=SETTING_LOG_ERRORS,
log_filename="aniworld_errors.log",
message=message,
)
def _normalize_search_text(value: str) -> str: def _normalize_search_text(value: str) -> str:
value = (value or "").casefold() value = (value or "").casefold()
value = re.sub(r"[^a-z0-9]+", " ", value) value = re.sub(r"[^a-z0-9]+", " ", value)
@@ -166,8 +221,12 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = session or get_requests_session("aniworld", headers=HEADERS) sess = session or get_requests_session("aniworld", headers=HEADERS)
try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
raise
if response.url and response.url != url: if response.url and response.url != url:
_log_url(response.url, kind="REDIRECT") _log_url(response.url, kind="REDIRECT")
_log_response_html(url, response.text) _log_response_html(url, response.text)
@@ -180,8 +239,12 @@ def _get_soup_simple(url: str) -> BeautifulSoupT:
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = get_requests_session("aniworld", headers=HEADERS) sess = get_requests_session("aniworld", headers=HEADERS)
try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
raise
if response.url and response.url != url: if response.url and response.url != url:
_log_url(response.url, kind="REDIRECT") _log_url(response.url, kind="REDIRECT")
_log_response_html(url, response.text) _log_response_html(url, response.text)
@@ -360,7 +423,7 @@ def scrape_anime_detail(anime_identifier: str, max_seasons: Optional[int] = None
_log_url(anime_url, kind="ANIME") _log_url(anime_url, kind="ANIME")
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
soup = _get_soup(anime_url, session=session) soup = _get_soup(anime_url, session=session)
@@ -394,7 +457,7 @@ def resolve_redirect(target_url: str) -> Optional[str]:
normalized_url = _absolute_url(target_url) normalized_url = _absolute_url(target_url)
_log_visit(normalized_url) _log_visit(normalized_url)
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True) response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True)
if response.url: if response.url:
_log_url(response.url, kind="RESOLVED") _log_url(response.url, kind="RESOLVED")
@@ -405,7 +468,7 @@ def fetch_episode_hoster_names(episode_url: str) -> List[str]:
_ensure_requests() _ensure_requests()
normalized_url = _absolute_url(episode_url) normalized_url = _absolute_url(episode_url)
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
names: List[str] = [] names: List[str] = []
seen: set[str] = set() seen: set[str] = set()
@@ -440,7 +503,7 @@ def fetch_episode_stream_link(
normalized_url = _absolute_url(episode_url) normalized_url = _absolute_url(episode_url)
preferred = [hoster.lower() for hoster in (preferred_hosters or DEFAULT_PREFERRED_HOSTERS)] preferred = [hoster.lower() for hoster in (preferred_hosters or DEFAULT_PREFERRED_HOSTERS)]
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
candidates: List[Tuple[str, str]] = [] candidates: List[Tuple[str, str]] = []
for anchor in soup.select(".hosterSiteVideo a.watchEpisode"): for anchor in soup.select(".hosterSiteVideo a.watchEpisode"):
@@ -476,10 +539,10 @@ def search_animes(query: str) -> List[SeriesResult]:
return [] return []
session = get_requests_session("aniworld", headers=HEADERS) session = get_requests_session("aniworld", headers=HEADERS)
try: try:
session.get(BASE_URL, headers=HEADERS, timeout=DEFAULT_TIMEOUT) session.get(_get_base_url(), headers=HEADERS, timeout=DEFAULT_TIMEOUT)
except Exception: except Exception:
pass pass
data = _post_json(SEARCH_API_URL, payload={"keyword": query}, session=session) data = _post_json(_search_api_url(), payload={"keyword": query}, session=session)
results: List[SeriesResult] = [] results: List[SeriesResult] = []
seen: set[str] = set() seen: set[str] = set()
if isinstance(data, list): if isinstance(data, list):
@@ -507,7 +570,7 @@ def search_animes(query: str) -> List[SeriesResult]:
results.append(SeriesResult(title=title, description=description, url=url)) results.append(SeriesResult(title=title, description=description, url=url))
return results return results
soup = _get_soup_simple(SEARCH_URL.format(query=requests.utils.quote(query))) soup = _get_soup_simple(_search_url(requests.utils.quote(query)))
for anchor in soup.select("a[href^='/anime/stream/'][href]"): for anchor in soup.select("a[href^='/anime/stream/'][href]"):
href = (anchor.get("href") or "").strip() href = (anchor.get("href") or "").strip()
if not href or "/staffel-" in href or "/episode-" in href: if not href or "/staffel-" in href or "/episode-" in href:
@@ -530,7 +593,7 @@ def search_animes(query: str) -> List[SeriesResult]:
class AniworldPlugin(BasisPlugin): class AniworldPlugin(BasisPlugin):
name = "AniWorld (aniworld.to)" name = "Aniworld"
def __init__(self) -> None: def __init__(self) -> None:
self._anime_results: Dict[str, SeriesResult] = {} self._anime_results: Dict[str, SeriesResult] = {}
@@ -600,7 +663,7 @@ class AniworldPlugin(BasisPlugin):
def _ensure_popular(self) -> List[SeriesResult]: def _ensure_popular(self) -> List[SeriesResult]:
if self._popular_cache is not None: if self._popular_cache is not None:
return list(self._popular_cache) return list(self._popular_cache)
soup = _get_soup_simple(POPULAR_ANIMES_URL) soup = _get_soup_simple(_popular_animes_url())
results: List[SeriesResult] = [] results: List[SeriesResult] = []
seen: set[str] = set() seen: set[str] = set()
for anchor in soup.select("div.seriesListContainer a[href^='/anime/stream/']"): for anchor in soup.select("div.seriesListContainer a[href^='/anime/stream/']"):
@@ -646,7 +709,7 @@ class AniworldPlugin(BasisPlugin):
if cached is not None: if cached is not None:
return list(cached) return list(cached)
url = LATEST_EPISODES_URL url = _latest_episodes_url()
if page > 1: if page > 1:
url = f"{url}?page={page}" url = f"{url}?page={page}"
@@ -658,7 +721,7 @@ class AniworldPlugin(BasisPlugin):
def _ensure_genres(self) -> Dict[str, List[SeriesResult]]: def _ensure_genres(self) -> Dict[str, List[SeriesResult]]:
if self._genre_cache is not None: if self._genre_cache is not None:
return {key: list(value) for key, value in self._genre_cache.items()} return {key: list(value) for key, value in self._genre_cache.items()}
soup = _get_soup_simple(GENRES_URL) soup = _get_soup_simple(_genres_url())
results: Dict[str, List[SeriesResult]] = {} results: Dict[str, List[SeriesResult]] = {}
genre_blocks = soup.select("#seriesContainer div.genre") genre_blocks = soup.select("#seriesContainer div.genre")
if not genre_blocks: if not genre_blocks:

View File

@@ -30,19 +30,18 @@ except ImportError: # pragma: no cover - allow running outside Kodi
xbmcaddon = None xbmcaddon = None
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url from plugin_helpers import dump_response_html, get_setting_bool, log_error, log_url, notify_url
ADDON_ID = "plugin.video.viewit" ADDON_ID = "plugin.video.viewit"
SETTING_BASE_URL = "einschalten_base_url" SETTING_BASE_URL = "einschalten_base_url"
SETTING_INDEX_PATH = "einschalten_index_path"
SETTING_NEW_TITLES_PATH = "einschalten_new_titles_path"
SETTING_SEARCH_PATH = "einschalten_search_path"
SETTING_GENRES_PATH = "einschalten_genres_path"
SETTING_ENABLE_PLAYBACK = "einschalten_enable_playback"
SETTING_WATCH_PATH_TEMPLATE = "einschalten_watch_path_template"
GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info" GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
GLOBAL_SETTING_LOG_ERRORS = "debug_log_errors"
SETTING_LOG_URLS = "log_urls_einschalten"
SETTING_DUMP_HTML = "dump_html_einschalten"
SETTING_SHOW_URL_INFO = "show_url_info_einschalten"
SETTING_LOG_ERRORS = "log_errors_einschalten"
DEFAULT_BASE_URL = "" DEFAULT_BASE_URL = ""
DEFAULT_INDEX_PATH = "/" DEFAULT_INDEX_PATH = "/"
@@ -153,16 +152,36 @@ def _extract_ng_state_payload(html: str) -> Dict[str, Any]:
def _notify_url(url: str) -> None: def _notify_url(url: str) -> None:
notify_url(ADDON_ID, heading="einschalten", url=url, enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO) notify_url(
ADDON_ID,
heading="Einschalten",
url=url,
enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO,
plugin_setting_id=SETTING_SHOW_URL_INFO,
)
def _log_url(url: str, *, kind: str = "VISIT") -> None: def _log_url(url: str, *, kind: str = "VISIT") -> None:
log_url(ADDON_ID, enabled_setting_id=GLOBAL_SETTING_LOG_URLS, log_filename="einschalten_urls.log", url=url, kind=kind) log_url(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="einschalten_urls.log",
url=url,
kind=kind,
)
def _log_debug_line(message: str) -> None: def _log_debug_line(message: str) -> None:
try: try:
log_url(ADDON_ID, enabled_setting_id=GLOBAL_SETTING_LOG_URLS, log_filename="einschalten_debug.log", url=message, kind="DEBUG") log_url(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="einschalten_debug.log",
url=message,
kind="DEBUG",
)
except Exception: except Exception:
pass pass
@@ -174,6 +193,7 @@ def _log_titles(items: list[MovieItem], *, context: str) -> None:
log_url( log_url(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS, enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="einschalten_titles.log", log_filename="einschalten_titles.log",
url=f"{context}:count={len(items)}", url=f"{context}:count={len(items)}",
kind="TITLE", kind="TITLE",
@@ -182,6 +202,7 @@ def _log_titles(items: list[MovieItem], *, context: str) -> None:
log_url( log_url(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS, enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="einschalten_titles.log", log_filename="einschalten_titles.log",
url=f"{context}:id={item.id} title={item.title}", url=f"{context}:id={item.id} title={item.title}",
kind="TITLE", kind="TITLE",
@@ -194,11 +215,22 @@ def _log_response_html(url: str, body: str) -> None:
dump_response_html( dump_response_html(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_DUMP_HTML, enabled_setting_id=GLOBAL_SETTING_DUMP_HTML,
plugin_setting_id=SETTING_DUMP_HTML,
url=url, url=url,
body=body, body=body,
filename_prefix="einschalten_response", filename_prefix="einschalten_response",
) )
def _log_error(message: str) -> None:
log_error(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_ERRORS,
plugin_setting_id=SETTING_LOG_ERRORS,
log_filename="einschalten_errors.log",
message=message,
)
def _u_matches(value: Any, expected_path: str) -> bool: def _u_matches(value: Any, expected_path: str) -> bool:
raw = (value or "").strip() raw = (value or "").strip()
if not raw: if not raw:
@@ -474,7 +506,7 @@ def _parse_ng_state_genres(payload: Dict[str, Any]) -> Dict[str, int]:
class EinschaltenPlugin(BasisPlugin): class EinschaltenPlugin(BasisPlugin):
"""Metadata-Plugin für eine autorisierte Quelle.""" """Metadata-Plugin für eine autorisierte Quelle."""
name = "einschalten" name = "Einschalten"
def __init__(self) -> None: def __init__(self) -> None:
self.is_available = REQUESTS_AVAILABLE self.is_available = REQUESTS_AVAILABLE
@@ -501,21 +533,21 @@ class EinschaltenPlugin(BasisPlugin):
base = self._get_base_url() base = self._get_base_url()
if not base: if not base:
return "" return ""
path = _get_setting_text(SETTING_INDEX_PATH, default=DEFAULT_INDEX_PATH).strip() or "/" path = DEFAULT_INDEX_PATH
return urljoin(base + "/", path.lstrip("/")) return urljoin(base + "/", path.lstrip("/"))
def _new_titles_url(self) -> str: def _new_titles_url(self) -> str:
base = self._get_base_url() base = self._get_base_url()
if not base: if not base:
return "" return ""
path = _get_setting_text(SETTING_NEW_TITLES_PATH, default=DEFAULT_NEW_TITLES_PATH).strip() or "/movies/new" path = DEFAULT_NEW_TITLES_PATH
return urljoin(base + "/", path.lstrip("/")) return urljoin(base + "/", path.lstrip("/"))
def _genres_url(self) -> str: def _genres_url(self) -> str:
base = self._get_base_url() base = self._get_base_url()
if not base: if not base:
return "" return ""
path = _get_setting_text(SETTING_GENRES_PATH, default=DEFAULT_GENRES_PATH).strip() or "/genres" path = DEFAULT_GENRES_PATH
return urljoin(base + "/", path.lstrip("/")) return urljoin(base + "/", path.lstrip("/"))
def _api_genres_url(self) -> str: def _api_genres_url(self) -> str:
@@ -528,7 +560,7 @@ class EinschaltenPlugin(BasisPlugin):
base = self._get_base_url() base = self._get_base_url()
if not base: if not base:
return "" return ""
path = _get_setting_text(SETTING_SEARCH_PATH, default=DEFAULT_SEARCH_PATH).strip() or "/search" path = DEFAULT_SEARCH_PATH
url = urljoin(base + "/", path.lstrip("/")) url = urljoin(base + "/", path.lstrip("/"))
return f"{url}?{urlencode({'query': query})}" return f"{url}?{urlencode({'query': query})}"
@@ -570,8 +602,6 @@ class EinschaltenPlugin(BasisPlugin):
base = self._get_base_url() base = self._get_base_url()
if not base: if not base:
return "" return ""
template = _get_setting_text(SETTING_WATCH_PATH_TEMPLATE, default=DEFAULT_WATCH_PATH_TEMPLATE).strip()
if not template:
template = DEFAULT_WATCH_PATH_TEMPLATE template = DEFAULT_WATCH_PATH_TEMPLATE
try: try:
path = template.format(id=int(movie_id)) path = template.format(id=int(movie_id))
@@ -624,7 +654,8 @@ class EinschaltenPlugin(BasisPlugin):
_log_response_html(resp.url or url, resp.text) _log_response_html(resp.url or url, resp.text)
self._detail_html_by_id[movie_id] = resp.text or "" self._detail_html_by_id[movie_id] = resp.text or ""
return resp.text or "" return resp.text or ""
except Exception: except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
return "" return ""
def _fetch_watch_payload(self, movie_id: int) -> dict[str, object]: def _fetch_watch_payload(self, movie_id: int) -> dict[str, object]:
@@ -645,7 +676,8 @@ class EinschaltenPlugin(BasisPlugin):
_log_response_html(resp.url or url, resp.text) _log_response_html(resp.url or url, resp.text)
data = resp.json() data = resp.json()
return dict(data) if isinstance(data, dict) else {} return dict(data) if isinstance(data, dict) else {}
except Exception: except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
return {} return {}
def _watch_stream_url(self, movie_id: int) -> str: def _watch_stream_url(self, movie_id: int) -> str:
@@ -996,21 +1028,17 @@ class EinschaltenPlugin(BasisPlugin):
movie_id = self._ensure_title_id(title) movie_id = self._ensure_title_id(title)
if movie_id is not None: if movie_id is not None:
self._fetch_movie_detail(movie_id) self._fetch_movie_detail(movie_id)
if _get_setting_bool(SETTING_ENABLE_PLAYBACK, default=False):
# Playback: expose a single "Stream" folder (inside: 1 playable item = Filmtitel). # Playback: expose a single "Stream" folder (inside: 1 playable item = Filmtitel).
return ["Stream"] return ["Stream"]
return ["Details"]
def episodes_for(self, title: str, season: str) -> List[str]: def episodes_for(self, title: str, season: str) -> List[str]:
season = (season or "").strip() season = (season or "").strip()
if season.casefold() == "stream" and _get_setting_bool(SETTING_ENABLE_PLAYBACK, default=False): if season.casefold() == "stream":
title = (title or "").strip() title = (title or "").strip()
return [title] if title else [] return [title] if title else []
return [] return []
def stream_link_for(self, title: str, season: str, episode: str) -> Optional[str]: def stream_link_for(self, title: str, season: str, episode: str) -> Optional[str]:
if not _get_setting_bool(SETTING_ENABLE_PLAYBACK, default=False):
return None
title = (title or "").strip() title = (title or "").strip()
season = (season or "").strip() season = (season or "").strip()
episode = (episode or "").strip() episode = (episode or "").strip()

View File

@@ -37,7 +37,7 @@ except ImportError: # pragma: no cover - allow running outside Kodi
xbmcgui = None xbmcgui = None
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url from plugin_helpers import dump_response_html, get_setting_bool, get_setting_string, log_error, log_url, notify_url
from http_session_pool import get_requests_session from http_session_pool import get_requests_session
from regex_patterns import SEASON_EPISODE_TAG, SEASON_EPISODE_URL from regex_patterns import SEASON_EPISODE_TAG, SEASON_EPISODE_URL
@@ -49,16 +49,19 @@ else: # pragma: no cover
BeautifulSoupT: TypeAlias = Any BeautifulSoupT: TypeAlias = Any
BASE_URL = "https://s.to" SETTING_BASE_URL = "serienstream_base_url"
SERIES_BASE_URL = f"{BASE_URL}/serie/stream" DEFAULT_BASE_URL = "https://s.to"
POPULAR_SERIES_URL = f"{BASE_URL}/beliebte-serien"
LATEST_EPISODES_URL = f"{BASE_URL}"
DEFAULT_PREFERRED_HOSTERS = ["voe"] DEFAULT_PREFERRED_HOSTERS = ["voe"]
DEFAULT_TIMEOUT = 20 DEFAULT_TIMEOUT = 20
ADDON_ID = "plugin.video.viewit" ADDON_ID = "plugin.video.viewit"
GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info" GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
GLOBAL_SETTING_LOG_ERRORS = "debug_log_errors"
SETTING_LOG_URLS = "log_urls_serienstream"
SETTING_DUMP_HTML = "dump_html_serienstream"
SETTING_SHOW_URL_INFO = "show_url_info_serienstream"
SETTING_LOG_ERRORS = "log_errors_serienstream"
HEADERS = { HEADERS = {
"User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)", "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
@@ -101,15 +104,34 @@ class SeasonInfo:
episodes: List[EpisodeInfo] episodes: List[EpisodeInfo]
def _get_base_url() -> str:
    """Return the user-configured Serienstream base URL, normalized.

    Falls back to ``DEFAULT_BASE_URL`` when the setting is empty or
    whitespace-only, and strips any trailing slash so callers can safely
    append paths.
    """
    configured = get_setting_string(ADDON_ID, SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip()
    return (configured or DEFAULT_BASE_URL).rstrip("/")
def _series_base_url() -> str:
    """Return the base URL for individual series pages (``/serie/stream``)."""
    return "/".join((_get_base_url(), "serie/stream"))
def _popular_series_url() -> str:
    """Return the URL of the popular-series listing (``/beliebte-serien``)."""
    return "/".join((_get_base_url(), "beliebte-serien"))
def _latest_episodes_url() -> str:
    """Return the URL that lists the latest episodes.

    The site's homepage doubles as the latest-episodes listing, so this is
    simply the configured base URL (no extra path).  The previous
    ``f"{_get_base_url()}"`` wrapper was a redundant f-string around a plain
    call and has been removed.
    """
    return _get_base_url()
def _absolute_url(href: str) -> str: def _absolute_url(href: str) -> str:
return f"{BASE_URL}{href}" if href.startswith("/") else href return f"{_get_base_url()}{href}" if href.startswith("/") else href
def _normalize_series_url(identifier: str) -> str: def _normalize_series_url(identifier: str) -> str:
if identifier.startswith("http://") or identifier.startswith("https://"): if identifier.startswith("http://") or identifier.startswith("https://"):
return identifier.rstrip("/") return identifier.rstrip("/")
slug = identifier.strip("/") slug = identifier.strip("/")
return f"{SERIES_BASE_URL}/{slug}" return f"{_series_base_url()}/{slug}"
def _series_root_url(url: str) -> str: def _series_root_url(url: str) -> str:
@@ -147,16 +169,52 @@ def _normalize_search_text(value: str) -> str:
return value return value
def _is_episode_tba(title: str, original_title: str) -> bool:
combined = f"{title} {original_title}".casefold()
markers = ("tba", "demnächst", "demnaechst", "coming soon", "to be announced")
return any(marker in combined for marker in markers)
def _row_is_upcoming(row: BeautifulSoupT) -> bool:
classes = row.get("class") or []
if isinstance(classes, str):
classes = classes.split()
if "upcoming" in classes:
return True
badge = row.select_one(".badge-upcoming")
if badge and (badge.get_text(" ", strip=True) or "").strip():
return True
watch_cell = row.select_one(".episode-watch-cell")
if watch_cell:
text = watch_cell.get_text(" ", strip=True).casefold()
if "tba" in text:
return True
return False
def _get_setting_bool(setting_id: str, *, default: bool = False) -> bool: def _get_setting_bool(setting_id: str, *, default: bool = False) -> bool:
return get_setting_bool(ADDON_ID, setting_id, default=default) return get_setting_bool(ADDON_ID, setting_id, default=default)
def _notify_url(url: str) -> None: def _notify_url(url: str) -> None:
notify_url(ADDON_ID, heading="Serienstream", url=url, enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO) notify_url(
ADDON_ID,
heading="Serienstream",
url=url,
enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO,
plugin_setting_id=SETTING_SHOW_URL_INFO,
)
def _log_url(url: str, *, kind: str = "VISIT") -> None: def _log_url(url: str, *, kind: str = "VISIT") -> None:
log_url(ADDON_ID, enabled_setting_id=GLOBAL_SETTING_LOG_URLS, log_filename="serienstream_urls.log", url=url, kind=kind) log_url(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="serienstream_urls.log",
url=url,
kind=kind,
)
def _log_parsed_url(url: str) -> None: def _log_parsed_url(url: str) -> None:
@@ -167,12 +225,23 @@ def _log_response_html(url: str, body: str) -> None:
dump_response_html( dump_response_html(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_DUMP_HTML, enabled_setting_id=GLOBAL_SETTING_DUMP_HTML,
plugin_setting_id=SETTING_DUMP_HTML,
url=url, url=url,
body=body, body=body,
filename_prefix="s_to_response", filename_prefix="s_to_response",
) )
def _log_error(message: str) -> None:
    """Append *message* to the Serienstream error log.

    Delegates to the shared ``log_error`` helper, which honors both the
    global and the plugin-specific error-logging settings.
    """
    log_error(
        ADDON_ID,
        message=message,
        log_filename="serienstream_errors.log",
        enabled_setting_id=GLOBAL_SETTING_LOG_ERRORS,
        plugin_setting_id=SETTING_LOG_ERRORS,
    )
def _ensure_requests() -> None: def _ensure_requests() -> None:
if requests is None or BeautifulSoup is None: if requests is None or BeautifulSoup is None:
raise RuntimeError("requests/bs4 sind nicht verfuegbar.") raise RuntimeError("requests/bs4 sind nicht verfuegbar.")
@@ -196,8 +265,12 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = session or get_requests_session("serienstream", headers=HEADERS) sess = session or get_requests_session("serienstream", headers=HEADERS)
try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
raise
if response.url and response.url != url: if response.url and response.url != url:
_log_url(response.url, kind="REDIRECT") _log_url(response.url, kind="REDIRECT")
_log_response_html(url, response.text) _log_response_html(url, response.text)
@@ -210,8 +283,12 @@ def _get_soup_simple(url: str) -> BeautifulSoupT:
_ensure_requests() _ensure_requests()
_log_visit(url) _log_visit(url)
sess = get_requests_session("serienstream", headers=HEADERS) sess = get_requests_session("serienstream", headers=HEADERS)
try:
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc:
_log_error(f"GET {url} failed: {exc}")
raise
if response.url and response.url != url: if response.url and response.url != url:
_log_url(response.url, kind="REDIRECT") _log_url(response.url, kind="REDIRECT")
_log_response_html(url, response.text) _log_response_html(url, response.text)
@@ -227,7 +304,7 @@ def search_series(query: str) -> List[SeriesResult]:
if not normalized_query: if not normalized_query:
return [] return []
# Direkter Abruf wie in fetch_serien.py. # Direkter Abruf wie in fetch_serien.py.
catalog_url = f"{BASE_URL}/serien?by=genre" catalog_url = f"{_get_base_url()}/serien?by=genre"
soup = _get_soup_simple(catalog_url) soup = _get_soup_simple(catalog_url)
results: List[SeriesResult] = [] results: List[SeriesResult] = []
for series in parse_series_catalog(soup).values(): for series in parse_series_catalog(soup).values():
@@ -349,6 +426,8 @@ def _extract_episodes(soup: BeautifulSoupT) -> List[EpisodeInfo]:
# Neues Layout (Stand: 2026-01): Episoden-Tabelle mit Zeilen und onclick-URL. # Neues Layout (Stand: 2026-01): Episoden-Tabelle mit Zeilen und onclick-URL.
rows = soup.select("table.episode-table tbody tr.episode-row") rows = soup.select("table.episode-table tbody tr.episode-row")
for index, row in enumerate(rows): for index, row in enumerate(rows):
if _row_is_upcoming(row):
continue
onclick = (row.get("onclick") or "").strip() onclick = (row.get("onclick") or "").strip()
url = "" url = ""
if onclick: if onclick:
@@ -376,6 +455,8 @@ def _extract_episodes(soup: BeautifulSoupT) -> List[EpisodeInfo]:
original_title = (original_tag.get_text(strip=True) if original_tag else "").strip() original_title = (original_tag.get_text(strip=True) if original_tag else "").strip()
if not title: if not title:
title = f"Episode {number}" title = f"Episode {number}"
if _is_episode_tba(title, original_title):
continue
hosters: List[str] = [] hosters: List[str] = []
for img in row.select(".episode-watch-cell img"): for img in row.select(".episode-watch-cell img"):
@@ -424,7 +505,7 @@ def fetch_episode_stream_link(
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren. # Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
@@ -453,7 +534,7 @@ def fetch_episode_hoster_names(episode_url: str) -> List[str]:
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren. # Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
soup = _get_soup(normalized_url, session=session) soup = _get_soup(normalized_url, session=session)
@@ -546,7 +627,7 @@ def resolve_redirect(target_url: str) -> Optional[str]:
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren. # Preflight optional: Startseite kann 5xx liefern, Zielseite aber funktionieren.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
response = session.get( response = session.get(
@@ -571,7 +652,7 @@ def scrape_series_detail(
session = get_requests_session("serienstream", headers=HEADERS) session = get_requests_session("serienstream", headers=HEADERS)
# Preflight ist optional; manche Umgebungen/Provider leiten die Startseite um. # Preflight ist optional; manche Umgebungen/Provider leiten die Startseite um.
try: try:
_get_soup(BASE_URL, session=session) _get_soup(_get_base_url(), session=session)
except Exception: except Exception:
pass pass
soup = _get_soup(series_url, session=session) soup = _get_soup(series_url, session=session)
@@ -602,7 +683,7 @@ def scrape_series_detail(
class SerienstreamPlugin(BasisPlugin): class SerienstreamPlugin(BasisPlugin):
"""Downloader-Plugin, das Serien von s.to ueber requests/bs4 bereitstellt.""" """Downloader-Plugin, das Serien von s.to ueber requests/bs4 bereitstellt."""
name = "Serienstream (s.to)" name = "Serienstream"
POPULAR_GENRE_LABEL = "⭐ Beliebte Serien" POPULAR_GENRE_LABEL = "⭐ Beliebte Serien"
def __init__(self) -> None: def __init__(self) -> None:
@@ -636,7 +717,7 @@ class SerienstreamPlugin(BasisPlugin):
if self._catalog_cache is not None: if self._catalog_cache is not None:
return self._catalog_cache return self._catalog_cache
# Stand: 2026-01 liefert `?by=genre` konsistente Gruppen für `genres()`. # Stand: 2026-01 liefert `?by=genre` konsistente Gruppen für `genres()`.
catalog_url = f"{BASE_URL}/serien?by=genre" catalog_url = f"{_get_base_url()}/serien?by=genre"
soup = _get_soup_simple(catalog_url) soup = _get_soup_simple(catalog_url)
self._catalog_cache = parse_series_catalog(soup) self._catalog_cache = parse_series_catalog(soup)
return self._catalog_cache return self._catalog_cache
@@ -678,7 +759,7 @@ class SerienstreamPlugin(BasisPlugin):
"""Laedt und cached die Liste der beliebten Serien aus `/beliebte-serien`.""" """Laedt und cached die Liste der beliebten Serien aus `/beliebte-serien`."""
if self._popular_cache is not None: if self._popular_cache is not None:
return list(self._popular_cache) return list(self._popular_cache)
soup = _get_soup_simple(POPULAR_SERIES_URL) soup = _get_soup_simple(_popular_series_url())
results: List[SeriesResult] = [] results: List[SeriesResult] = []
seen: set[str] = set() seen: set[str] = set()
@@ -894,7 +975,7 @@ class SerienstreamPlugin(BasisPlugin):
if cached is not None: if cached is not None:
return list(cached) return list(cached)
url = LATEST_EPISODES_URL url = _latest_episodes_url()
if page > 1: if page > 1:
url = f"{url}?page={page}" url = f"{url}?page={page}"
soup = _get_soup_simple(url) soup = _get_soup_simple(url)

View File

@@ -44,7 +44,7 @@ except ImportError: # pragma: no cover - allow running outside Kodi
xbmcgui = None xbmcgui = None
from plugin_interface import BasisPlugin from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url from plugin_helpers import dump_response_html, get_setting_bool, log_error, log_url, notify_url
from regex_patterns import DIGITS from regex_patterns import DIGITS
if TYPE_CHECKING: # pragma: no cover if TYPE_CHECKING: # pragma: no cover
@@ -61,6 +61,11 @@ DEFAULT_BASE_URL = "https://www.meineseite"
GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info" GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
GLOBAL_SETTING_LOG_ERRORS = "debug_log_errors"
SETTING_LOG_URLS = "log_urls_topstreamfilm"
SETTING_DUMP_HTML = "dump_html_topstreamfilm"
SETTING_SHOW_URL_INFO = "show_url_info_topstreamfilm"
SETTING_LOG_ERRORS = "log_errors_topstreamfilm"
SETTING_GENRE_MAX_PAGES = "topstream_genre_max_pages" SETTING_GENRE_MAX_PAGES = "topstream_genre_max_pages"
DEFAULT_TIMEOUT = 20 DEFAULT_TIMEOUT = 20
DEFAULT_PREFERRED_HOSTERS = ["supervideo", "dropload", "voe"] DEFAULT_PREFERRED_HOSTERS = ["supervideo", "dropload", "voe"]
@@ -119,7 +124,7 @@ def _strip_der_film_suffix(title: str) -> str:
class TopstreamfilmPlugin(BasisPlugin): class TopstreamfilmPlugin(BasisPlugin):
"""Integration fuer eine HTML-basierte Suchseite.""" """Integration fuer eine HTML-basierte Suchseite."""
name = "TopStreamFilm" name = "Topstreamfilm"
def __init__(self) -> None: def __init__(self) -> None:
self._session: RequestsSession | None = None self._session: RequestsSession | None = None
@@ -348,20 +353,43 @@ class TopstreamfilmPlugin(BasisPlugin):
return default return default
def _notify_url(self, url: str) -> None: def _notify_url(self, url: str) -> None:
notify_url(ADDON_ID, heading=self.name, url=url, enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO) notify_url(
ADDON_ID,
heading=self.name,
url=url,
enabled_setting_id=GLOBAL_SETTING_SHOW_URL_INFO,
plugin_setting_id=SETTING_SHOW_URL_INFO,
)
def _log_url(self, url: str, *, kind: str = "VISIT") -> None: def _log_url(self, url: str, *, kind: str = "VISIT") -> None:
log_url(ADDON_ID, enabled_setting_id=GLOBAL_SETTING_LOG_URLS, log_filename="topstream_urls.log", url=url, kind=kind) log_url(
ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_LOG_URLS,
plugin_setting_id=SETTING_LOG_URLS,
log_filename="topstream_urls.log",
url=url,
kind=kind,
)
def _log_response_html(self, url: str, body: str) -> None: def _log_response_html(self, url: str, body: str) -> None:
dump_response_html( dump_response_html(
ADDON_ID, ADDON_ID,
enabled_setting_id=GLOBAL_SETTING_DUMP_HTML, enabled_setting_id=GLOBAL_SETTING_DUMP_HTML,
plugin_setting_id=SETTING_DUMP_HTML,
url=url, url=url,
body=body, body=body,
filename_prefix="topstream_response", filename_prefix="topstream_response",
) )
def _log_error(self, message: str) -> None:
    """Append *message* to the Topstreamfilm error log.

    Delegates to the shared ``log_error`` helper, which honors both the
    global and the plugin-specific error-logging settings.
    """
    log_error(
        ADDON_ID,
        message=message,
        log_filename="topstream_errors.log",
        enabled_setting_id=GLOBAL_SETTING_LOG_ERRORS,
        plugin_setting_id=SETTING_LOG_ERRORS,
    )
def capabilities(self) -> set[str]: def capabilities(self) -> set[str]:
return {"genres", "popular_series"} return {"genres", "popular_series"}
@@ -557,8 +585,12 @@ class TopstreamfilmPlugin(BasisPlugin):
session = self._get_session() session = self._get_session()
self._log_url(url, kind="VISIT") self._log_url(url, kind="VISIT")
self._notify_url(url) self._notify_url(url)
try:
response = session.get(url, timeout=DEFAULT_TIMEOUT) response = session.get(url, timeout=DEFAULT_TIMEOUT)
response.raise_for_status() response.raise_for_status()
except Exception as exc:
self._log_error(f"GET {url} failed: {exc}")
raise
self._log_url(response.url, kind="OK") self._log_url(response.url, kind="OK")
self._log_response_html(response.url, response.text) self._log_response_html(response.url, response.text)
return BeautifulSoup(response.text, "html.parser") return BeautifulSoup(response.text, "html.parser")
@@ -803,12 +835,16 @@ class TopstreamfilmPlugin(BasisPlugin):
request_url = f"{url}?{urlencode(params)}" request_url = f"{url}?{urlencode(params)}"
self._log_url(request_url, kind="GET") self._log_url(request_url, kind="GET")
self._notify_url(request_url) self._notify_url(request_url)
try:
response = session.get( response = session.get(
url, url,
params=params, params=params,
timeout=DEFAULT_TIMEOUT, timeout=DEFAULT_TIMEOUT,
) )
response.raise_for_status() response.raise_for_status()
except Exception as exc:
self._log_error(f"GET {request_url} failed: {exc}")
raise
self._log_url(response.url, kind="OK") self._log_url(response.url, kind="OK")
self._log_response_html(response.url, response.text) self._log_response_html(response.url, response.text)

View File

@@ -1,24 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<settings> <settings>
<category label="Allgemein"> <category label="Logging">
<setting id="debug_log_urls" type="bool" label="Debug: URL-Log aktivieren (global)" default="false" /> <setting id="debug_log_urls" type="bool" label="URL-Logging aktivieren (global)" default="false" />
<setting id="debug_dump_html" type="bool" label="Debug: HTML-Antworten speichern (global)" default="false" /> <setting id="debug_dump_html" type="bool" label="HTML-Dumps aktivieren (global)" default="false" />
<setting id="debug_show_url_info" type="bool" label="Debug: Aufgerufene URL anzeigen (global)" default="false" /> <setting id="debug_show_url_info" type="bool" label="URL-Info anzeigen (global)" default="false" />
<setting id="debug_log_errors" type="bool" label="Fehler-Logging aktivieren (global)" default="false" />
<setting id="log_max_mb" type="number" label="URL-Log: max. Datei-Größe (MB)" default="5" />
<setting id="log_max_files" type="number" label="URL-Log: max. Rotationen" default="3" />
<setting id="dump_max_files" type="number" label="HTML-Dumps: max. Dateien pro Plugin" default="200" />
<setting id="log_urls_serienstream" type="bool" label="Serienstream: URL-Logging" default="false" />
<setting id="dump_html_serienstream" type="bool" label="Serienstream: HTML-Dumps" default="false" />
<setting id="show_url_info_serienstream" type="bool" label="Serienstream: URL-Info anzeigen" default="false" />
<setting id="log_errors_serienstream" type="bool" label="Serienstream: Fehler loggen" default="false" />
<setting id="log_urls_aniworld" type="bool" label="Aniworld: URL-Logging" default="false" />
<setting id="dump_html_aniworld" type="bool" label="Aniworld: HTML-Dumps" default="false" />
<setting id="show_url_info_aniworld" type="bool" label="Aniworld: URL-Info anzeigen" default="false" />
<setting id="log_errors_aniworld" type="bool" label="Aniworld: Fehler loggen" default="false" />
<setting id="log_urls_topstreamfilm" type="bool" label="Topstreamfilm: URL-Logging" default="false" />
<setting id="dump_html_topstreamfilm" type="bool" label="Topstreamfilm: HTML-Dumps" default="false" />
<setting id="show_url_info_topstreamfilm" type="bool" label="Topstreamfilm: URL-Info anzeigen" default="false" />
<setting id="log_errors_topstreamfilm" type="bool" label="Topstreamfilm: Fehler loggen" default="false" />
<setting id="log_urls_einschalten" type="bool" label="Einschalten: URL-Logging" default="false" />
<setting id="dump_html_einschalten" type="bool" label="Einschalten: HTML-Dumps" default="false" />
<setting id="show_url_info_einschalten" type="bool" label="Einschalten: URL-Info anzeigen" default="false" />
<setting id="log_errors_einschalten" type="bool" label="Einschalten: Fehler loggen" default="false" />
</category> </category>
<category label="TopStream"> <category label="TopStream">
<setting id="topstream_base_url" type="text" label="Basis-URL (z.B. https://www.meineseite)" default="https://www.meineseite" /> <setting id="topstream_base_url" type="text" label="Domain (BASE_URL)" default="https://topstreamfilm.live" />
<setting id="topstream_genre_max_pages" type="number" label="Genres: max. Seiten laden (Pagination)" default="20" /> <setting id="topstream_genre_max_pages" type="number" label="Genres: max. Seiten laden (Pagination)" default="20" />
</category> </category>
<category label="SerienStream">
<setting id="serienstream_base_url" type="text" label="Domain (BASE_URL)" default="https://s.to" />
</category>
<category label="AniWorld">
<setting id="aniworld_base_url" type="text" label="Domain (BASE_URL)" default="https://aniworld.to" />
</category>
<category label="Einschalten"> <category label="Einschalten">
<setting id="einschalten_base_url" type="text" label="Basis-URL (nur eigene/autorisiert betriebene Quelle)" default="" /> <setting id="einschalten_base_url" type="text" label="Domain (BASE_URL)" default="https://einschalten.in" />
<setting id="einschalten_index_path" type="text" label="Index-Pfad (z.B. /)" default="/" />
<setting id="einschalten_new_titles_path" type="text" label="Neue-Titel-Pfad (z.B. /movies/new)" default="/movies/new" />
<setting id="einschalten_search_path" type="text" label="Suche-Pfad (z.B. /search)" default="/search" />
<setting id="einschalten_genres_path" type="text" label="Genres-Pfad (z.B. /genres)" default="/genres" />
<setting id="einschalten_enable_playback" type="bool" label="Wiedergabe aktivieren (nur autorisierte Quellen)" default="false" />
<setting id="einschalten_watch_path_template" type="text" label="Watch-Pfad-Template (z.B. /api/movies/{id}/watch)" default="/api/movies/{id}/watch" />
</category> </category>
<category label="TMDB"> <category label="TMDB">
<setting id="tmdb_enabled" type="bool" label="TMDB aktivieren" default="true" />
<setting id="tmdb_api_key" type="text" label="TMDB API Key" default="" /> <setting id="tmdb_api_key" type="text" label="TMDB API Key" default="" />
<setting id="tmdb_language" type="text" label="TMDB Sprache (z.B. de-DE)" default="de-DE" /> <setting id="tmdb_language" type="text" label="TMDB Sprache (z.B. de-DE)" default="de-DE" />
<setting id="tmdb_prefetch_concurrency" type="number" label="TMDB: Parallelität (Prefetch, 1-20)" default="6" /> <setting id="tmdb_prefetch_concurrency" type="number" label="TMDB: Parallelität (Prefetch, 1-20)" default="6" />

54
docs/DEFAULT_ROUTER.md Normal file
View File

@@ -0,0 +1,54 @@
# ViewIT Hauptlogik (`addon/default.py`)
Dieses Dokument beschreibt den Einstiegspunkt des Addons und die zentrale Steuerlogik.
## Aufgabe der Datei
`addon/default.py` ist der Router des Addons. Er:
- lädt die Plugin-Module dynamisch,
- stellt die Kodi-Navigation bereit,
- übersetzt UI-Aktionen in Plugin-Aufrufe,
- startet die Wiedergabe und verwaltet Playstate/Resume.
## Ablauf (high level)
1. **Plugin-Discovery**: Lädt alle `addon/plugins/*.py` (ohne `_`-Präfix) und instanziiert Klassen, die von `BasisPlugin` erben.
2. **Navigation**: Baut Kodi-Listen (Serien/Staffeln/Episoden) auf Basis der Plugin-Antworten.
3. **Playback**: Holt Stream-Links aus dem Plugin und startet die Wiedergabe.
4. **Playstate**: Speichert Resume-Daten lokal (`playstate.json`) und setzt `playcount`/Resume-Infos.
## Routing & Aktionen
Die Datei arbeitet mit URLParametern (KodiPluginStandard). Typische Aktionen:
- `search` → Suche über ein Plugin
- `seasons` → Staffeln für einen Titel
- `episodes` → Episoden für eine Staffel
- `play` → StreamLink auflösen und abspielen
Die genaue Aktion wird aus den QueryParametern gelesen und an das entsprechende Plugin delegiert.
## Playstate (Resume/Watched)
- **Speicherort**: `playstate.json` im AddonProfilordner.
- **Key**: Kombination aus PluginName, Titel, Staffel, Episode.
- **Verwendung**:
- `playcount` wird gesetzt, wenn „gesehen“ markiert ist.
- `resume_position`/`resume_total` werden gesetzt, wenn vorhanden.
## Wichtige Hilfsfunktionen
- **PluginLoader**: findet & instanziiert Plugins.
- **UIHelper**: setzt ContentType, baut Verzeichnisseinträge.
- **PlaystateHelper**: `_load_playstate`, `_save_playstate`, `_apply_playstate_to_info`.
## Fehlerbehandlung
- PluginImportfehler werden isoliert behandelt, damit das Addon nicht komplett ausfällt.
- NetzwerkFehler werden in Plugins abgefangen, `default.py` sollte nur saubere Fehlermeldungen weitergeben.
## Debugging
- Globale DebugSettings werden über `addon/resources/settings.xml` gesteuert.
- Plugins loggen URLs/HTML optional (siehe jeweilige PluginDoku).
## Änderungen & Erweiterungen
Für neue Aktionen:
1. Neue Aktion im Router registrieren.
2. UIEinträge passend anlegen.
3. Entsprechende PluginMethode definieren oder erweitern.
## Hinweis zur Erstellung
Teile dieser Dokumentation wurden KIgestützt erstellt und bei Bedarf manuell angepasst.

View File

@@ -0,0 +1,75 @@
# ViewIT Entwicklerdoku Plugins (`addon/plugins/*_plugin.py`)
Diese Doku beschreibt, wie Plugins im ViewITAddon aufgebaut sind und wie neue ProviderIntegrationen entwickelt werden.
## Grundlagen
- Jedes Plugin ist eine einzelne Datei unter `addon/plugins/`.
- Dateinamen **ohne** `_`-Präfix werden automatisch geladen.
- Jede Datei enthält eine Klasse, die von `BasisPlugin` erbt.
## PflichtMethoden (BasisPlugin)
Jedes Plugin muss diese Methoden implementieren:
- `async search_titles(query: str) -> list[str]`
- `seasons_for(title: str) -> list[str]`
- `episodes_for(title: str, season: str) -> list[str]`
## Optionale Features (Capabilities)
Über `capabilities()` kann das Plugin zusätzliche Funktionen anbieten:
- `popular_series``popular_series()`
- `genres``genres()` + `titles_for_genre(genre)`
- `latest_episodes``latest_episodes(page=1)`
## Empfohlene Struktur
- Konstanten für URLs/Endpoints (BASE_URL, Pfade, Templates)
- `requests` + `bs4` optional (fehlt beides, Plugin sollte sauber deaktivieren)
- HelperFunktionen für Parsing und Normalisierung
- Caches für Such, Staffel und EpisodenDaten
## Suche (aktuelle Policy)
- **Nur TitelMatches**
- **SubstringMatch** nach Normalisierung (Lowercase + NichtAlnum → Leerzeichen)
- Keine Beschreibung/Plot/Meta für Matches
## Namensgebung
- PluginKlassenname: `XxxPlugin`
- Anzeigename (Property `name`): **mit Großbuchstaben beginnen** (z.B. `Serienstream`, `Einschalten`)
## Settings pro Plugin
Standard: `*_base_url` (Domain / BASE_URL)
- Beispiele:
- `serienstream_base_url`
- `aniworld_base_url`
- `einschalten_base_url`
- `topstream_base_url`
## Playback
- Wenn möglich `stream_link_for(...)` implementieren.
- Optional `available_hosters_for(...)`/`resolve_stream_link(...)` für HosterAuflösung.
## Debugging
Global gesteuert über Settings:
- `debug_log_urls`
- `debug_dump_html`
- `debug_show_url_info`
Plugins sollten die Helper aus `addon/plugin_helpers.py` nutzen:
- `log_url(...)`
- `dump_response_html(...)`
- `notify_url(...)`
## Template
`addon/plugins/_template_plugin.py` dient als Startpunkt für neue Provider.
## Build & Test
- ZIP bauen: `./scripts/build_kodi_zip.sh`
- AddonOrdner: `./scripts/build_install_addon.sh`
## BeispielCheckliste
- [ ] `name` korrekt gesetzt
- [ ] `*_base_url` in Settings vorhanden
- [ ] Suche matcht nur Titel
- [ ] Fehlerbehandlung und Timeouts vorhanden
- [ ] Optional: Caches für Performance
## Hinweis zur Erstellung
Teile dieser Dokumentation wurden KIgestützt erstellt und bei Bedarf manuell angepasst.

View File

@@ -6,6 +6,10 @@ Dieses Dokument beschreibt, wie das Plugin-System von **ViewIt** funktioniert un
ViewIt lädt Provider-Integrationen dynamisch aus `addon/plugins/*.py`. Jede Datei enthält eine Klasse, die von `BasisPlugin` erbt. Beim Start werden alle Plugins instanziiert und nur aktiv genutzt, wenn sie verfügbar sind. ViewIt lädt Provider-Integrationen dynamisch aus `addon/plugins/*.py`. Jede Datei enthält eine Klasse, die von `BasisPlugin` erbt. Beim Start werden alle Plugins instanziiert und nur aktiv genutzt, wenn sie verfügbar sind.
Weitere Details:
- `docs/DEFAULT_ROUTER.md` (Hauptlogik in `addon/default.py`)
- `docs/PLUGIN_DEVELOPMENT.md` (Entwicklerdoku für Plugins)
### Aktuelle Plugins ### Aktuelle Plugins
- `serienstream_plugin.py` Serienstream (s.to) - `serienstream_plugin.py` Serienstream (s.to)