Standardize plugin base URLs

This commit is contained in:
2026-02-01 18:25:22 +01:00
parent 4e0b0ffd1a
commit cd2e8e2b15
4 changed files with 86 additions and 36 deletions

View File

@@ -29,7 +29,7 @@ except ImportError: # pragma: no cover - allow running outside Kodi
xbmcaddon = None
from plugin_interface import BasisPlugin
from plugin_helpers import dump_response_html, get_setting_bool, log_url, notify_url
from plugin_helpers import dump_response_html, get_setting_bool, get_setting_string, log_url, notify_url
from http_session_pool import get_requests_session
from regex_patterns import DIGITS, SEASON_EPISODE_TAG, SEASON_EPISODE_URL, STAFFEL_NUM_IN_URL
@@ -41,13 +41,8 @@ else: # pragma: no cover
BeautifulSoupT: TypeAlias = Any
BASE_URL = "https://aniworld.to"
ANIME_BASE_URL = f"{BASE_URL}/anime/stream"
POPULAR_ANIMES_URL = f"{BASE_URL}/beliebte-animes"
GENRES_URL = f"{BASE_URL}/animes"
LATEST_EPISODES_URL = f"{BASE_URL}/neue-episoden"
SEARCH_URL = f"{BASE_URL}/search?q={{query}}"
SEARCH_API_URL = f"{BASE_URL}/ajax/search"
SETTING_BASE_URL = "aniworld_base_url"
DEFAULT_BASE_URL = "https://aniworld.to"
DEFAULT_PREFERRED_HOSTERS = ["voe"]
DEFAULT_TIMEOUT = 20
ADDON_ID = "plugin.video.viewit"
@@ -93,8 +88,39 @@ class SeasonInfo:
episodes: List[EpisodeInfo]
def _get_base_url() -> str:
    """Return the user-configured site base URL, normalized without a trailing slash.

    Falls back to DEFAULT_BASE_URL when the addon setting is missing or blank.
    """
    configured = get_setting_string(ADDON_ID, SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip()
    return (configured or DEFAULT_BASE_URL).rstrip("/")
def _anime_base_url() -> str:
    """Return the anime stream listing root under the configured base URL."""
    return _get_base_url() + "/anime/stream"
def _popular_animes_url() -> str:
    """Return the 'popular animes' page URL under the configured base URL."""
    return _get_base_url() + "/beliebte-animes"
def _genres_url() -> str:
    """Return the genre index page URL under the configured base URL."""
    return _get_base_url() + "/animes"
def _latest_episodes_url() -> str:
    """Return the 'new episodes' page URL under the configured base URL."""
    return _get_base_url() + "/neue-episoden"
def _search_url(query: str) -> str:
    """Return the HTML search page URL for *query*.

    NOTE(review): *query* is interpolated as-is; callers appear to URL-quote it
    beforehand (see the `requests.utils.quote` call site) — confirm before
    passing raw user input.
    """
    return _get_base_url() + "/search?q=" + query
def _search_api_url() -> str:
    """Return the AJAX search API endpoint under the configured base URL."""
    return _get_base_url() + "/ajax/search"
def _absolute_url(href: str) -> str:
    """Make *href* absolute against the configured base URL.

    A site-relative path (starting with "/") is prefixed with the configured
    base URL; anything else (already-absolute URLs) is returned unchanged.

    Fix: the block contained two return statements — a stale first return
    built on the module constant ``BASE_URL`` made the second unreachable and
    ignored the user-configured base URL. Only the configurable form is kept.
    """
    if href.startswith("/"):
        return f"{_get_base_url()}{href}"
    return href
def _log_url(url: str, *, kind: str = "VISIT") -> None:
@@ -360,7 +386,7 @@ def scrape_anime_detail(anime_identifier: str, max_seasons: Optional[int] = None
_log_url(anime_url, kind="ANIME")
session = get_requests_session("aniworld", headers=HEADERS)
try:
_get_soup(BASE_URL, session=session)
_get_soup(_get_base_url(), session=session)
except Exception:
pass
soup = _get_soup(anime_url, session=session)
@@ -394,7 +420,7 @@ def resolve_redirect(target_url: str) -> Optional[str]:
normalized_url = _absolute_url(target_url)
_log_visit(normalized_url)
session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session)
_get_soup(_get_base_url(), session=session)
response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True)
if response.url:
_log_url(response.url, kind="RESOLVED")
@@ -405,7 +431,7 @@ def fetch_episode_hoster_names(episode_url: str) -> List[str]:
_ensure_requests()
normalized_url = _absolute_url(episode_url)
session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session)
_get_soup(_get_base_url(), session=session)
soup = _get_soup(normalized_url, session=session)
names: List[str] = []
seen: set[str] = set()
@@ -440,7 +466,7 @@ def fetch_episode_stream_link(
normalized_url = _absolute_url(episode_url)
preferred = [hoster.lower() for hoster in (preferred_hosters or DEFAULT_PREFERRED_HOSTERS)]
session = get_requests_session("aniworld", headers=HEADERS)
_get_soup(BASE_URL, session=session)
_get_soup(_get_base_url(), session=session)
soup = _get_soup(normalized_url, session=session)
candidates: List[Tuple[str, str]] = []
for anchor in soup.select(".hosterSiteVideo a.watchEpisode"):
@@ -476,10 +502,10 @@ def search_animes(query: str) -> List[SeriesResult]:
return []
session = get_requests_session("aniworld", headers=HEADERS)
try:
session.get(BASE_URL, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
session.get(_get_base_url(), headers=HEADERS, timeout=DEFAULT_TIMEOUT)
except Exception:
pass
data = _post_json(SEARCH_API_URL, payload={"keyword": query}, session=session)
data = _post_json(_search_api_url(), payload={"keyword": query}, session=session)
results: List[SeriesResult] = []
seen: set[str] = set()
if isinstance(data, list):
@@ -507,7 +533,7 @@ def search_animes(query: str) -> List[SeriesResult]:
results.append(SeriesResult(title=title, description=description, url=url))
return results
soup = _get_soup_simple(SEARCH_URL.format(query=requests.utils.quote(query)))
soup = _get_soup_simple(_search_url(requests.utils.quote(query)))
for anchor in soup.select("a[href^='/anime/stream/'][href]"):
href = (anchor.get("href") or "").strip()
if not href or "/staffel-" in href or "/episode-" in href:
@@ -600,7 +626,7 @@ class AniworldPlugin(BasisPlugin):
def _ensure_popular(self) -> List[SeriesResult]:
if self._popular_cache is not None:
return list(self._popular_cache)
soup = _get_soup_simple(POPULAR_ANIMES_URL)
soup = _get_soup_simple(_popular_animes_url())
results: List[SeriesResult] = []
seen: set[str] = set()
for anchor in soup.select("div.seriesListContainer a[href^='/anime/stream/']"):
@@ -646,7 +672,7 @@ class AniworldPlugin(BasisPlugin):
if cached is not None:
return list(cached)
url = LATEST_EPISODES_URL
url = _latest_episodes_url()
if page > 1:
url = f"{url}?page={page}"
@@ -658,7 +684,7 @@ class AniworldPlugin(BasisPlugin):
def _ensure_genres(self) -> Dict[str, List[SeriesResult]]:
if self._genre_cache is not None:
return {key: list(value) for key, value in self._genre_cache.items()}
soup = _get_soup_simple(GENRES_URL)
soup = _get_soup_simple(_genres_url())
results: Dict[str, List[SeriesResult]] = {}
genre_blocks = soup.select("#seriesContainer div.genre")
if not genre_blocks: