dev: umfangreiches Refactoring, Trakt-Integration und Code-Review-Fixes (0.1.69-dev)
Core & Architektur:
- Neues Verzeichnis addon/core/ mit router.py, trakt.py, metadata.py, gui.py, playstate.py, plugin_manager.py, updater.py
- Tests-Verzeichnis hinzugefügt (24 Tests, pytest + Coverage)

Trakt-Integration:
- OAuth Device Flow, Scrobbling, Watchlist, History, Calendar
- Upcoming Episodes, Weiterschauen (Continue Watching)
- Watched-Status in Episodenlisten
- _trakt_find_in_plugins() mit 5-Min-Cache

Serienstream-Suche:
- API-Ergebnisse werden immer mit Katalog-Cache ergänzt (serverseitiges 10-Treffer-Limit)
- Katalog-Cache wird beim Addon-Start im Daemon-Thread vorgewärmt
- Notification nach Cache-Load via xbmc.executebuiltin() (thread-sicher)

Bugfixes (Code-Review):
- Race Condition auf _TRAKT_WATCHED_CACHE: _TRAKT_WATCHED_CACHE_LOCK hinzugefügt
- GUI-Dialog aus Daemon-Thread: xbmcgui -> xbmc.executebuiltin()
- ValueError in Trakt-Watchlist-Routen abgesichert
- Token expires_at==0 Check korrigiert
- get_setting_bool() Kontrollfluss in gui.py bereinigt
- topstreamfilm_plugin: try-finally um xbmcvfs.File.close()

Cleanup:
- default.py.bak und refactor_router.py entfernt
- .gitignore: /tests/ Eintrag entfernt
- Type-Hints vereinheitlicht (Dict/List/Tuple -> dict/list/tuple)
This commit is contained in:
3
tests/README_LOCAL.md
Normal file
3
tests/README_LOCAL.md
Normal file
@@ -0,0 +1,3 @@
|
||||
Diese Tests liegen im Repository unter tests/. Ausführen mit:
|
||||
|
||||
pytest -q
|
||||
10
tests/conftest.py
Normal file
10
tests/conftest.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Pytest bootstrap: make the repository root and the addon package importable."""

import sys
from pathlib import Path

# Repository layout: <repo>/tests/conftest.py -> parents[1] is the repo root.
ROOT = Path(__file__).resolve().parents[1]
ADDON = ROOT / "addon"


def _prepend_to_sys_path(directory: Path) -> None:
    """Put *directory* at the front of sys.path unless it is already there."""
    entry = str(directory)
    if entry not in sys.path:
        sys.path.insert(0, entry)


_prepend_to_sys_path(ROOT)
_prepend_to_sys_path(ADDON)
|
||||
30
tests/test_dokustreams_live.py
Normal file
30
tests/test_dokustreams_live.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import re
import shutil
import subprocess

import pytest


@pytest.mark.live
def test_dokustreams_embed_url_via_curl():
    """Fetch a doku-streams article with curl and verify an embed URL exists.

    Marked ``live`` because it performs real network I/O; skipped when curl
    is not installed on the host.
    """
    if shutil.which('curl') is None:
        pytest.skip('curl not available')

    article_url = 'https://doku-streams.com/verbrechen/deutsche-im-knast-japan-und-die-disziplin/'
    proc = subprocess.run(
        ['curl', '-L', '-s', '--compressed', article_url],
        check=False,
        capture_output=True,
        text=False,
    )
    assert proc.returncode == 0

    # Decode leniently: the page may contain bytes outside UTF-8.
    page = proc.stdout.decode('utf-8', errors='ignore')
    assert page

    # Primary: an <iframe src="...">; fallback: a JSON "embedUrl" field.
    embed = re.search(r'<iframe[^>]+src="([^"]+)"', page, re.IGNORECASE)
    if embed is None:
        embed = re.search(r'"embedUrl"\s*:\s*"([^"]+)"', page)
    assert embed is not None

    embed_src = embed.group(1)
    assert 'youtube' in embed_src or 'vimeo' in embed_src
|
||||
41
tests/test_filmpalast_genres.py
Normal file
41
tests/test_filmpalast_genres.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from bs4 import BeautifulSoup

from addon.plugins import filmpalast_plugin as fp


def _soup(html: str):
    """Parse *html* with the same parser the plugin uses."""
    return BeautifulSoup(html, "html.parser")


def test_genres_parse_sidebar(monkeypatch):
    """The sidebar genre section is parsed into a list of plain genre names."""
    sidebar_html = """
        <aside>
        <section id="genre">
        <ul>
        <li><a href="https://filmpalast.to/search/genre/Action">Action</a></li>
        <li><a href="https://filmpalast.to/search/genre/Drama">Drama</a></li>
        </ul>
        </section>
        </aside>
    """
    monkeypatch.setattr(fp, "_get_soup", lambda *args, **kwargs: _soup(sidebar_html))

    assert fp.FilmpalastPlugin().genres() == ["Action", "Drama"]


def test_titles_for_genre_page_parsing(monkeypatch):
    """Episode hits on a genre page collapse into a single series entry."""
    page_html = """
        <article class="liste"><h2><a href="//filmpalast.to/stream/test-film">Test Film</a></h2></article>
        <article class="liste"><h2><a href="//filmpalast.to/stream/test-show-s01e01">Test Show S01E01 Pilot</a></h2></article>
        <article class="liste"><h2><a href="//filmpalast.to/stream/test-show-s01e02">Test Show S01E02 Folge 2</a></h2></article>
    """
    plugin = fp.FilmpalastPlugin()
    plugin._genre_to_url = {"Action": "https://filmpalast.to/search/genre/Action"}
    monkeypatch.setattr(fp, "_get_soup", lambda *args, **kwargs: _soup(page_html))

    assert plugin.titles_for_genre_page("Action", 1) == ["Test Film", "Test Show"]
    # Grouping the two S01 episodes must register season 1 for the show.
    assert plugin.seasons_for("Test Show") == ["Staffel 1"]
|
||||
|
||||
45
tests/test_filmpalast_series_grouping.py
Normal file
45
tests/test_filmpalast_series_grouping.py
Normal file
@@ -0,0 +1,45 @@
|
||||
import asyncio

from addon.plugins.filmpalast_plugin import FilmpalastPlugin, SearchHit


def _fake_hits(_query: str) -> list[SearchHit]:
    """Deterministic stand-in for the plugin's search layer.

    Two episodes of one series plus a single movie — enough to exercise the
    grouping logic.
    """
    series_ep1 = SearchHit(title="Star Trek S01E01 Pilot", url="https://filmpalast.to/stream/star-trek-s01e01")
    series_ep2 = SearchHit(title="Star Trek S01E02 Zweiter Kontakt", url="https://filmpalast.to/stream/star-trek-s01e02")
    movie = SearchHit(title="Ein Hund namens Palma", url="https://filmpalast.to/stream/ein-hund-namens-palma")
    return [series_ep1, series_ep2, movie]


def _patched_plugin(monkeypatch) -> FilmpalastPlugin:
    """Build a plugin whose _search_hits is replaced by _fake_hits."""
    plugin = FilmpalastPlugin()
    monkeypatch.setattr(plugin, "_search_hits", _fake_hits)
    return plugin


def test_search_groups_series_and_movies(monkeypatch):
    """Episode hits are grouped under their series; movies stay as-is."""
    plugin = _patched_plugin(monkeypatch)

    titles = asyncio.run(plugin.search_titles("trek"))

    assert titles == ["Ein Hund namens Palma", "Star Trek"]


def test_series_seasons_and_episodes(monkeypatch):
    """After a search, seasons and episode labels are derived from the hits."""
    plugin = _patched_plugin(monkeypatch)

    asyncio.run(plugin.search_titles("trek"))

    assert plugin.is_movie("Star Trek") is False
    assert plugin.seasons_for("Star Trek") == ["Staffel 1"]
    assert plugin.episodes_for("Star Trek", "Staffel 1") == [
        "Episode 1 - Pilot",
        "Episode 2 - Zweiter Kontakt",
    ]


def test_movie_path_stays_unchanged(monkeypatch):
    """Movies keep the synthetic 'Film' season with a single 'Stream' entry."""
    plugin = _patched_plugin(monkeypatch)

    asyncio.run(plugin.search_titles("hund"))

    assert plugin.is_movie("Ein Hund namens Palma") is True
    assert plugin.seasons_for("Ein Hund namens Palma") == ["Film"]
    assert plugin.episodes_for("Ein Hund namens Palma", "Film") == ["Stream"]
|
||||
29
tests/test_serienstream_parser.py
Normal file
29
tests/test_serienstream_parser.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from bs4 import BeautifulSoup

from addon.plugins.serienstream_plugin import _extract_episodes


def test_extract_episodes_skips_upcoming():
    """_extract_episodes() must drop rows marked as upcoming/TBA.

    The fixture contains one released episode (number 1, hoster VOE) and one
    "DEMNÄCHST" row without a title or watch link; only the released episode
    may survive extraction.
    """
    # NOTE(review): removed an unused `import pytest` — nothing in this
    # module referenced it.
    html = """
    <table class='episode-table'>
    <tbody>
    <tr class='episode-row' onclick="window.location='https://s.to/serie/x/staffel-1/episode-1'">
    <th class='episode-number-cell'>1</th>
    <td><strong class='episode-title-ger'>Ep1</strong></td>
    <td class='episode-watch-cell'><img alt='VOE'></td>
    </tr>
    <tr class='episode-row upcoming' onclick="window.location='https://s.to/serie/x/staffel-1/episode-2'">
    <th class='episode-number-cell'>2</th>
    <td>
    <strong class='episode-title-ger'></strong>
    <span class='badge badge-upcoming'>DEMNÄCHST</span>
    </td>
    <td class='episode-watch-cell'>— TBA —</td>
    </tr>
    </tbody>
    </table>
    """
    soup = BeautifulSoup(html, "html.parser")
    episodes = _extract_episodes(soup)
    assert [e.number for e in episodes] == [1]
|
||||
48
tests/test_serienstream_perf.py
Normal file
48
tests/test_serienstream_perf.py
Normal file
@@ -0,0 +1,48 @@
|
||||
import os
import time

import pytest

from addon.plugins.serienstream_plugin import SerienstreamPlugin


@pytest.mark.live
@pytest.mark.perf
def test_live_titel_staffel_episode_timing():
    """Measure title->seasons and season->episodes latency against the live site.

    Opt-in via the LIVE_TESTS environment variable; title, season, and the
    time budgets can all be overridden through environment variables.
    """
    if not os.getenv("LIVE_TESTS"):
        pytest.skip("LIVE_TESTS not set")

    title = os.getenv("LIVE_TITLE", "Star Trek: Starfleet Academy")
    season = os.getenv("LIVE_SEASON", "Staffel 1")

    budget_seasons = float(os.getenv("PERF_MAX_TITLE_TO_SEASON", "6.0"))
    budget_episodes = float(os.getenv("PERF_MAX_SEASON_TO_EPISODES", "5.0"))

    plugin = SerienstreamPlugin()

    started = time.perf_counter()
    seasons = plugin.seasons_for(title)
    after_seasons = time.perf_counter()

    assert seasons, f"Keine Staffeln für Titel gefunden: {title}"
    assert season in seasons, f"Gewünschte Staffel fehlt: {season}; vorhanden: {seasons}"

    episodes = plugin.episodes_for(title, season)
    after_episodes = time.perf_counter()

    assert episodes, f"Keine Episoden für {title} / {season}"

    title_to_season = after_seasons - started
    season_to_episodes = after_episodes - after_seasons

    # Print the measured timings so they show up with `pytest -s` / on failure.
    print(
        f"PERF title->seasons={title_to_season:.3f}s "
        f"season->episodes={season_to_episodes:.3f}s "
        f"episodes={len(episodes)}"
    )

    assert title_to_season <= budget_seasons, (
        f"title->seasons zu langsam: {title_to_season:.3f}s > {budget_seasons:.3f}s"
    )
    assert season_to_episodes <= budget_episodes, (
        f"season->episodes zu langsam: {season_to_episodes:.3f}s > {budget_episodes:.3f}s"
    )
|
||||
239
tests/test_serienstream_user_actions.py
Normal file
239
tests/test_serienstream_user_actions.py
Normal file
@@ -0,0 +1,239 @@
|
||||
import os

import pytest

try:
    from bs4 import BeautifulSoup
except Exception:  # pragma: no cover - optional in local env
    BeautifulSoup = None

from addon.plugins import serienstream_plugin as sp


# Every test below parses HTML; skip the whole module when bs4 is absent.
pytestmark = pytest.mark.skipif(BeautifulSoup is None, reason="bs4 not available")


def _soup(html: str):
    """Parse *html* with the same parser the plugin uses."""
    return BeautifulSoup(html, "html.parser")


def test_search_series_api_first(monkeypatch):
    """search_series() merges API hits with the catalog cache without duplicates."""
    monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to")
    monkeypatch.setattr(sp, "_search_series_api", lambda q: [
        sp.SeriesResult(title="Star Trek", description="", url="https://s.to/serie/star-trek"),
    ])
    # Catalog cache: one URL already known from the API + one new URL.
    cache_items = [
        sp.SeriesResult(title="Star Trek", description="", url="https://s.to/serie/star-trek"),  # duplicate
        sp.SeriesResult(title="Star Trek: Academy", description="", url="https://s.to/serie/star-trek-academy"),
    ]
    monkeypatch.setattr(sp, "_load_catalog_index_from_cache", lambda: cache_items)
    results = sp.search_series("trek")
    titles = [r.title for r in results]
    # API hits come first; entries sharing a URL are de-duplicated.
    assert titles[0] == "Star Trek"
    assert "Star Trek: Academy" in titles
    assert titles.count("Star Trek") == 1


def test_search_series_falls_back_to_catalog_cache(monkeypatch):
    """When API and server search are empty, the catalog cache is the fallback."""
    monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to")
    # Neither the API nor the server-side search returns anything.
    monkeypatch.setattr(sp, "_search_series_api", lambda q: [])
    monkeypatch.setattr(sp, "_search_series_server", lambda q: [])
    # Seed the catalog cache with test data.
    cache_items = [
        sp.SeriesResult(title="Der Hund", description="", url="https://s.to/serie/der-hund"),
        sp.SeriesResult(title="Hundeleben", description="", url="https://s.to/serie/hundeleben"),
    ]
    monkeypatch.setattr(sp, "_load_catalog_index_from_cache", lambda: cache_items)
    results = sp.search_series("hund")
    titles = [r.title for r in results]
    # Whole-word matching only ("Hundeleben" must not match "hund").
    assert titles == ["Der Hund"]


def test_extract_season_links(monkeypatch):
    """Season pills become (number, absolute URL); episode links are skipped."""
    # FIX(review): pin the base URL like every other test in this module does,
    # so the absolute-URL assertion is deterministic regardless of the
    # configured mirror.
    monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to")
    html = """
    <ul class='nav list-items-nav'>
    <a data-season-pill='1' href='/serie/x/staffel-1'>1</a>
    <a data-season-pill='2' href='/serie/x/staffel-2'>2</a>
    <a data-season-pill='1' href='/serie/x/staffel-1/episode-1'>skip</a>
    </ul>
    """
    seasons = sp._extract_season_links(_soup(html))
    assert seasons == [(1, "https://s.to/serie/x/staffel-1"), (2, "https://s.to/serie/x/staffel-2")]


def test_extract_episodes_skips_upcoming_and_tba():
    """Rows flagged as upcoming/TBA are dropped; only released episodes remain."""
    html = """
    <table class='episode-table'>
    <tbody>
    <tr class='episode-row' onclick="window.location='https://s.to/serie/x/staffel-1/episode-1'">
    <th class='episode-number-cell'>1</th>
    <td><strong class='episode-title-ger'>Ep1</strong></td>
    <td class='episode-watch-cell'><img alt='VOE'></td>
    </tr>
    <tr class='episode-row upcoming' onclick="window.location='https://s.to/serie/x/staffel-1/episode-2'">
    <th class='episode-number-cell'>2</th>
    <td>
    <strong class='episode-title-ger'></strong>
    <span class='badge badge-upcoming'>DEMNÄCHST</span>
    </td>
    <td class='episode-watch-cell'>— TBA —</td>
    </tr>
    </tbody>
    </table>
    """
    episodes = sp._extract_episodes(_soup(html))
    assert [e.number for e in episodes] == [1]


def test_fetch_episode_hoster_names(monkeypatch):
    """All hoster buttons on an episode page are reported by name, in order."""
    html = """
    <button class='link-box' data-provider-name='VOE' data-play-url='/redirect/voe'></button>
    <button class='link-box' data-provider-name='Vidoza' data-play-url='/redirect/vidoza'></button>
    """

    def fake_get_soup(url, session=None):
        return _soup(html)

    monkeypatch.setattr(sp, "_get_soup", fake_get_soup)
    monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to")
    names = sp.fetch_episode_hoster_names("/serie/x/staffel-1/episode-1")
    assert names == ["VOE", "Vidoza"]


def test_fetch_episode_stream_link_prefers_requested_hoster(monkeypatch):
    """A preferred hoster wins over the first button on the page."""
    html = """
    <button class='link-box' data-provider-name='VOE' data-play-url='/redirect/voe'></button>
    <button class='link-box' data-provider-name='Vidoza' data-play-url='/redirect/vidoza'></button>
    """

    def fake_get_soup(url, session=None):
        return _soup(html)

    monkeypatch.setattr(sp, "_get_soup", fake_get_soup)
    monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to")
    link = sp.fetch_episode_stream_link("/serie/x/staffel-1/episode-1", preferred_hosters=["vidoza"])
    assert link == "https://s.to/redirect/vidoza"


def test_extract_latest_episodes():
    """Latest-episode rows yield series title plus season/episode numbers."""
    html = """
    <a class='latest-episode-row' href='/serie/x/staffel-1/episode-2'>
    <span class='ep-title' title='Show X'>Show X</span>
    <span class='ep-season'>S 1</span>
    <span class='ep-episode'>E 2</span>
    <span class='ep-time'>Heute</span>
    </a>
    """
    episodes = sp._extract_latest_episodes(_soup(html))
    assert len(episodes) == 1
    assert episodes[0].series_title == "Show X"
    assert episodes[0].season == 1
    assert episodes[0].episode == 2


def test_episode_url_for_uses_episode_cache(monkeypatch):
    """A cached episode label must resolve without hitting the lookup path."""
    plugin = sp.SerienstreamPlugin()
    info = sp.EpisodeInfo(
        number=2,
        title="Folge 2",
        original_title="",
        url="https://s.to/serie/x/staffel-1/episode-2",
    )
    plugin._episode_label_cache[("Show X", "Staffel 1")] = {"Episode 2: Folge 2": info}

    called = {"lookup": False}

    def _fail_lookup(*_args, **_kwargs):
        called["lookup"] = True
        return None

    monkeypatch.setattr(plugin, "_lookup_episode", _fail_lookup)

    url = plugin.episode_url_for("Show X", "Staffel 1", "Episode 2: Folge 2")
    assert url == "https://s.to/serie/x/staffel-1/episode-2"
    # The slow lookup must not have been touched.
    assert called["lookup"] is False


def test_parse_series_catalog_groups_and_entries():
    """The catalog page is parsed into genre -> entries, keeping page order."""
    html = """
    <div class='background-1'><h3>Genre A</h3></div>
    <ul class='series-list'>
    <li class='series-item' data-search='desc a'>
    <a href='/serie/a'>A</a>
    </li>
    </ul>
    <div class='background-1'><h3>Genre B</h3></div>
    <ul class='series-list'>
    <li class='series-item' data-search='desc b'>
    <a href='/serie/b'>B</a>
    </li>
    </ul>
    """
    catalog = sp.parse_series_catalog(_soup(html))
    assert list(catalog.keys()) == ["Genre A", "Genre B"]
    assert [e.title for e in catalog["Genre A"]] == ["A"]
    assert [e.title for e in catalog["Genre B"]] == ["B"]


def test_titles_for_genre_from_catalog(monkeypatch):
    """titles_for_genre() returns the titles listed under the given genre."""
    html = """
    <div class='background-1'><h3>Drama</h3></div>
    <ul class='series-list'>
    <li class='series-item' data-search='desc'>
    <a href='/serie/drama-1'>Drama 1</a>
    </li>
    </ul>
    """
    monkeypatch.setattr(sp, "_get_soup_simple", lambda url: _soup(html))
    monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to")
    plugin = sp.SerienstreamPlugin()
    titles = plugin.titles_for_genre("Drama")
    assert titles == ["Drama 1"]


def test_popular_series_parsing(monkeypatch):
    """popular_series() extracts show-card titles from the image alt text."""
    html = """
    <div class='mb-5'>
    <h2>Meistgesehen</h2>
    <a class='show-card' href='/serie/popular-1'>
    <img alt='Popular 1' src='x.jpg'>
    </a>
    <a class='show-card' href='/serie/popular-2'>
    <img alt='Popular 2' src='y.jpg'>
    </a>
    </div>
    """
    monkeypatch.setattr(sp, "_get_soup_simple", lambda url: _soup(html))
    monkeypatch.setattr(sp, "_get_base_url", lambda: "https://s.to")
    plugin = sp.SerienstreamPlugin()
    titles = plugin.popular_series()
    assert titles == ["Popular 1", "Popular 2"]


@pytest.mark.live
def test_live_staffel_page_skips_upcoming():
    """On the live season page, extracted episodes == rows minus upcoming rows."""
    if not os.getenv("LIVE_TESTS"):
        pytest.skip("LIVE_TESTS not set")
    url = "https://s.to/serie/star-trek-starfleet-academy/staffel-1"
    soup = sp._get_soup_simple(url)
    rows = soup.select("table.episode-table tbody tr.episode-row")
    upcoming_rows = [row for row in rows if "upcoming" in (row.get("class") or [])]
    episodes = sp._extract_episodes(soup)
    assert len(episodes) == len(rows) - len(upcoming_rows)


@pytest.mark.live
def test_live_genres_and_titles():
    """The live site yields a non-empty genre list whose entries resolve to titles."""
    if not os.getenv("LIVE_TESTS"):
        pytest.skip("LIVE_TESTS not set")
    plugin = sp.SerienstreamPlugin()
    genres = plugin.genres()
    assert isinstance(genres, list) and genres
    sample = genres[0]
    titles = plugin.titles_for_genre(sample)
    assert isinstance(titles, list)
|
||||
28
tests/test_smoke.py
Normal file
28
tests/test_smoke.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[1]
|
||||
ADDON_DIR = ROOT / "addon"
|
||||
|
||||
if str(ADDON_DIR) not in sys.path:
|
||||
sys.path.insert(0, str(ADDON_DIR))
|
||||
|
||||
|
||||
def test_import_core_modules() -> None:
|
||||
"""Ein einfacher Smoke-Test, der sicherstellt, dass Kernmodule importierbar sind.
|
||||
|
||||
Wichtig: Die Module sind so geschrieben, dass sie auch ohne Kodi-Umgebung
|
||||
(ohne xbmc/xbmcgui) importiert werden koennen.
|
||||
"""
|
||||
|
||||
import plugin_interface # noqa: F401
|
||||
import plugin_helpers # noqa: F401
|
||||
import http_session_pool # noqa: F401
|
||||
import tmdb # noqa: F401
|
||||
import metadata_utils # noqa: F401
|
||||
import resolveurl_backend # noqa: F401
|
||||
|
||||
|
||||
14
tests/test_title_word_matching.py
Normal file
14
tests/test_title_word_matching.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from addon.plugins import aniworld_plugin as ap
from addon.plugins import topstreamfilm_plugin as tp


def test_aniworld_matches_whole_words_only():
    """aniworld's _matches_query matches whole words, never substrings."""
    assert ap._matches_query("hund", title="Der Hund")
    # Substring inside another word must not match.
    assert not ap._matches_query("hund", title="Thunderstruck")
    # Prefix of a compound word must not match either.
    assert not ap._matches_query("hund", title="Hundeleben")


def test_topstream_matches_whole_words_only():
    """topstreamfilm's _matches_query applies the same whole-word rule."""
    assert tp._matches_query("hund", title="Der Hund", description="")
    assert not tp._matches_query("hund", title="Thunderstruck", description="")
    assert not tp._matches_query("hund", title="Hundeleben", description="")
|
||||
Reference in New Issue
Block a user