Nightly: snapshot harness and cache ignore
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -17,3 +17,6 @@
|
|||||||
__pycache__/
|
__pycache__/
|
||||||
*.pyc
|
*.pyc
|
||||||
.coverage
|
.coverage
|
||||||
|
|
||||||
|
# Plugin runtime caches
|
||||||
|
/addon/plugins/*_cache.json
|
||||||
|
|||||||
@@ -57,7 +57,7 @@ else: # pragma: no cover
|
|||||||
|
|
||||||
ADDON_ID = "plugin.video.viewit"
|
ADDON_ID = "plugin.video.viewit"
|
||||||
SETTING_BASE_URL = "topstream_base_url"
|
SETTING_BASE_URL = "topstream_base_url"
|
||||||
DEFAULT_BASE_URL = "https://www.meineseite"
|
DEFAULT_BASE_URL = "https://topstreamfilm.live"
|
||||||
GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
|
GLOBAL_SETTING_LOG_URLS = "debug_log_urls"
|
||||||
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
|
GLOBAL_SETTING_DUMP_HTML = "debug_dump_html"
|
||||||
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
|
GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info"
|
||||||
|
|||||||
@@ -102,6 +102,7 @@ Plugins sollten die Helper aus `addon/plugin_helpers.py` nutzen:
|
|||||||
- ZIP bauen: `./scripts/build_kodi_zip.sh`
|
- ZIP bauen: `./scripts/build_kodi_zip.sh`
|
||||||
- Addon‑Ordner: `./scripts/build_install_addon.sh`
|
- Addon‑Ordner: `./scripts/build_install_addon.sh`
|
||||||
- Plugin‑Manifest aktualisieren: `python3 scripts/generate_plugin_manifest.py`
|
- Plugin‑Manifest aktualisieren: `python3 scripts/generate_plugin_manifest.py`
|
||||||
|
- Live-Snapshot-Checks: `python3 qa/run_plugin_snapshots.py` (aktualisieren mit `--update`)
|
||||||
|
|
||||||
## Beispiel‑Checkliste
|
## Beispiel‑Checkliste
|
||||||
- [ ] `name` korrekt gesetzt
|
- [ ] `name` korrekt gesetzt
|
||||||
|
|||||||
52
qa/plugin_snapshots.json
Normal file
52
qa/plugin_snapshots.json
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
{
|
||||||
|
"snapshots": {
|
||||||
|
"Serienstream::search_titles::trek": [
|
||||||
|
"Star Trek: Lower Decks",
|
||||||
|
"Star Trek: Prodigy",
|
||||||
|
"Star Trek: The Animated Series",
|
||||||
|
"Inside Star Trek",
|
||||||
|
"Raumschiff Enterprise - Star Trek: The Original Series",
|
||||||
|
"Star Trek: Deep Space Nine",
|
||||||
|
"Star Trek: Discovery",
|
||||||
|
"Star Trek: Enterprise",
|
||||||
|
"Star Trek: Picard",
|
||||||
|
"Star Trek: Raumschiff Voyager",
|
||||||
|
"Star Trek: Short Treks",
|
||||||
|
"Star Trek: Starfleet Academy",
|
||||||
|
"Star Trek: Strange New Worlds",
|
||||||
|
"Star Trek: The Next Generation"
|
||||||
|
],
|
||||||
|
"Aniworld::search_titles::naruto": [
|
||||||
|
"Naruto",
|
||||||
|
"Naruto Shippuden",
|
||||||
|
"Boruto: Naruto Next Generations",
|
||||||
|
"Naruto Spin-Off: Rock Lee & His Ninja Pals"
|
||||||
|
],
|
||||||
|
"Topstreamfilm::search_titles::matrix": [
|
||||||
|
"Darkdrive – Verschollen in der Matrix",
|
||||||
|
"Matrix Reloaded",
|
||||||
|
"Armitage III: Poly Matrix",
|
||||||
|
"Matrix Resurrections",
|
||||||
|
"Matrix",
|
||||||
|
"Matrix Revolutions",
|
||||||
|
"Matrix Fighters"
|
||||||
|
],
|
||||||
|
"Einschalten::search_titles::tagesschau": [],
|
||||||
|
"Filmpalast::search_titles::trek": [
|
||||||
|
"Star Trek",
|
||||||
|
"Star Trek - Der Film",
|
||||||
|
"Star Trek 2 - Der Zorn des Khan",
|
||||||
|
"Star Trek 9 Der Aufstand",
|
||||||
|
"Star Trek: Nemesis",
|
||||||
|
"Star Trek: Section 31",
|
||||||
|
"Star Trek: Starfleet Academy",
|
||||||
|
"Star Trek: Strange New Worlds"
|
||||||
|
],
|
||||||
|
"Doku-Streams::search_titles::japan": [
|
||||||
|
"Deutsche im Knast - Japan und die Disziplin",
|
||||||
|
"Die Meerfrauen von Japan",
|
||||||
|
"Japan - Land der Moderne und Tradition",
|
||||||
|
"Japan im Zweiten Weltkrieg - Der Fall des Kaiserreichs"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
153
qa/run_plugin_snapshots.py
Executable file
153
qa/run_plugin_snapshots.py
Executable file
@@ -0,0 +1,153 @@
|
|||||||
|
#!/usr/bin/env python3
"""Run live snapshot checks for plugins.

Use --update to refresh stored snapshots.
"""
from __future__ import annotations

import argparse
import asyncio
import importlib.util
import inspect
import json
import sys
from pathlib import Path
from typing import Any

# Repository layout anchors (this file lives in <root>/qa/).
ROOT_DIR = Path(__file__).resolve().parents[1]
PLUGIN_DIR = ROOT_DIR / "addon" / "plugins"
SNAPSHOT_PATH = ROOT_DIR / "qa" / "plugin_snapshots.json"

# Make the addon package importable without installing it.
sys.path.insert(0, str(ROOT_DIR / "addon"))

try:
    from plugin_interface import BasisPlugin  # type: ignore
except Exception as exc:  # pragma: no cover
    raise SystemExit(f"Failed to import BasisPlugin: {exc}")

# One snapshot case per plugin: (plugin name, search query).
_CASES = [
    ("Serienstream", "trek"),
    ("Aniworld", "naruto"),
    ("Topstreamfilm", "matrix"),
    ("Einschalten", "tagesschau"),
    ("Filmpalast", "trek"),
    ("Doku-Streams", "japan"),
]
CONFIG = [
    {"plugin": plugin, "method": "search_titles", "args": [query], "max_items": 20}
    for plugin, query in _CASES
]
|
||||||
|
|
||||||
|
|
||||||
|
def _import_module(path: Path):
|
||||||
|
spec = importlib.util.spec_from_file_location(path.stem, path)
|
||||||
|
if spec is None or spec.loader is None:
|
||||||
|
raise ImportError(f"Missing spec for {path}")
|
||||||
|
module = importlib.util.module_from_spec(spec)
|
||||||
|
sys.modules[spec.name] = module
|
||||||
|
spec.loader.exec_module(module)
|
||||||
|
return module
|
||||||
|
|
||||||
|
|
||||||
|
def _discover_plugins() -> dict[str, BasisPlugin]:
    """Instantiate every plugin found under PLUGIN_DIR, keyed by plugin name.

    A module may designate its entry point via a ``Plugin`` attribute;
    otherwise every ``BasisPlugin`` subclass defined in the module is
    considered, in case-insensitive class-name order. The first instance
    claiming a given (non-empty) name wins.
    """
    registry: dict[str, BasisPlugin] = {}

    def _is_plugin_class(obj: Any) -> bool:
        # Concrete subclasses only; the base interface itself is excluded.
        return inspect.isclass(obj) and issubclass(obj, BasisPlugin) and obj is not BasisPlugin

    for file_path in sorted(PLUGIN_DIR.glob("*.py")):
        if file_path.name.startswith("_"):
            continue  # underscore-prefixed modules are private helpers
        module = _import_module(file_path)
        preferred = getattr(module, "Plugin", None)
        if _is_plugin_class(preferred):
            candidates = [preferred]
        else:
            candidates = [obj for obj in module.__dict__.values() if _is_plugin_class(obj)]
        candidates.sort(key=lambda cls: cls.__name__.casefold())
        for cls in candidates:
            instance = cls()
            name = str(getattr(instance, "name", "") or "").strip()
            if name and name not in registry:
                registry[name] = instance
    return registry
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_titles(value: Any, max_items: int) -> list[str]:
|
||||||
|
if not value:
|
||||||
|
return []
|
||||||
|
titles = [str(item).strip() for item in list(value) if item and str(item).strip()]
|
||||||
|
seen = set()
|
||||||
|
normalized: list[str] = []
|
||||||
|
for title in titles:
|
||||||
|
key = title.casefold()
|
||||||
|
if key in seen:
|
||||||
|
continue
|
||||||
|
seen.add(key)
|
||||||
|
normalized.append(title)
|
||||||
|
if len(normalized) >= max_items:
|
||||||
|
break
|
||||||
|
return normalized
|
||||||
|
|
||||||
|
|
||||||
|
def _snapshot_key(entry: dict[str, Any]) -> str:
|
||||||
|
args = entry.get("args", [])
|
||||||
|
return f"{entry['plugin']}::{entry['method']}::{','.join(str(a) for a in args)}"
|
||||||
|
|
||||||
|
|
||||||
|
def _call_method(plugin: BasisPlugin, method_name: str, args: list[Any]):
|
||||||
|
method = getattr(plugin, method_name, None)
|
||||||
|
if not callable(method):
|
||||||
|
raise RuntimeError(f"Method missing: {method_name}")
|
||||||
|
result = method(*args)
|
||||||
|
if asyncio.iscoroutine(result):
|
||||||
|
return asyncio.run(result)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> int:
    """CLI entry point: verify (or, with --update, rewrite) plugin snapshots.

    Returns:
        0 when every configured snapshot matches; 1 when any plugin is
        missing, raises during the live call, or yields a mismatch.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--update", action="store_true")
    options = parser.parse_args()
    updating = options.update

    # Load the stored snapshot map; tolerate a missing file or unexpected shape.
    stored: dict[str, Any] = {}
    if SNAPSHOT_PATH.exists():
        stored = json.loads(SNAPSHOT_PATH.read_text(encoding="utf-8"))
    baseline = stored.get("snapshots", {}) if isinstance(stored, dict) else {}
    if updating:
        baseline = {}  # rebuild from scratch on refresh

    available = _discover_plugins()
    failures: list[str] = []

    for entry in CONFIG:
        plugin_name = entry["plugin"]
        plugin = available.get(plugin_name)
        if plugin is None:
            failures.append(f"Plugin missing: {plugin_name}")
            continue
        key = _snapshot_key(entry)
        try:
            raw = _call_method(plugin, entry["method"], entry.get("args", []))
            actual = _normalize_titles(raw, entry.get("max_items", 20))
        except Exception as exc:
            failures.append(f"Snapshot error: {key} ({exc})")
            if updating:
                # Record the failure so the snapshot file reflects reality.
                baseline[key] = {"error": str(exc)}
            continue
        if updating:
            baseline[key] = actual
            continue
        expected = baseline.get(key)
        if expected != actual:
            failures.append(f"Snapshot mismatch: {key}\nExpected: {expected}\nActual: {actual}")

    if updating:
        SNAPSHOT_PATH.parent.mkdir(parents=True, exist_ok=True)
        SNAPSHOT_PATH.write_text(json.dumps({"snapshots": baseline}, indent=2, ensure_ascii=False) + "\n", encoding="utf-8")

    for failure in failures:
        print(failure)
    return 1 if failures else 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Propagate main()'s status code to the shell (sys.exit raises SystemExit).
    sys.exit(main())
|
||||||
Reference in New Issue
Block a user