From 22b9ae9c31ca04cadfbcd945da0ac906da7d4c2e Mon Sep 17 00:00:00 2001 From: "itdrui.de" Date: Mon, 23 Feb 2026 20:38:36 +0100 Subject: [PATCH] main: consolidate integrated changes after v0.1.54 --- .gitignore | 3 + README.md | 65 +- .../http_session_pool.cpython-312.pyc | Bin 1656 -> 0 bytes .../plugin_helpers.cpython-312.pyc | Bin 10931 -> 0 bytes .../regex_patterns.cpython-312.pyc | Bin 494 -> 0 bytes .../resolveurl_backend.cpython-312.pyc | Bin 1525 -> 0 bytes addon/__pycache__/tmdb.cpython-312.pyc | Bin 25016 -> 0 bytes addon/addon.xml | 8 +- addon/default.py | 1625 ++++++++++------- addon/http_session_pool.py | 9 + addon/metadata_utils.py | 93 + addon/plugin_helpers.py | 39 + addon/plugin_interface.py | 13 +- .../__pycache__/__init__.cpython-312.pyc | Bin 168 -> 0 bytes .../_template_plugin.cpython-312.pyc | Bin 5372 -> 0 bytes .../einschalten_plugin.cpython-312.pyc | Bin 48789 -> 0 bytes addon/plugins/_template_plugin.py | 14 +- addon/plugins/aniworld_plugin.py | 311 +++- addon/plugins/dokustreams_plugin.py | 39 +- addon/plugins/einschalten_plugin.py | 144 +- addon/plugins/filmpalast_plugin.py | 181 +- addon/plugins/serienstream_plugin.py | 480 ++++- addon/plugins/topstreamfilm_plugin.py | 178 +- addon/resources/settings.xml | 121 +- addon/tmdb.py | 167 +- docs/DEFAULT_ROUTER.md | 87 +- docs/PLUGIN_DEVELOPMENT.md | 156 +- docs/PLUGIN_MANIFEST.json | 104 ++ docs/PLUGIN_SYSTEM.md | 135 +- docs/RELEASE.md | 44 + qa/plugin_snapshots.json | 73 + qa/run_plugin_snapshots.py | 153 ++ .../test_einschalten_api.cpython-312.pyc | Bin 5702 -> 0 bytes scripts/__pycache__/test_tmdb.cpython-312.pyc | Bin 2665 -> 0 bytes scripts/build_kodi_zip.sh | 2 +- scripts/build_local_kodi_repo.sh | 22 +- scripts/generate_plugin_manifest.py | 106 ++ scripts/zip_deterministic.py | 73 + 38 files changed, 3139 insertions(+), 1306 deletions(-) delete mode 100644 addon/__pycache__/http_session_pool.cpython-312.pyc delete mode 100644 addon/__pycache__/plugin_helpers.cpython-312.pyc delete 
mode 100644 addon/__pycache__/regex_patterns.cpython-312.pyc delete mode 100644 addon/__pycache__/resolveurl_backend.cpython-312.pyc delete mode 100644 addon/__pycache__/tmdb.cpython-312.pyc create mode 100644 addon/metadata_utils.py delete mode 100644 addon/plugins/__pycache__/__init__.cpython-312.pyc delete mode 100644 addon/plugins/__pycache__/_template_plugin.cpython-312.pyc delete mode 100644 addon/plugins/__pycache__/einschalten_plugin.cpython-312.pyc create mode 100644 docs/PLUGIN_MANIFEST.json create mode 100644 docs/RELEASE.md create mode 100644 qa/plugin_snapshots.json create mode 100755 qa/run_plugin_snapshots.py delete mode 100644 scripts/__pycache__/test_einschalten_api.cpython-312.pyc delete mode 100644 scripts/__pycache__/test_tmdb.cpython-312.pyc create mode 100755 scripts/generate_plugin_manifest.py create mode 100755 scripts/zip_deterministic.py diff --git a/.gitignore b/.gitignore index 6920558..d210053 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,6 @@ __pycache__/ *.pyc .coverage + +# Plugin runtime caches +/addon/plugins/*_cache.json diff --git a/README.md b/README.md index 79e8abd..55e9d16 100644 --- a/README.md +++ b/README.md @@ -2,40 +2,51 @@ ViewIT Logo -ViewIT ist ein Kodi‑Addon zum Durchsuchen und Abspielen von Inhalten der unterstützten Anbieter. +ViewIT ist ein Kodi Addon. +Es durchsucht Provider und startet Streams. 
## Projektstruktur -- `addon/` Kodi‑Addon Quellcode -- `scripts/` Build‑Scripts (arbeiten mit `addon/` + `dist/`) -- `dist/` Build‑Ausgaben (ZIPs) -- `docs/`, `tests/` +- `addon/` Kodi Addon Quellcode +- `scripts/` Build Scripts +- `dist/` Build Ausgaben +- `docs/` Doku +- `tests/` Tests -## Build & Release -- Addon‑Ordner bauen: `./scripts/build_install_addon.sh` → `dist//` -- Kodi‑ZIP bauen: `./scripts/build_kodi_zip.sh` → `dist/-.zip` -- Addon‑Version in `addon/addon.xml` +## Build und Release +- Addon Ordner bauen: `./scripts/build_install_addon.sh` +- Kodi ZIP bauen: `./scripts/build_kodi_zip.sh` +- Version pflegen: `addon/addon.xml` +- Reproduzierbares ZIP: `SOURCE_DATE_EPOCH` optional setzen -## Lokales Kodi-Repository -- Repository bauen (inkl. ZIPs + `addons.xml` + `addons.xml.md5`): `./scripts/build_local_kodi_repo.sh` -- Lokal bereitstellen: `./scripts/serve_local_kodi_repo.sh` -- Standard-URL: `http://127.0.0.1:8080/repo/addons.xml` -- Optional eigene URL beim Build setzen: `REPO_BASE_URL=http://:/repo ./scripts/build_local_kodi_repo.sh` +## Lokales Kodi Repository +- Repository bauen: `./scripts/build_local_kodi_repo.sh` +- Repository starten: `./scripts/serve_local_kodi_repo.sh` +- Standard URL: `http://127.0.0.1:8080/repo/addons.xml` +- Eigene URL beim Build: `REPO_BASE_URL=http://:/repo ./scripts/build_local_kodi_repo.sh` -## Gitea Release-Asset Upload -- ZIP bauen: `./scripts/build_kodi_zip.sh` -- Token setzen: `export GITEA_TOKEN=` -- Asset an Tag hochladen (erstellt Release bei Bedarf): `./scripts/publish_gitea_release.sh` -- Optional: `--tag v0.1.50 --asset dist/plugin.video.viewit-0.1.50.zip` - -## Entwicklung (kurz) -- Hauptlogik: `addon/default.py` +## Entwicklung +- Router: `addon/default.py` - Plugins: `addon/plugins/*_plugin.py` -- Einstellungen: `addon/resources/settings.xml` +- Settings: `addon/resources/settings.xml` -## Tests mit Abdeckung -- Dev-Abhängigkeiten installieren: `./.venv/bin/pip install -r requirements-dev.txt` -- Tests + 
Coverage starten: `./.venv/bin/pytest` -- Optional (XML-Report): `./.venv/bin/pytest --cov-report=xml` +## TMDB API Key einrichten +- TMDB Account anlegen und API Key (v3) erstellen: `https://www.themoviedb.org/settings/api` +- In Kodi das ViewIT Addon oeffnen: `Einstellungen -> TMDB` +- `TMDB aktivieren` einschalten +- `TMDB API Key` eintragen +- Optional `TMDB Sprache` setzen (z. B. `de-DE`) +- Optional die Anzeige-Optionen aktivieren/deaktivieren: + - `TMDB Beschreibung anzeigen` + - `TMDB Poster und Vorschaubild anzeigen` + - `TMDB Fanart/Backdrop anzeigen` + - `TMDB Bewertung anzeigen` + - `TMDB Stimmen anzeigen` + - `TMDB Besetzung anzeigen` + +## Tests +- Dev Pakete installieren: `./.venv/bin/pip install -r requirements-dev.txt` +- Tests starten: `./.venv/bin/pytest` +- XML Report: `./.venv/bin/pytest --cov-report=xml` ## Dokumentation Siehe `docs/`. diff --git a/addon/__pycache__/http_session_pool.cpython-312.pyc b/addon/__pycache__/http_session_pool.cpython-312.pyc deleted file mode 100644 index 83e6a44b105bbe62dd99b8aba3f4bc39f4bb6975..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1656 zcmZWpO>7%Q6n^7fd%YXSX?{@pqn2$FRW(8m>ZPJYmB9T&l@f@H)Hp=jWp@&?#Gtfr4tssb(ieFy5AJ<#S@IcJgYX(6Jy70!Q* zO~5?KXp2|#MUzomXY^u~20X~-X%INRr&oO6b-WpU+Gl#jtx!FpWkwWQhZy}UVI2I3+V-K<;A1plvkOciDgKb+ z9Hz#r9(T(0Bx61c<47&1!W2AYD9unFD%&l=k^)LpE4ImL7(sot02_BN!kjw>wO1cy$!hH2ZrXE0jv zhlBR&4m@1sykcypHbk3dD+^&Co)iPxO&qPVy||^g!Qguo4fsvzdZhGkr1w7B_tt89 zPm5w0$euzEy4iW9_wDdm6cobpLYsdik^5YFclcURTnM%Nsp=*Lnt4 zdInZ|hOQ~=d-Nss#^Ckz6D8e@Nox0cs&{$p`q)}(U?nwhfBuL1xApHXuMHON@w*rA zAACHXe|&!8tK4n!<*9FScgdYoUyQB}7Jg1mHWQHQZH!(!+8F(3Q-PsL@@%7P_fsGu z<*xOPp2lc1j>cB-2L8g22M^}}9;n(o$Fv81F(Lc=h0G=eek_6VA*i`d>0yeX%*gB& z3}d1OtvgxV9X%wI_VZ^1uW)KAlL{5XuzkxgLd`Iyt4Kj=7)-*@Fu@lpcvVci(@fkW z6CKY)8#2M>He0$3eA!vAe!PG2+pEtN`#Q7vP9&+ zG$4b&!=V*8)I_`Q7rD2Yg#E8J)N99A;Q&_aEEywzLz?tHYj#4i8!JWn*1LMIoDoU- UwgPdcPvygKTR9@#BS)lv0h83F761SM diff --git 
a/addon/__pycache__/plugin_helpers.cpython-312.pyc b/addon/__pycache__/plugin_helpers.cpython-312.pyc deleted file mode 100644 index 449c2dd6bb0fa79cfe7c5bfc65e5fd260d3fed03..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10931 zcmdT~U2GfImA=E7;XgT~Nd4HB9omvC(X!>laV)Erl}eT^D{uzD!4$)Z5~vkwWjK&ViL>4^#y*zQAK7EJ86 zS+&6KId?ciN@kLFfj;cWymRKxxqma~eCIoN{O=x*lfW_i+NIdPZYJbE@WME(y1?9@ zVF3;sb&We>i&zhBYA3k{BDorg?0_fA!SmwaS#f8a zr^)3XjgtxXG#O{Y;fw!u@_ba5#L#&uJ}Svd=v-0`Jr|S4`_<5Bd~7I|P}-c%Bgw%r zd3hG9l?MH4V5eLiq$7o)1A z#zv$8yfCJ&k+&GlCQ9d`V{vuRQZ!|-U&6zWC4{+OPpw%k-Xdh$lCqoxds0!09>YdE zqXNz;@^Pg*NPEs3@5e&jM z%`tIyWRO~u#&@GpX*N)es;aE9U`-m2CN-)#OYJqs;fXvJbyR|}770eo+aR`sqJ-g2*N2J|@V8~z=CzE6HptPGB{Hfiv&2HW7BSnX38=cgu zpqGd-f^Oz=E%dkre#-Zum?kSWU)ENa7iwm_1&fWZUGZ(WDqImV_M9&~!>{=3($(3} zp2gmrzhlOlcloof%`1VLD=(&pbG6}*YIiT3$ORs~8`!rL*q00J&${=o2vze>+zm7= z1sXC>i})*7*4yeA2mWd#E+ndKC(nr55gjdsHxSnImO zF#2I(-3_=j?unKc!9>A$eTL2#8?U!yd_90{T^uygn8!2=SSvdzDH^BBV^Y{5-hQL{j;k*(1n=4ER)j|~_Uw)W zH=bPQ0A(8QfB>t9Qa`4dNun0H0y2seo8qoBWzS+Q z3&FnD9R?f@o5o#INWqgVjWvt_weA4!QS6f;lL)4i`=b&VH?a8AP+P_V-m=K}Jm>;r z?*Leat(pzv|Ik>BJ~GYP8;ioT)ZFD{QS%QRKh=Mv|4{d-{^J9Y{(&RMskw)F{iWH? zOHqvE8apP(wW{JfD0_!=z*79z-?(ThNY40=dt9R_D3{w~G zK??}U!m5IJpWfT;AvZma?hWkCItEJfEyK-{jUQPDhTpZS=mYe?H@=2hR(Pjn%#tZt z$1qyax+BF@3b~@mFb&|EvXtKJu*vpHomC37;B}U_i}MDCYg*~B8E{MOQ>-zPQeCEr z!ExPD(gdE#Qb5I+Nso#)r`bm36}Zgu`haXJVY3M$A5X;Jto(@+?2e625iJTC+a7CRqY$0MsY+^uR}s%l=~@3wqx zspV_$bmUqN=c?)1>IuzkfHm>ZZKNVhM$ z8z@aR>@;`}h5|bct}6J<=X$uFAh{KE^n}=3k1$Z0Q5)`-;AjUm)N*r7(S-gwYe3Kw zWD1g@Ve{ot_N)u`X z?OuKcS|#D9T!vzr6f8XNS@HPi8`ErNXxYL+x%YJBHK=W2ETuCbhGSD3xlP%y~j#!Ssgu9>@7b7 zrHPw}FD5sz!qrvE2QF-cR#_TJz9?xnSsIN;2c@u8_dUl*bRu$g5;l@FDx!ZW8XrrD(uBq* zcNGr zu%A%<=x4;{*fn!FU%P4kT)I0grN5aCHkDmxqOZo%&u4?-nZxCSNq2o2CjA%1Q-AqlfyOVoORCcP4}kvWvbGf?%10? 
ze^5)@)wnxhI{oh-)UB^tRd&Jy_(3;ws0p?!y4gc}$*t;c0ZzA@9ETobZ$HXlx!2K4 z*xOGsP_A)*#CS@Me@>%O3bvruIX)TzJt{eVi7fS!If!#ZWYF>gI0U#LTM-mYawIz0Zh_nIPK+~ zMtrVxx8%}cUU%A6zU{m^5;tEmJ})E?uuH<+|2hce8BnEHTJg1+XnYbZhFAp+M(1m*_Re^8C zw{ND&PtN{y=*^+K+xIPP-v_?By>pt#t#AhdKYKx%3_IxrfF=T$1QrOpmugg2mGKzx zOkf5gm2{lyf@T^U2ZF#UNm-Ro!L`mht(pVcN`q=rp46G`Z&rrX88Tc+E@kNZ^GBRUu0tD*W3vE15kLHPg=$#xNmJ$_?0PZWdG` z9F-6)6YbX>M&2{!U31SaI?L_} zDtu#eDhNv6L^jt<%KynoJ49L6Sre;$l$Ol9P^9h77WmFo} zm@})4gMMJq80RO*1nFkiCy1gk0tOJa$j@OdLPgj^w_aql3(Y#F4knW0nj@wp;nUej zRHf-@2`ps_Mm9hl3NZEvHPT3gZYs)1X=uI?_UZZXUhLFuP#$a|KaM(>kI17~paaO? z!9s>YbI?Vo(UDQjZMexO03&4Yv4kXdqasa%th&$8B`6rBD6o|kdcIB5aNOc~8C1~D zm0v;uTRcER9$CA!v*N46&ow#U*1NveC12~}>8!6c=X>%-!;sbBPb*!Mx-hsQrSzWi-5cTCC-DY++AXz!^{e=FO( z|Hjs3Pw(`R6(8Lz?0TPB_B}P-zrqFQPi3B45Z`=Z@zhdN$Bol>xZ^8a^}Lwwy~AzQ z!;Gc$cum+qOF(y|bg2b|^*gC4picM&5_S$wWB3@}k6ziWvQ2qJ zd&&ZF$OF&7kz$J<$!!p;_$e=}xnA9D^hZ(KdEI3o?v$WDV%~E>8oL&a(H7|eamZ^t zgK{*!=xaLNG}0tSnvOO-({wV@bWT1D4KyYqzX0cpZusvEM&v(8@%bX(xpmjB&_zD9 zYgd=^llMW3LNx&Nd6X$Y`%3^V{}uLUhwsnl5GXa<`KWR}9y_bCBjOXVGl@G&lIA!s zO^C4}pcs|VTb{z6@!O9M+Q|7H0&VKn_yJR#)CA-qh~2<|DZ^arSMoG;pjq_+HM^l) zhSesLQw)C^$HRqv32awUUpD^pM+EZ$`6@KYW3$hp08FU)QUu?z74&|W=E!eDbRLf8^jD>_59%cv+2%s+k$mL{ilnI@@ z9Q?+NW5vE9-Mp|Hz};R1_r5z^qYmzN9bM^1Wk`#PpO@{1g!>ojn4F+rongy^LUASK zE!ZNB1xjD0*<@;>v_`W9Z_(Z{malq@|2gwKnBNO!y}QfivBzvf!v}3mqIg;TxUIV~q=o z4lLU6o~wUk6cXFRH1yYE^)@WFW3dAZ+$h$e4D&XfW>VNfu0hEHMTfWa6q_Br_*d|f zrzH%mBzPkn2ewb5xh_*Yk`IDq41a>JbqnXpgs&?b>0_1y5Hzxg7@paAZ-gkv9k*9?j!!Xe5f(s+?wAU&WCn? 
zT3y3zqhr7s#(@58DA`G%rr+f)dk6eOFr z6tLey4bG(&r7p6y2?qUC@H19A9Gr{8p`;Iok`tHdLdlJTcu?}ywGhW t_P}R^WdeC$aJoP5t(xwG;(L7s8`S9^VgJG1^EkQnxa$zZ-X;u`{|gI^%u4_O diff --git a/addon/__pycache__/regex_patterns.cpython-312.pyc b/addon/__pycache__/regex_patterns.cpython-312.pyc deleted file mode 100644 index 4d7fc68b874bdee588ce39a9e983be9bf138002d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 494 zcmZWmO-sW-5Z%}hHK3rNc<``P5UZG6^d>^VREnC4sh3b#*34Lg>2AoTYIE~f_#6BI z{t7Q1rC0xe;;kn)(IVm;-t2pCV3s$}b8{xq^|TO$y9Oa|IhmB^MSZU5_CSF61PtGR zDSrw|ehJEcd044evw?TT69k%I5ASHq(-ady11VI>MXD^*JVK1M(C%uT z@c?NmDZh~+P#y#!ph$T{K@Kh=+(s4fI21j)fmY9=wQd(`b<50_ysp~Z=)zWAcW3g> zR&BlZ@)D!%!m}|Bl?41|+5BHOs}w_Sy)v3+3}nFAXwh>TUfX5PY0GOLIIPn+{HL9t z9c!)AXf~Z==ANIhmdkRjdeAy-b-YLNnI|`P?JL7%Q6rS<^c>UL;#KBD+wOd-MxJ_b-K%|I2kg= znLMqOluX$F3DVy!a?5p0n|k^r^;~;_zBhSeVARabQ-|pJ?H`y;JipV_x{%kj8r+ZJV9w>;jj&znFXX2Ie*wH&HXxE`l$ z09K0L@#`U`n=@X~qV|l*sH49_iDim;z0Y#y?LnO`aKH&_65vhw`MkB1(M>-;OYQCN zJgzTNMkwsKnmz$zD4~AN+%^Vg9O;g}2A%qXUh3GxBj67UI5piC~Mk_di zxX7s6GMpztTaYbmCB(xG525AIR(g-k)8ZPhVpuO4oz9&WK%3aW?Oy8GYy zC3F$-rZy3}gqVcUEmT5_Xi2(-7V*E$-o5c(vnSk)a+8VDK}TInq%N3o%cenO!L%)6 zfDnUlQ}`e;RV^+;iKmC1|I9k_#-aJo<{B@n0OZgm6wX}j~P7AjlXjr9e|5iH^7nhO5>$`jrGD%JQ-dJ b06N3?_mGU`KjR2r{1v4S)pNL$rD5(bs1=9* diff --git a/addon/__pycache__/tmdb.cpython-312.pyc b/addon/__pycache__/tmdb.cpython-312.pyc deleted file mode 100644 index 6e208ab32d13ee09d5a5796a0a332ae7900c7810..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 25016 zcmeHvd2k!onP)fdqj3WSz!M}TQjkPa_d$!2E%DT8OY$Mw3JkF!2@(m=4Nx+fhQq|3 zSw?Xt)Fd^c%HA*>rXJP~YT;KitQ*k}>PHNNh7seSal|xe8Zi%= ziCgV3Gh!LE(3BBFQ4d=Ot*ma)&guspDk`GAuttu)@cW*m4!YR2tlp}U zd$jmUMfFjv`3;I?-cyNB6hEGdZY|)}`jFcywuQAp_+BHVZnb}GD{BXLK6V}JfL}kW zW}R=-m3^>5*7b&Nu$pzV9&lI1df(Pmc3J$CUC;W!SB>JUiuJ#(Eq3v+&Te3aP%#}1kdDAZ=?GMG+X!x(9&+2Lq~(z% z$+ol2kdmga;G=_G13sF^)y-7Iwua(#P;1qL)>G6p{pPcmsaMsfsLOP4)94JX4x!{J2YXi%Fs90(5&htCX0@|y1GP%^JM98DzihGV1XJUpD& 
zo*Ww;j-ayPo9sA#4kl7?3wmBcdldWM78`jIubq? zX-|$wZ;~COLt@1jr51jm)zbIg0OcB$RNhU}<8%_Jo0QrX zMO~130@*G8%IzyfC5`g);?Q_X4vmJ;PEbL0*aiU}k0i&qSRxP(j7GS&eM*?GsNI-PkHXI)c4+j#-I0yL;oQVvD@#S;T z$mM}#V3doTjU&C8apQik!UBuzAQ)AE=BipAesokw+x&c z3v=OEG6G3T#vuwh>O}Z#WMkmWSQ1hy1{e+`#H0o;N0aBHF%mF*vp^y|B6|y5j6_Bg zfuVRT78xSdByc&*jfjajn%7GqU~Q38l-HkwgeQ|+-WkGtgiiJ!?|G^_bokhTrw-@! zQhL!VaRv29gP|nwg9RS=6Y()_DAI8;&PGFFmW{_cNHrN9&pSd8a0u^0QnZ{K{NP{W zDG-y?eLH2a3wm$P=2|j4v#z!2@wqh_^UlfcoZdXiFWJ1eZ4C>yhK#LoO0#6JW$jJr zf!So%uq|h{O&+=jflwzQLAtYCd;)4T-piL%yXbox;mI{BLJiU^HK=+P#uezzReRtm z(bS5jL2Xp#IvPvn_2i32S&P`b0>SeQ%ZwP%OpVR+0zR~0*#*ItSz zBZ&uY?2uk8E!)$1%@7pyQLYX`MWxm&?s4g5j1pzj?7bhz{d*%dsOuR@o@Qwi+G zq@kXo-U2wn({F+Qx2S)u!OrGro_auc1XX!0RFvpw-bUKdz>%l=dP4i3I?)4fG^63< zd1yJ&iAVxN3IxdKhgw7W)(~{b+zxoY6aEsLKul6O#(w4C^#0rCx&?Dx#@sO3eaCFe z*&IvOI>Ayu6HaSpE~VM@bAoxJpxH?Btke%Zgn&II$?kE;cyX`%T}XaHbq@MTUd5}G zLIAhrms|*}>TOj@U+8h62-KK;kWdh;CW-Npi`UC22?|G_G8Fo2UMs)h%qoi4vRd8% zZdCG9#ArpM06~g!s8ZMlUUNeykI*S&(k8#>jjWMX#(Q==TpnVXMX3~1F{TZ?jtfF=ln^dB%L0O1o?$IF6jYe9kkr{o z>g>FQrC3KbsbEEEWGk7Y4vX-af| zf?B}^Ag3hu!Q|o@xg1j#f=W-yzJhf}A86S~TX)a9s=OZXW9VXJJZ~5d#{klti*TF4 z{{v&k5P+>{GO_Q0k$|MWo|A!hX&6MJBa!%6a)KdGdyXGJc0ABA(cJOvD4UY~$QUhj zHXI!Wd=%lhIMTqR^sWv@t*F1lP4Z_j>B0O zR}C&X?8o!QApo4=Asil=Q7(QKvJw&pgUg$DZGV^Mwu7s8)uIoqk6bN?-`C?)8)v}> zTR`M37;WgOrw7Q8!eRfIHvl{pA-@{1R^Hh2>QIEhea?!nH5U@`SYFGHjf^J5;aUyA zpNumEcjR^9(a}hZ4OZnf7o#zj!y%UIMDYZQeHe&(IDRf~0TJRNiBXt%L=rIGLzYGo zc{L1Mh$O}5d3EH~p}bBk^1OaDJPs&;&Fe7!2%BfHWFYI}Q!|2Caqvszt#|{~LB7{U zVo7eitVr^@5dy~&Rgf&i>JMMym}(6XTpp6D9*3jiUieF#f{__8^~)-)*16=Wxwhx(p0s-Q`K)XEl;OUKvU;Xp zm`TmD8Ov4(yx=uH!_GV>GgY020d7wwR?+qU)vTYJW~X-Xr%+mNxh zEZQNK+qSI>wyhc4cEIxX>Y45D?7qG`{jE&Rw#AyxdDYFTti69qd&lO=G43TtL;B>L zX1*uu*gvJaW47lQ?`@`jfvKOl^joIobEf(>)40Girdxi?1eZOKmb>+o!IiUB&upF9 zFW4FcLqpE&pJ|@iBADw1P5l@5pQS0r^9Kq_bII(QKJaGO9mY1jaV9x)R#-D|tNE5! 
zus@S!jzgjU(D>!D9^8F-zlwOF;RDzlSB%SQ(7uE(Y4`tuqOsI{Gu|J+^n+e_VQhMk z*bi0g1DCgtp+5Av`?{zPJH7o@>Qlzp*Qx&0Q{A^+{b>h{`gSeKE_1(Geal3nw7UCS z)VJ2sAQi+4p;0&$CJ^o)0HFeeQ*bIz74WD$sELpchHM)CR6GU4_Ag-ARu}nF7a*MC zi+n4(MZNdvp;f-?D*-4spDH7kUo{z{=}lM2LHAv^`N$*E$sMW9f} zYhYkcC9&Tm7q8*9yzYix8QApz&YV1bK>?4lOMqkwc!h!e9bWZFH@}2|orZxOrk1RM z4D2dd3mT6W1_7=IBH?;b^r7fSF@OT6B0)pm6a~%zI?mV-OfiR%x6!b`;c!RM4R-&9 zPMbSHyl~&bry&$CqX?rQqmD9$aA)9I!T>_|Zo(*0JY513OI0ATcxA#urZFmUB2+w$;Wbb*0|25hRB{zf8{F_*X~nBj zT|mF`+Da{shdOsdBe&m_uBfi-SbY((W?<=*ku{`D5R$G^U6#`YQz^)~copUE7)%&c ztP!VaESWYHgHB$}?cBuondvR0UM(X6d}niF&J zCJyL%h+B4Ns^pG0@J8NrR(*m3h^tH&0Yo>$RJBrm5JanB+KCfa9j`A1(Ey<7l>RmS zI8c-LTLPeqL*IrIRVYt;Sw`Vj>X8f=kl87_l(II(9A)VRNRD6W6cy9R;QAEx=0C}L znQJT5pu*(MDEF=@Gx+(1>?dfS80#%2Cl8)Dc9igjfw;s6%S5Qe%}M;HM45I5))k@b zjR6U@h2mT&7LOq@37|NXjJ%q}N0E`yWe_WL_5lu z9sp$5gkFO7+>_`9QS*eS1epT`i;%x-LK{F2y&R6U6W$UxB6=s`=}7XrxNdM2WJC;o z1cS!<$>E0+JJw^UAyVd^29eibe#GAP8GMLSOz{2!>Ij9oXHh(d0(%b*C-vMZ6oV*U zMDY@cyiq9~?ljyd(6!tJTUaH}r>H^I3H(=zw`{E7KK!QP!^ zdX{W;1h&`P;J7B|Z+xfjdflQQA*gTbf^TcaxBdGAUo6>t2;*vUzUnIj z2;pk}*RmcweRW>2c1AEmU?bU+*vCh~7(q7-)q58O;M!mz_-KG7wQ-%5-8uczU=%1+Y?I${S zPo4UcS{fv`3nKYFcB#EVO<@{VsC&wwg##9XB{FD@YRU#Jk>8WqJ=Ln5j2b#~0E`kR zs3rp#5~J$Xe6bqf1Puj137x9kwIU~|=T#sLMa-Z*>Ic&0Q~VCbwRoS^BkI6+IroUP;aT+1Ul zK?`fUVOR2j1D1|;@KzZ9oMf0(zz3{zSt_z|F1akl!3bHris^w>Z3w%V2eipPOG^Pc zK=^a;HkeTSHI%NCcUFv9VZ~%%zFO@`xmF0JN>0UNh2k#a0V}7foL5w_z8ij}ggv~g zj0Xf3koN#nC;^C674L!RLS>FmNh{$29Tj=NVmHVG+F`n2hwoWKrV9>PD`V-JN;vHD znu_eTsu*%g3?Q8{zh(voko=Ya7#xAf1@P#rGJqEOi=@PL=|c+0s2#&`^YBMMW0Qdc zK<)(2e)NxAAQFv50vF@4i(Dib3G|Fc6R^@8Sl5xfB=TyBNEn8@j&H|O9TD+m$G0O2 z=@)F5-~;x9+-?vLH0bq#p+oXGVO0EaxKLFDH2RBx#$(1;nPlXign%cyN&rUSaYK~A z3ilL*AwiWVOG1)}#>Xp=A}k~FkDVkeg@1g^P>N{DRVSm|AxL%5OQ^uSMJ)1A3|2dV zzqCrt>u1Ji4RdvKw)yc} z^+Nv(!VBT-nTx_OmpPLV!b!n2_6>1^g7q2U_zS`_rvzp&Yk2XS;ss9%&%P+U6c$d0 zg=fzQCx--v%^D)BvVwy3=x-9jiBRU)%fivHz?{h%hW>>Uf>o>10AJicQh^W@oG%Mu zR(ScGU_YN_qJJzI*zWDCr#{@|?q{fvoZkLA>eG5-zft{ZtG8dH{&X)5`YnwXC1dWd 
zR^Rf|DC^w)+ts(W(IA7`N(eB|L}Nq4W30&FD0mPlJY~&yJy3^bBnHjXk2R)(J6!Nq zmd97ej(T2CT+_Tt3Vl`7$m>}Rt7Ua>Ys;{seg&kcE z*PK)){Jb(+0*X>x16tDpiZD;wd4@w&r(qdDmo#g62-TS+%!yyXLP}PDL0OHHb67;X z0ya$9cne@Nn3%$9t-WII6l|x|fYriutPqM_Mgor&3as?<){E<)|JCtMNu%F9$=g$~ z!Wg4*O>|NVE8Py9X9G$tkQ%m%c_>@wW*u^B083W(QHF)t)oXGwmaH6}TaHoAKkK;So2a;=afe_0Bu<@*#^kv_5nYh>3_`U@rKLmWJgHQx{P z_QeO;TOL?? zLi-8uwWaeE(!%PXh1E+ftdXy;*urX|g+Z<44ixnmbrpJyt$ba@dRSXpieLrOkZe%C zTZ34(EA%o=vS0ZZ%G;eV^PYr}uS)27Z$iua5^CO$UnAM5gi2;5d=>BIeZ2oh^Lx^A z4WT}9xtLs%{b+T{OIGTQGERrNMX?*C8yzVh(1l;hE`n<&4xJbqI-eB5`^o3p(B=!Q z5u>7M<5-Lh3`fD-kxK@i27|A_hQQOr;;Zf1=UvlGli#(8p!Eg@NJVwEAV8V<*pBp+4aC_gono9IrUJpyo;2|*~ zCQn|#4-|aLjNt<#0DR)MYofUl2v_1N#@UG6$zn?066d1lqF_Et%=|cH81kwpn>WNS zML4jPy_`3Twr}#9{?=_<$ns9!awa@%o}@|&HzHsu+#gbRhiFXZ%ph@TBYG?B+c z@iDZoli+JmT~uC9aiKnxj4P|~TpYYVp!Z4aYM~6I34vS!6OEO!EjuE{4P;&s1bzau z-HHO*2gL%4&+BJ!8$59k!7z*vmUBnl-hLyL+;@n7h7>w;$ zFvP@~ra1$~zF><1H8cuFx)E`y>^tb=(YelBSTKK#4&%BRnmw4LTVnmdhQ#?ESYyL` z+2kRF1YF|Vobu);=68PFo!vArby=KLH7-~i)4{BD<1G&OWvl1P$c%HcuRvo9*7n&;v*+ic zAFFTneZ6Hb38&y5CrYsPQ;A&npH>Xp=uKrAGf6i4q)qRI?ESn+R zWri{^lRT9F^ef+=n9}?=t7Cf2wRKn5&20N|`=WDg`l*by1@g?;f7Ji|{^`LC6If(g z(qP5ewqS0{m^&uBmn_~H%{zwchIC!V*Rkl^KDTAQH)}b7CMWWnhKz6XqHpKi@%gh^ zOV4C4nt^zh997fFYp-5?HEqp!I~Kh==C)=WJEwHZI<#uAyQaIZ^Ao`3Au~khLGofkj5*%D%TJ zW`pma%rx6Ez1-P*a(3dS;-*8LgR0r)Ux z+jZSfFa7fD2hT2?cpfskWcFfBwK#G%kM!RP?ZR>2W?*#7ltHWe;{HC#u)~vcxPPES z!w%1v_YKlRXU^`pqQ!^K2MO$$KK0ih+CqK0tNyTt`t=s`VOsqgibkn1A9kvL_wIUrX;Rwl1rYT6iTId8G<)eTCHXunlLJWl_Aq( z9k+D_p9)zm17R>nE==vL^3#g3Dicg4HdvJ5E%@~T#46SWB8knaQVf9Deg(ujCQiz0 z-W_Bk6XBZ!Fysf;$*bPO<3ZR|6Jv*A*Nz0V0+*uUz{B{eOL`4_G@?FnODu?#AMVFY&- zuo!_{1du6cD`=}^=vhSH5}36j!g>qSgk6LPZ*Zxt4AY2c#x0p)l1zm}$dusT2It?# z&f_@{1x$m+nD*(Wsj(?$1#Ba|1)B099CKHzt_S#X*FYKExzFYwU|1tsaPCa+ zOb2H--#9gQDbu!B*tkzHJxL(V+<}|kTSG!PmTinHXr>sWY@TbG^9f96*6>6rGTAj> zyk>1blw}S}-0a&2)0f}ZWNWu8xxta!z2x*vpSyPP>cw<0Q?+riYV#bOac-H?11xJ= zLx$y?(|>Kl)eSSzjC0+hvwe2|lzz$9lF%j6 
zLF4=4zC{|e+&QP~iXI0o_k#o)CVaTHzMG*wX3X8y>W}?2$dW!6VLRz_|EH$3&&4f! zN>95wVe%)U@J~cxWx7%6$0edkwJL@AC!!DbQN09%XUdzZE}^-6Ky&$l=Bi$e=Hjbw)V*gA%jF@W3rVbMLSmhj z$N!2nmmd(q3$lw~{lq|l=86S+qr)TL1j)r+f>d&sQM?Lb!YcDnWa!&RFaaSZ$|xv> zuOjYb!iIYd!)U354|P`r+!AH^Fe{tU&R zqxe@Sra;JCQ0Z2XGK3H`RHRO>V<3eI1pvMk=Pyb^|*Jy%g+!8Ex;!#;2@l-S6Z6J=idmo zv-d`Dt~ay(2_d*gFm?T3;dTgan)iRa_2%V|_S|e2m}jzv`T-~PX=nXF zGxeF%JP=TS)w4hTr`ddxlh8el=lj`nd~P!D^qfd!F_jR+4? zU_WXhOtB}ywp*Fl_$j(Up@HT|0JOXfpKL4l__7+GxUC!=RZeviyjg+mLGFf}G%&7# zvlqzu0w;(WR9RBjK@gP___y$eLt(}QGN8x_WNdF75|f6nm82o7qL>2&)D=*^ykNgN z%2)9!VjPO|g2Hz3RXG@98d|ztyu>tg1r}xX?c#>7wO!mOvma&K#bHN*V&YeM%Po{M zY*IJl0)Mfo9U7DpqfjW1$KEDhc|-a-;F}(85^65lCeDyl*@*Fg{UPH4b49~UWtmq& z<4_yQulJ+}VERxwl=9{G)oYEFw{qrWl^hE!2D^AGl%zUF18)Nc3xNJ?1pQ$ z&}GQ!^Ge7Gjrv@lKekEVD|AhmW}JH_58c_h|0g!V7D#K;7t^N~EL(4>CVR=M z3Rz6?&m?Div*xXWX6qOCJ7l&Aw&@bC>C5|oC1A4!&`yO%4Vk)SoodC|iv^Th*Uzp;7K|_gd7yVrY;<;FDY>qgt)@j~dl9 zJRJAI(}eN*|7T=_H3DU113kq+e`|T{kqXSx6o%d`7kc3rj=Y0LhnMII)&eRYHgcSd zQ!ti5&u=a2dsjGY1V+N$JoDYw;+2UD%IG1+h!V!V;tdiu5({iifOV0FHb}(a7!|T| zqJ=FKmP#&Q|Dg-V@`Q@F;#dTxJmPUD{|-0{M+w7H`RJ2!gD_eo(a@MGj8utq6C_f}+^)cN8TOrH^QzH1Xe%_;u=~ciAX~Bp|4NMRH z=HHYsLsf9Jin67GxAV?D@GakbAGbqDB|twk-Un7V3Pr?aXn0w&1%m9~gac5y9m~d^+7?4h(?+l#Y9L`*r_2Qoz8%4J}(;k2io=q z*5Tn^8{tGcF#rDwsApmufiBXaE)VZdSY^va-XNtS!TlRZ`GiSMRYKbI$^9KXCL24+ zvKYtVEr0=u-@>)2yeW~44x_P7@;m`Tn>K+hRV%t7GeMjL0+g}fgR%v*717WIbbK5| z0T&T&ih!e^Vu<3F3GOR_qtbn&q5;#p7z<)0I6jS7u92<%d1GW$-gZH_Dbz$70hH^NpGMJyXYEWf5$nqF1A7CR5e6 zScRC+)4Aa3%)kLLraQY2{A8ctY))^Vt)68T?AvcS$QJ zTgy)Arp6Zy{^e#&{PG%#abDRweNxc)zqo%+-a_Fn-kN*x*4A6Cg8g`wIk9AOkxjYo zufHkxe@mNktIIa!{#e|U`z36X7SChx2OGS7 zYpD<2?tTaL(K;`%Qy;h2545R1*<*k9io)N$J&y<`hIIUq{Z{y}~Q{ntrZ7bj(;HzJcV zoKTC~Mu@f#Z^>KSzW$*0fOKTki!Z`qg{L=?_)Z7Jc**8=u$7F*hdB~g8+d_j$Y{ru z*Tv6VfI~%#UPyc6D@4Q1VXV)g_$3M){+PhF|%8qjzXsF;@Sa8`=g7kJRD*nL2^d&Qzaj=OHQ|x&&xb5Pnf@EV#^{4 zD228$Q0XL1jP8e6#NSMwyd`-$BHNdhW~%U2zIqvwlJk|sQs6|t;b_(<(csa@Sn)On z$_V4rr-N(o5ZAL~&^F-^R}M+CyjeM78EoY=Sh~oLQLYu 
zmBWQg4*AzS17^a*(KGGj&>hiU+Jeu`10$nxF4=>p+KIN*2qyAca_BIJW;Nn@6+YC~ zpuqVvIn9im)+1V7C-B){p+LJjVqQbmmI#?mXd6P9kkP*EeANerJ>rSAd%6DzesN-# zcmTPE4wI(;6V>$Jsm9+?=HF5F|4i-rEwu}tENj%X`*#}GvWaqZOj>fz_DNgLvu66# zwb0d2#?vBD&U-p5ZNJ+@sm#-Bvb6V~RchL{Oo6zkrc^!{vuN90rtnrzH!M>iq_?QK zHFVpubr0PD$KWn&;TOg-t7BP@ngKNCXV#$0v7T~`;R)nZ@P!Y_p8_AcvDGf*DivJo{n{0*#!QDZRG7SzDh+Ocdy z&5p4;P;+9e7}~NMQ|&>mUsW7e&R>7mah9fapPQ}AT2!Ru6V-sB8;L4LNmK^Iw-D8e z5!i@or(E7;5)>9u?L8Mhy?Y!!lKkNe5IY4z`ZRc3)k6D8PT^OI7&S3o)Wm2}6Jte9 zj1)C7PShaEmU}ML220A4h$_ZIR52o=iZKyYjEbmYTttP)>h8Hvz58TQ z{?rgVR7}58G5y6AQ$^dCDG<_Iymc3UiMx&tdK)PV_?5moYGMsVO{}4)i8T~8v4)~1 z)=<Szwbizi{)1w nus;@vJK7Bh_4R&i^I$pVfIi^onx5U%C%g5%8uhQ#G|2x0xI%{= diff --git a/addon/addon.xml b/addon/addon.xml index e29259a..2ca7853 100644 --- a/addon/addon.xml +++ b/addon/addon.xml @@ -1,5 +1,5 @@ - - + + @@ -10,8 +10,8 @@ video - ViewIt Kodi Plugin - Streaming-Addon für Streamingseiten: Suche, Staffeln/Episoden und Wiedergabe. + Suche und Wiedergabe fuer mehrere Quellen + Findet Titel in unterstuetzten Quellen und startet Filme oder Episoden direkt in Kodi. icon.png diff --git a/addon/default.py b/addon/default.py index ee63d76..db9bc5c 100644 --- a/addon/default.py +++ b/addon/default.py @@ -8,6 +8,7 @@ ruft Plugin-Implementierungen auf und startet die Wiedergabe. 
from __future__ import annotations import asyncio +import atexit from contextlib import contextmanager from datetime import datetime import importlib.util @@ -16,11 +17,27 @@ import json import os import re import sys +import threading +import time import xml.etree.ElementTree as ET from pathlib import Path from types import ModuleType from urllib.parse import parse_qs, urlencode + +def _ensure_windows_selector_policy() -> None: + """Erzwingt unter Windows einen Selector-Loop (thread-kompatibel in Kodi).""" + if not sys.platform.startswith("win"): + return + try: + current = asyncio.get_event_loop_policy() + if current.__class__.__name__ == "WindowsSelectorEventLoopPolicy": + return + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + except Exception: + # Fallback: Wenn die Policy nicht verfügbar ist, arbeitet der Code mit Default-Policy weiter. + return + try: # pragma: no cover - Kodi runtime import xbmc # type: ignore[import-not-found] import xbmcaddon # type: ignore[import-not-found] @@ -87,6 +104,14 @@ except ImportError: # pragma: no cover - allow importing outside Kodi (e.g. 
lin xbmcplugin = _XbmcPluginStub() from plugin_interface import BasisPlugin +from http_session_pool import close_all_sessions +from plugin_helpers import normalize_resolved_stream_url +from metadata_utils import ( + collect_plugin_metadata as _collect_plugin_metadata, + merge_metadata as _merge_metadata, + metadata_policy as _metadata_policy_impl, + needs_tmdb as _needs_tmdb, +) from tmdb import TmdbCastMember, fetch_tv_episode_credits, lookup_movie, lookup_tv_season, lookup_tv_season_summary, lookup_tv_show PLUGIN_DIR = Path(__file__).with_name("plugins") @@ -101,7 +126,23 @@ _TMDB_LOG_PATH: str | None = None _GENRE_TITLES_CACHE: dict[tuple[str, str], list[str]] = {} _ADDON_INSTANCE = None _PLAYSTATE_CACHE: dict[str, dict[str, object]] | None = None +_PLAYSTATE_LOCK = threading.RLock() +_TMDB_LOCK = threading.RLock() WATCHED_THRESHOLD = 0.9 +POPULAR_MENU_LABEL = "Haeufig gesehen" +LATEST_MENU_LABEL = "Neuste Titel" + +atexit.register(close_all_sessions) + + +def _tmdb_cache_get(cache: dict, key, default=None): + with _TMDB_LOCK: + return cache.get(key, default) + + +def _tmdb_cache_set(cache: dict, key, value) -> None: + with _TMDB_LOCK: + cache[key] = value def _tmdb_prefetch_concurrency() -> int: @@ -140,12 +181,19 @@ def _busy_close() -> None: @contextmanager -def _busy_dialog(): - _busy_open() - try: - yield - finally: - _busy_close() +def _busy_dialog(message: str = "Bitte warten...", *, heading: str = "Bitte warten"): + """Progress-Dialog statt Spinner, mit kurzem Status-Text.""" + with _progress_dialog(heading, message) as progress: + progress(10, message) + + def _update(step_message: str, percent: int | None = None) -> bool: + pct = 50 if percent is None else max(5, min(95, int(percent))) + return progress(pct, step_message or message) + + try: + yield _update + finally: + progress(100, "Fertig") @contextmanager @@ -187,6 +235,42 @@ def _progress_dialog(heading: str, message: str = ""): pass +def _run_with_progress(heading: str, message: str, loader): + 
"""Fuehrt eine Ladefunktion mit sichtbarem Fortschrittsdialog aus.""" + with _progress_dialog(heading, message) as progress: + progress(10, message) + result = loader() + progress(100, "Fertig") + return result + + +def _method_accepts_kwarg(method: object, kwarg_name: str) -> bool: + if not callable(method): + return False + try: + signature = inspect.signature(method) + except Exception: + return False + for param in signature.parameters.values(): + if param.kind == inspect.Parameter.VAR_KEYWORD: + return True + if param.name == kwarg_name and param.kind in ( + inspect.Parameter.POSITIONAL_OR_KEYWORD, + inspect.Parameter.KEYWORD_ONLY, + ): + return True + return False + + +def _call_plugin_search(plugin: BasisPlugin, query: str, *, progress_callback=None): + method = getattr(plugin, "search_titles", None) + if not callable(method): + raise RuntimeError("Plugin hat keine gueltige search_titles Methode.") + if progress_callback is not None and _method_accepts_kwarg(method, "progress_callback"): + return method(query, progress_callback=progress_callback) + return method(query) + + def _get_handle() -> int: return int(sys.argv[1]) if len(sys.argv) > 1 else -1 @@ -226,115 +310,26 @@ def _playstate_path() -> str: def _load_playstate() -> dict[str, dict[str, object]]: - global _PLAYSTATE_CACHE - if _PLAYSTATE_CACHE is not None: - return _PLAYSTATE_CACHE - path = _playstate_path() - try: - if xbmcvfs and xbmcvfs.exists(path): - handle = xbmcvfs.File(path) - raw = handle.read() - handle.close() - else: - with open(path, "r", encoding="utf-8") as handle: - raw = handle.read() - data = json.loads(raw or "{}") - if isinstance(data, dict): - normalized: dict[str, dict[str, object]] = {} - for key, value in data.items(): - if isinstance(key, str) and isinstance(value, dict): - normalized[key] = dict(value) - _PLAYSTATE_CACHE = normalized - return normalized - except Exception: - pass - _PLAYSTATE_CACHE = {} return {} def _save_playstate(state: dict[str, dict[str, object]]) -> 
None: - global _PLAYSTATE_CACHE - _PLAYSTATE_CACHE = state - path = _playstate_path() - try: - payload = json.dumps(state, ensure_ascii=False, sort_keys=True) - except Exception: - return - try: - if xbmcvfs: - directory = os.path.dirname(path) - if directory and not xbmcvfs.exists(directory): - xbmcvfs.mkdirs(directory) - handle = xbmcvfs.File(path, "w") - handle.write(payload) - handle.close() - else: - with open(path, "w", encoding="utf-8") as handle: - handle.write(payload) - except Exception: - return + return def _get_playstate(key: str) -> dict[str, object]: - return dict(_load_playstate().get(key, {}) or {}) + return {} def _set_playstate(key: str, value: dict[str, object]) -> None: - state = _load_playstate() - if value: - state[key] = dict(value) - else: - state.pop(key, None) - _save_playstate(state) + return def _apply_playstate_to_info(info_labels: dict[str, object], playstate: dict[str, object]) -> dict[str, object]: - info_labels = dict(info_labels or {}) - watched = bool(playstate.get("watched") or False) - resume_position = playstate.get("resume_position") - resume_total = playstate.get("resume_total") - if watched: - info_labels["playcount"] = 1 - info_labels.pop("resume_position", None) - info_labels.pop("resume_total", None) - else: - try: - pos = int(resume_position) if resume_position is not None else 0 - tot = int(resume_total) if resume_total is not None else 0 - except Exception: - pos, tot = 0, 0 - if pos > 0 and tot > 0: - info_labels["resume_position"] = pos - info_labels["resume_total"] = tot - return info_labels - - -def _time_label(seconds: int) -> str: - try: - seconds = int(seconds or 0) - except Exception: - seconds = 0 - if seconds <= 0: - return "" - hours = seconds // 3600 - minutes = (seconds % 3600) // 60 - secs = seconds % 60 - if hours > 0: - return f"{hours:02d}:{minutes:02d}:{secs:02d}" - return f"{minutes:02d}:{secs:02d}" + return dict(info_labels or {}) def _label_with_playstate(label: str, playstate: dict[str, object]) 
-> str: - watched = bool(playstate.get("watched") or False) - if watched: - return f"✓ {label}" - resume_pos = playstate.get("resume_position") - try: - pos = int(resume_pos) if resume_pos is not None else 0 - except Exception: - pos = 0 - if pos > 0: - return f"↩ {_time_label(pos)} {label}" return label @@ -402,6 +397,59 @@ def _get_setting_bool(setting_id: str, *, default: bool = False) -> bool: return default +def _get_setting_int(setting_id: str, *, default: int = 0) -> int: + if xbmcaddon is None: + return default + addon = _get_addon() + if addon is None: + return default + getter = getattr(addon, "getSettingInt", None) + if callable(getter): + raw_getter = getattr(addon, "getSetting", None) + if callable(raw_getter): + try: + raw = str(raw_getter(setting_id) or "").strip() + except TypeError: + raw = "" + if raw == "": + return default + try: + return int(getter(setting_id)) + except TypeError: + return default + getter = getattr(addon, "getSetting", None) + if callable(getter): + try: + raw = str(getter(setting_id) or "").strip() + except TypeError: + return default + if raw == "": + return default + try: + return int(raw) + except ValueError: + return default + return default + + +def _metadata_policy( + plugin_name: str, + plugin: BasisPlugin, + *, + allow_tmdb: bool, +) -> tuple[bool, bool, bool]: + return _metadata_policy_impl( + plugin_name, + plugin, + allow_tmdb=allow_tmdb, + get_setting_int=_get_setting_int, + ) + + +def _tmdb_list_enabled() -> bool: + return _tmdb_enabled() and _get_setting_bool("tmdb_genre_metadata", default=False) + + def _set_setting_string(setting_id: str, value: str) -> None: if xbmcaddon is None: return @@ -625,11 +673,11 @@ def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], li show_cast = _get_setting_bool("tmdb_show_cast", default=False) flags = f"p{int(show_plot)}a{int(show_art)}f{int(show_fanart)}r{int(show_rating)}v{int(show_votes)}c{int(show_cast)}" cache_key = 
f"{language}|{flags}|{title_key}" - cached = _TMDB_CACHE.get(cache_key) + cached = _tmdb_cache_get(_TMDB_CACHE, cache_key) if cached is not None: info, art = cached # Cast wird nicht in _TMDB_CACHE gehalten (weil es ListItem.setCast betrifft), daher separat cachen: - cast_cached = _TMDB_CAST_CACHE.get(cache_key, []) + cast_cached = _tmdb_cache_get(_TMDB_CAST_CACHE, cache_key, []) return info, art, list(cast_cached) info_labels: dict[str, str] = {"title": title} @@ -687,7 +735,7 @@ def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], li if meta: # Nur TV-IDs cachen (für Staffel-/Episoden-Lookups); Movie-IDs würden dort fehlschlagen. if is_tv: - _TMDB_ID_CACHE[title_key] = int(getattr(meta, "tmdb_id", 0) or 0) + _tmdb_cache_set(_TMDB_ID_CACHE, title_key, int(getattr(meta, "tmdb_id", 0) or 0)) info_labels.setdefault("mediatype", "tvshow") else: info_labels.setdefault("mediatype", "movie") @@ -715,8 +763,8 @@ def _tmdb_labels_and_art(title: str) -> tuple[dict[str, str], dict[str, str], li elif log_requests or log_responses: _tmdb_file_log(f"TMDB MISS title={title!r}") - _TMDB_CACHE[cache_key] = (info_labels, art) - _TMDB_CAST_CACHE[cache_key] = list(cast) + _tmdb_cache_set(_TMDB_CACHE, cache_key, (info_labels, art)) + _tmdb_cache_set(_TMDB_CAST_CACHE, cache_key, list(cast)) return info_labels, art, list(cast) @@ -762,10 +810,10 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label if not _tmdb_enabled(): return {"title": episode_label}, {} title_key = (title or "").strip().casefold() - tmdb_id = _TMDB_ID_CACHE.get(title_key) + tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key) if not tmdb_id: _tmdb_labels_and_art(title) - tmdb_id = _TMDB_ID_CACHE.get(title_key) + tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key) if not tmdb_id: return {"title": episode_label}, {} @@ -779,7 +827,7 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label show_art = _get_setting_bool("tmdb_show_art", 
default=True) flags = f"p{int(show_plot)}a{int(show_art)}" season_key = (tmdb_id, season_number, language, flags) - cached_season = _TMDB_SEASON_CACHE.get(season_key) + cached_season = _tmdb_cache_get(_TMDB_SEASON_CACHE, season_key) if cached_season is None: api_key = _get_setting_string("tmdb_api_key").strip() if not api_key: @@ -812,7 +860,7 @@ def _tmdb_episode_labels_and_art(*, title: str, season_label: str, episode_label if show_art and ep.thumb: art = {"thumb": ep.thumb} mapped[ep_no] = (info, art) - _TMDB_SEASON_CACHE[season_key] = mapped + _tmdb_cache_set(_TMDB_SEASON_CACHE, season_key, mapped) cached_season = mapped return cached_season.get(episode_number, ({"title": episode_label}, {})) @@ -826,10 +874,10 @@ def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> return [] title_key = (title or "").strip().casefold() - tmdb_id = _TMDB_ID_CACHE.get(title_key) + tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key) if not tmdb_id: _tmdb_labels_and_art(title) - tmdb_id = _TMDB_ID_CACHE.get(title_key) + tmdb_id = _tmdb_cache_get(_TMDB_ID_CACHE, title_key) if not tmdb_id: return [] @@ -840,13 +888,13 @@ def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> language = _get_setting_string("tmdb_language").strip() or "de-DE" cache_key = (tmdb_id, season_number, episode_number, language) - cached = _TMDB_EPISODE_CAST_CACHE.get(cache_key) + cached = _tmdb_cache_get(_TMDB_EPISODE_CAST_CACHE, cache_key) if cached is not None: return list(cached) api_key = _get_setting_string("tmdb_api_key").strip() if not api_key: - _TMDB_EPISODE_CAST_CACHE[cache_key] = [] + _tmdb_cache_set(_TMDB_EPISODE_CAST_CACHE, cache_key, []) return [] log_requests = _get_setting_bool("tmdb_log_requests", default=False) @@ -868,7 +916,7 @@ def _tmdb_episode_cast(*, title: str, season_label: str, episode_label: str) -> f"TMDB ERROR episode_credits_failed tmdb_id={tmdb_id} season={season_number} episode={episode_number} error={exc!r}" ) cast = [] - 
_TMDB_EPISODE_CAST_CACHE[cache_key] = list(cast) + _tmdb_cache_set(_TMDB_EPISODE_CAST_CACHE, cache_key, list(cast)) return list(cast) @@ -921,6 +969,33 @@ def _normalize_update_info_url(raw: str) -> str: return value.rstrip("/") + "/addons.xml" +UPDATE_CHANNEL_MAIN = 0 +UPDATE_CHANNEL_NIGHTLY = 1 +UPDATE_CHANNEL_CUSTOM = 2 +AUTO_UPDATE_INTERVAL_SEC = 6 * 60 * 60 + + +def _selected_update_channel() -> int: + channel = _get_setting_int("update_channel", default=UPDATE_CHANNEL_MAIN) + if channel not in {UPDATE_CHANNEL_MAIN, UPDATE_CHANNEL_NIGHTLY, UPDATE_CHANNEL_CUSTOM}: + return UPDATE_CHANNEL_MAIN + return channel + + +def _resolve_update_info_url() -> str: + channel = _selected_update_channel() + if channel == UPDATE_CHANNEL_NIGHTLY: + raw = _get_setting_string("update_repo_url_nightly") + elif channel == UPDATE_CHANNEL_CUSTOM: + raw = _get_setting_string("update_repo_url") + else: + raw = _get_setting_string("update_repo_url_main") + info_url = _normalize_update_info_url(raw) + # Legacy-Setting beibehalten, damit bestehende Installationen und alte Builds weiterlaufen. 
+ _set_setting_string("update_repo_url", info_url) + return info_url + + def _repo_addon_xml_path() -> str: if xbmcvfs is None: return "" @@ -963,52 +1038,6 @@ def _settings_key_for_plugin(name: str) -> str: return f"update_version_{safe}" if safe else "update_version_unknown" -def _collect_plugin_metadata(plugin: BasisPlugin, titles: list[str]) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]]: - getter = getattr(plugin, "metadata_for", None) - if not callable(getter): - return {} - collected: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]] = {} - for title in titles: - try: - labels, art, cast = getter(title) - except Exception: - continue - if isinstance(labels, dict) or isinstance(art, dict) or cast: - label_map = {str(k): str(v) for k, v in dict(labels or {}).items() if v} - art_map = {str(k): str(v) for k, v in dict(art or {}).items() if v} - collected[title] = (label_map, art_map, cast if isinstance(cast, list) else None) - return collected - - -def _needs_tmdb(labels: dict[str, str], art: dict[str, str], *, want_plot: bool, want_art: bool) -> bool: - if want_plot and not labels.get("plot"): - return True - if want_art and not (art.get("thumb") or art.get("poster") or art.get("fanart") or art.get("landscape")): - return True - return False - - -def _merge_metadata( - title: str, - tmdb_labels: dict[str, str] | None, - tmdb_art: dict[str, str] | None, - tmdb_cast: list[TmdbCastMember] | None, - plugin_meta: tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None] | None, -) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]: - labels = dict(tmdb_labels or {}) - art = dict(tmdb_art or {}) - cast = tmdb_cast - if plugin_meta is not None: - meta_labels, meta_art, meta_cast = plugin_meta - labels.update({k: str(v) for k, v in dict(meta_labels or {}).items() if v}) - art.update({k: str(v) for k, v in dict(meta_art or {}).items() if v}) - if meta_cast is not None: - cast = 
meta_cast - if "title" not in labels: - labels["title"] = title - return labels, art, cast - - def _sync_update_version_settings() -> None: addon = _get_addon() addon_version = "0.0.0" @@ -1038,7 +1067,7 @@ def _sync_update_version_settings() -> None: def _show_root_menu() -> None: handle = _get_handle() _log("Root-Menue wird angezeigt.") - _add_directory_item(handle, "Globale Suche", "search") + _add_directory_item(handle, "Suche in allen Quellen", "search") plugins = _discover_plugins() for plugin_name in sorted(plugins.keys(), key=lambda value: value.casefold()): @@ -1053,7 +1082,7 @@ def _show_plugin_menu(plugin_name: str) -> None: plugin_name = (plugin_name or "").strip() plugin = _discover_plugins().get(plugin_name) if not plugin: - xbmcgui.Dialog().notification("Plugin", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Quelle", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -1061,11 +1090,8 @@ def _show_plugin_menu(plugin_name: str) -> None: _add_directory_item(handle, "Suche", "plugin_search", {"plugin": plugin_name}, is_folder=True) - if _plugin_has_capability(plugin, "new_titles"): - _add_directory_item(handle, "Neue Titel", "new_titles", {"plugin": plugin_name, "page": "1"}, is_folder=True) - - if _plugin_has_capability(plugin, "latest_episodes"): - _add_directory_item(handle, "Neueste Folgen", "latest_episodes", {"plugin": plugin_name, "page": "1"}, is_folder=True) + if _plugin_has_capability(plugin, "new_titles") or _plugin_has_capability(plugin, "latest_episodes"): + _add_directory_item(handle, LATEST_MENU_LABEL, "latest_titles", {"plugin": plugin_name, "page": "1"}, is_folder=True) if _plugin_has_capability(plugin, "genres"): _add_directory_item(handle, "Genres", "genres", {"plugin": plugin_name}, is_folder=True) @@ -1077,7 +1103,7 @@ def _show_plugin_menu(plugin_name: str) -> None: _add_directory_item(handle, "Serien", "series_catalog", {"plugin": 
plugin_name, "page": "1"}, is_folder=True) if _plugin_has_capability(plugin, "popular_series"): - _add_directory_item(handle, "Meist gesehen", "popular", {"plugin": plugin_name, "page": "1"}, is_folder=True) + _add_directory_item(handle, POPULAR_MENU_LABEL, "popular", {"plugin": plugin_name, "page": "1"}, is_folder=True) xbmcplugin.endOfDirectory(handle) @@ -1086,7 +1112,7 @@ def _show_plugin_search(plugin_name: str) -> None: plugin_name = (plugin_name or "").strip() plugin = _discover_plugins().get(plugin_name) if not plugin: - xbmcgui.Dialog().notification("Suche", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Suche", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) _show_root_menu() return @@ -1107,7 +1133,7 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None: query = (query or "").strip() plugin = _discover_plugins().get(plugin_name) if not plugin: - xbmcgui.Dialog().notification("Suche", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Suche", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -1118,20 +1144,34 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None: list_items: list[dict[str, object]] = [] canceled = False try: - with _progress_dialog("Suche läuft", f"{plugin_name} (1/1) starte…") as progress: - canceled = progress(5, f"{plugin_name} (1/1) Suche…") - results = _run_async(plugin.search_titles(query)) - results = [str(t).strip() for t in (results or []) if t and str(t).strip()] + with _progress_dialog("Suche laeuft", f"{plugin_name} (1/1) startet...") as progress: + canceled = progress(5, f"{plugin_name} (1/1) Suche...") + plugin_progress = lambda msg="", pct=None: progress( # noqa: E731 - kompakte Callback-Bruecke + max(5, min(95, int(pct))) if pct is not None else 20, + f"{plugin_name} (1/1) {str(msg or 'Suche...').strip()}", + ) + search_coro = 
_call_plugin_search(plugin, query, progress_callback=plugin_progress) + try: + results = _run_async(search_coro) + except Exception: + if inspect.iscoroutine(search_coro): + try: + search_coro.close() + except Exception: + pass + raise + results = _clean_search_titles([str(t).strip() for t in (results or []) if t and str(t).strip()]) results.sort(key=lambda value: value.casefold()) - plugin_meta = _collect_plugin_metadata(plugin, results) + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, results) if use_source else {} tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - show_tmdb = _tmdb_enabled() show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) - prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) - tmdb_titles = list(results) - if show_tmdb and prefer_source: + tmdb_titles = list(results) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: tmdb_titles = [] for title in results: meta = plugin_meta.get(title) @@ -1140,7 +1180,7 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None: if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): tmdb_titles.append(title) if show_tmdb and tmdb_titles and not canceled: - canceled = progress(35, f"{plugin_name} (1/1) Metadaten…") + canceled = progress(35, f"{plugin_name} (1/1) Metadaten...") tmdb_prefetched = _tmdb_labels_and_art_bulk(list(tmdb_titles)) total_results = max(1, len(results)) @@ -1230,11 +1270,16 @@ def _discover_plugins() -> dict[str, BasisPlugin]: except Exception as exc: xbmc.log(f"Plugin-Datei {file_path.name} konnte nicht geladen werden: {exc}", xbmc.LOGWARNING) continue - plugin_classes = [ - obj - for obj in module.__dict__.values() - if inspect.isclass(obj) and issubclass(obj, BasisPlugin) and obj is not 
BasisPlugin - ] + preferred = getattr(module, "Plugin", None) + if inspect.isclass(preferred) and issubclass(preferred, BasisPlugin) and preferred is not BasisPlugin: + plugin_classes = [preferred] + else: + plugin_classes = [ + obj + for obj in module.__dict__.values() + if inspect.isclass(obj) and issubclass(obj, BasisPlugin) and obj is not BasisPlugin + ] + plugin_classes.sort(key=lambda cls: cls.__name__.casefold()) for cls in plugin_classes: try: instance = cls() @@ -1245,24 +1290,55 @@ def _discover_plugins() -> dict[str, BasisPlugin]: reason = getattr(instance, "unavailable_reason", "Nicht verfuegbar.") xbmc.log(f"Plugin {cls.__name__} deaktiviert: {reason}", xbmc.LOGWARNING) continue - plugins[instance.name] = instance + plugin_name = str(getattr(instance, "name", "") or "").strip() + if not plugin_name: + xbmc.log( + f"Plugin {cls.__name__} wurde ohne Name registriert und wird uebersprungen.", + xbmc.LOGWARNING, + ) + continue + if plugin_name in plugins: + xbmc.log( + f"Plugin-Name doppelt ({plugin_name}), {cls.__name__} wird uebersprungen.", + xbmc.LOGWARNING, + ) + continue + plugins[plugin_name] = instance + plugins = dict(sorted(plugins.items(), key=lambda item: item[0].casefold())) _PLUGIN_CACHE = plugins return plugins def _run_async(coro): """Fuehrt eine Coroutine aus, auch wenn Kodi bereits einen Event-Loop hat.""" + _ensure_windows_selector_policy() + + def _run_with_asyncio_run(): + return asyncio.run(coro) + try: - loop = asyncio.get_event_loop() + running_loop = asyncio.get_running_loop() except RuntimeError: - loop = None - if loop and loop.is_running(): - temp_loop = asyncio.new_event_loop() - try: - return temp_loop.run_until_complete(coro) - finally: - temp_loop.close() - return asyncio.run(coro) + running_loop = None + + if running_loop and running_loop.is_running(): + result_box: dict[str, object] = {} + error_box: dict[str, BaseException] = {} + + def _worker() -> None: + try: + result_box["value"] = _run_with_asyncio_run() + except 
BaseException as exc: # pragma: no cover - defensive + error_box["error"] = exc + + worker = threading.Thread(target=_worker, name="viewit-async-runner") + worker.start() + worker.join() + if "error" in error_box: + raise error_box["error"] + return result_box.get("value") + + return _run_with_asyncio_run() def _series_url_params(plugin: BasisPlugin, title: str) -> dict[str, str]: @@ -1276,10 +1352,37 @@ def _series_url_params(plugin: BasisPlugin, title: str) -> dict[str, str]: return {"series_url": series_url} if series_url else {} +def _clean_search_titles(values: list[str]) -> list[str]: + """Filtert offensichtliche Platzhalter und dedupliziert Treffer.""" + blocked = { + "stream", + "streams", + "film", + "movie", + "play", + "details", + "details/play", + } + cleaned: list[str] = [] + seen: set[str] = set() + for raw in values: + title = (raw or "").strip() + if not title: + continue + key = title.casefold() + if key in blocked: + continue + if key in seen: + continue + seen.add(key) + cleaned.append(title) + return cleaned + + def _show_search() -> None: _log("Suche gestartet.") dialog = xbmcgui.Dialog() - query = dialog.input("Serientitel eingeben", type=xbmcgui.INPUT_ALPHANUM).strip() + query = dialog.input("Titel eingeben", type=xbmcgui.INPUT_ALPHANUM).strip() if not query: _log("Suche abgebrochen (leere Eingabe).", xbmc.LOGDEBUG) _show_root_menu() @@ -1294,35 +1397,46 @@ def _show_search_results(query: str) -> None: _set_content(handle, "tvshows") plugins = _discover_plugins() if not plugins: - xbmcgui.Dialog().notification("Suche", "Keine Plugins gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Suche", "Keine Quellen gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return list_items: list[dict[str, object]] = [] canceled = False plugin_entries = list(plugins.items()) total_plugins = max(1, len(plugin_entries)) - with _progress_dialog("Suche läuft", "Suche gestartet…") as progress: + with 
_progress_dialog("Suche laeuft", "Suche startet...") as progress: for plugin_index, (plugin_name, plugin) in enumerate(plugin_entries, start=1): range_start = int(((plugin_index - 1) / float(total_plugins)) * 100) range_end = int((plugin_index / float(total_plugins)) * 100) - canceled = progress(range_start, f"{plugin_name} ({plugin_index}/{total_plugins}) Suche…") + canceled = progress(range_start, f"{plugin_name} ({plugin_index}/{total_plugins}) Suche...") if canceled: break + plugin_progress = lambda msg="", pct=None: progress( # noqa: E731 - kompakte Callback-Bruecke + max(range_start, min(range_end, int(pct))) if pct is not None else range_start + 20, + f"{plugin_name} ({plugin_index}/{total_plugins}) {str(msg or 'Suche...').strip()}", + ) + search_coro = _call_plugin_search(plugin, query, progress_callback=plugin_progress) try: - results = _run_async(plugin.search_titles(query)) + results = _run_async(search_coro) except Exception as exc: + if inspect.iscoroutine(search_coro): + try: + search_coro.close() + except Exception: + pass _log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) continue - results = [str(t).strip() for t in (results or []) if t and str(t).strip()] + results = _clean_search_titles([str(t).strip() for t in (results or []) if t and str(t).strip()]) _log(f"Treffer ({plugin_name}): {len(results)}", xbmc.LOGDEBUG) - plugin_meta = _collect_plugin_metadata(plugin, results) + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, results) if use_source else {} tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - show_tmdb = _tmdb_enabled() show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) - prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) - tmdb_titles = list(results) - if show_tmdb and 
prefer_source: + tmdb_titles = list(results) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: tmdb_titles = [] for title in results: meta = plugin_meta.get(title) @@ -1333,7 +1447,7 @@ def _show_search_results(query: str) -> None: if show_tmdb and tmdb_titles: canceled = progress( range_start + int((range_end - range_start) * 0.35), - f"{plugin_name} ({plugin_index}/{total_plugins}) Metadaten…", + f"{plugin_name} ({plugin_index}/{total_plugins}) Metadaten...", ) if canceled: break @@ -1376,7 +1490,7 @@ def _show_search_results(query: str) -> None: if canceled: break if not canceled: - progress(100, "Suche abgeschlossen") + progress(100, "Suche fertig") if canceled and not list_items: xbmcgui.Dialog().notification("Suche", "Suche abgebrochen.", xbmcgui.NOTIFICATION_INFO, 2500) xbmcplugin.endOfDirectory(handle) @@ -1396,12 +1510,79 @@ def _show_search_results(query: str) -> None: xbmcplugin.endOfDirectory(handle) +def _movie_seed_for_title(plugin: BasisPlugin, title: str, seasons: list[str]) -> tuple[str, str] | None: + """Ermittelt ein Film-Seed (Season/Episode), um direkt Provider anzeigen zu können.""" + if not seasons or len(seasons) != 1: + return None + season = str(seasons[0] or "").strip() + if not season: + return None + try: + episodes = [str(value or "").strip() for value in (plugin.episodes_for(title, season) or [])] + except Exception: + return None + episodes = [value for value in episodes if value] + if len(episodes) != 1: + return None + episode = episodes[0] + season_key = season.casefold() + episode_key = episode.casefold() + title_key = (title or "").strip().casefold() + generic_seasons = {"film", "movie", "stream"} + generic_episodes = {"stream", "film", "play", title_key} + if season_key in generic_seasons and episode_key in generic_episodes: + return (season, episode) + return None + + +def _show_movie_streams( + plugin_name: str, + title: str, + season: str, + episode: str, + *, + series_url: str = "", +) -> None: + handle 
= _get_handle() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + xbmcgui.Dialog().notification("Streams", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + + if series_url: + remember_series_url = getattr(plugin, "remember_series_url", None) + if callable(remember_series_url): + try: + remember_series_url(title, series_url) + except Exception: + pass + + xbmcplugin.setPluginCategory(handle, f"{title} - Streams") + _set_content(handle, "videos") + + base_params = {"plugin": plugin_name, "title": title, "season": season, "episode": episode} + if series_url: + base_params["series_url"] = series_url + + # Hoster bleiben im Auswahldialog der Wiedergabe (wie bisher). + _add_directory_item( + handle, + title, + "play_episode", + dict(base_params), + is_folder=False, + info_labels={"title": title, "mediatype": "movie"}, + ) + xbmcplugin.endOfDirectory(handle) + + def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: handle = _get_handle() _log(f"Staffeln laden: {plugin_name} / {title}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Staffeln", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Staffeln", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return if series_url: @@ -1412,67 +1593,16 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: except Exception: pass - # Einschalten liefert Filme. Für Playback soll nach dem Öffnen des Titels direkt ein - # einzelnes abspielbares Item angezeigt werden: -> ( abspielbar). - # Wichtig: ohne zusätzliche Netzwerkanfragen (sonst bleibt Kodi ggf. im Busy-Spinner hängen). 
- if (plugin_name or "").casefold() == "einschalten" and _get_setting_bool("einschalten_enable_playback", default=False): - xbmcplugin.setPluginCategory(handle, title) - _set_content(handle, "movies") - playstate = _title_playstate(plugin_name, title) - info_labels: dict[str, object] = {"title": title, "mediatype": "movie"} - info_labels = _apply_playstate_to_info(info_labels, playstate) - display_label = _label_with_playstate(title, playstate) - movie_params = {"plugin": plugin_name, "title": title} - if series_url: - movie_params["series_url"] = series_url - _add_directory_item( - handle, - display_label, - "play_movie", - movie_params, - is_folder=False, - info_labels=info_labels, - ) - xbmcplugin.endOfDirectory(handle) - return - - # Optional: Plugins können schnell (ohne Detail-Request) sagen, ob ein Titel ein Film ist. - # Dann zeigen wir direkt ein einzelnes abspielbares Item: -> (). - is_movie = getattr(plugin, "is_movie", None) - if callable(is_movie): - try: - if bool(is_movie(title)): - xbmcplugin.setPluginCategory(handle, title) - _set_content(handle, "movies") - playstate = _title_playstate(plugin_name, title) - info_labels: dict[str, object] = {"title": title, "mediatype": "movie"} - info_labels = _apply_playstate_to_info(info_labels, playstate) - display_label = _label_with_playstate(title, playstate) - movie_params = {"plugin": plugin_name, "title": title} - if series_url: - movie_params["series_url"] = series_url - else: - movie_params.update(_series_url_params(plugin, title)) - _add_directory_item( - handle, - display_label, - "play_movie", - movie_params, - is_folder=False, - info_labels=info_labels, - ) - xbmcplugin.endOfDirectory(handle) - return - except Exception: - pass - + use_source, show_tmdb, _prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_enabled() + ) title_info_labels: dict[str, str] | None = None title_art: dict[str, str] | None = None title_cast: list[TmdbCastMember] | None = None meta_getter = 
getattr(plugin, "metadata_for", None) - if callable(meta_getter): + if use_source and callable(meta_getter): try: - with _busy_dialog(): + with _busy_dialog("Metadaten werden geladen..."): meta_labels, meta_art, meta_cast = meta_getter(title) if isinstance(meta_labels, dict): title_info_labels = {str(k): str(v) for k, v in meta_labels.items() if v} @@ -1488,17 +1618,38 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: seasons = plugin.seasons_for(title) except Exception as exc: _log(f"Staffeln laden fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Staffeln", "Konnte Staffeln nicht laden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Staffeln", "Staffeln konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return + movie_seed = _movie_seed_for_title(plugin, title, seasons) + if movie_seed is not None: + # Dieser Action-Pfad wurde als Verzeichnis aufgerufen. Ohne endOfDirectory() + # bleibt Kodi im Busy-Zustand, auch wenn wir direkt in die Wiedergabe springen. + try: + xbmcplugin.endOfDirectory(handle, succeeded=False) + except Exception: + try: + xbmcplugin.endOfDirectory(handle) + except Exception: + pass + _play_episode( + plugin_name, + title, + movie_seed[0], + movie_seed[1], + series_url=series_url, + ) + return + count = len(seasons) suffix = "Staffel" if count == 1 else "Staffeln" xbmcplugin.setPluginCategory(handle, f"{title} ({count} {suffix})") _set_content(handle, "seasons") # Staffel-Metadaten (Plot/Poster) optional via TMDB. 
- _tmdb_labels_and_art(title) - api_key = _get_setting_string("tmdb_api_key").strip() + if show_tmdb: + _tmdb_labels_and_art(title) + api_key = _get_setting_string("tmdb_api_key").strip() if show_tmdb else "" language = _get_setting_string("tmdb_language").strip() or "de-DE" show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) @@ -1511,8 +1662,8 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: art: dict[str, str] | None = None season_number = _extract_first_int(season) if api_key and season_number is not None: - cache_key = (_TMDB_ID_CACHE.get((title or "").strip().casefold(), 0), season_number, language, flags) - cached = _TMDB_SEASON_SUMMARY_CACHE.get(cache_key) + cache_key = (_tmdb_cache_get(_TMDB_ID_CACHE, (title or "").strip().casefold(), 0), season_number, language, flags) + cached = _tmdb_cache_get(_TMDB_SEASON_SUMMARY_CACHE, cache_key) if cached is None and cache_key[0]: try: meta = lookup_tv_season_summary( @@ -1535,7 +1686,7 @@ def _show_seasons(plugin_name: str, title: str, series_url: str = "") -> None: if show_art and meta.poster: art_map = {"thumb": meta.poster, "poster": meta.poster} cached = (labels, art_map) - _TMDB_SEASON_SUMMARY_CACHE[cache_key] = cached + _tmdb_cache_set(_TMDB_SEASON_SUMMARY_CACHE, cache_key, cached) if cached is not None: info_labels, art = cached merged_labels = dict(info_labels or {}) @@ -1568,7 +1719,7 @@ def _show_episodes(plugin_name: str, title: str, season: str, series_url: str = _log(f"Episoden laden: {plugin_name} / {title} / {season}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Episoden", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Episoden", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return if series_url: @@ -1587,13 +1738,42 @@ def _show_episodes(plugin_name: str, 
title: str, season: str, series_url: str = episodes = list(plugin.episodes_for(title, season)) if episodes: - show_info, show_art, show_cast = _tmdb_labels_and_art(title) + episode_url_getter = getattr(plugin, "episode_url_for", None) + supports_direct_episode_url = callable(getattr(plugin, "stream_link_for_url", None)) + use_source, show_tmdb, _prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_enabled() + ) + show_info: dict[str, str] = {} + show_art: dict[str, str] = {} + show_cast: list[TmdbCastMember] | None = None + if show_tmdb: + show_info, show_art, show_cast = _tmdb_labels_and_art(title) + elif use_source: + meta_getter = getattr(plugin, "metadata_for", None) + if callable(meta_getter): + try: + with _busy_dialog("Episoden-Metadaten werden geladen..."): + meta_labels, meta_art, meta_cast = meta_getter(title) + if isinstance(meta_labels, dict): + show_info = {str(k): str(v) for k, v in meta_labels.items() if v} + if isinstance(meta_art, dict): + show_art = {str(k): str(v) for k, v in meta_art.items() if v} + if isinstance(meta_cast, list): + show_cast = meta_cast # noqa: PGH003 + except Exception: + pass + show_fanart = (show_art or {}).get("fanart") if isinstance(show_art, dict) else "" show_poster = (show_art or {}).get("poster") if isinstance(show_art, dict) else "" - with _busy_dialog(): + with _busy_dialog("Episoden werden aufbereitet..."): for episode in episodes: - info_labels, art = _tmdb_episode_labels_and_art(title=title, season_label=season, episode_label=episode) - episode_cast = _tmdb_episode_cast(title=title, season_label=season, episode_label=episode) + if show_tmdb: + info_labels, art = _tmdb_episode_labels_and_art( + title=title, season_label=season, episode_label=episode + ) + episode_cast = _tmdb_episode_cast(title=title, season_label=season, episode_label=episode) + else: + info_labels, art, episode_cast = {}, {}, [] merged_info = dict(show_info or {}) merged_info.update(dict(info_labels or {})) merged_art: 
dict[str, str] = {} @@ -1623,11 +1803,25 @@ def _show_episodes(plugin_name: str, title: str, season: str, series_url: str = merged_info = _apply_playstate_to_info(merged_info, _get_playstate(key)) display_label = episode + play_params = { + "plugin": plugin_name, + "title": title, + "season": season, + "episode": episode, + "series_url": series_url, + } + if supports_direct_episode_url and callable(episode_url_getter): + try: + episode_url = str(episode_url_getter(title, season, episode) or "").strip() + except Exception: + episode_url = "" + if episode_url: + play_params["url"] = episode_url _add_directory_item( handle, display_label, "play_episode", - {"plugin": plugin_name, "title": title, "season": season, "episode": episode}, + play_params, is_folder=False, info_labels=merged_info, art=merged_art, @@ -1649,7 +1843,7 @@ def _show_genre_sources() -> None: sources.append((plugin_name, plugin)) if not sources: - xbmcgui.Dialog().notification("Genres", "Keine Genre-Quellen gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Genres", "Keine Quellen mit Genres gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -1669,11 +1863,15 @@ def _show_genres(plugin_name: str) -> None: _log(f"Genres laden: {plugin_name}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Genres", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Genres", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return try: - genres = plugin.genres() + genres = _run_with_progress( + "Genres", + f"{plugin_name}: Genres werden geladen...", + lambda: plugin.genres(), + ) except Exception as exc: _log(f"Genres konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Genres", "Genres konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -1706,7 +1904,7 @@ 
def _show_categories(plugin_name: str) -> None: _log(f"Kategorien laden: {plugin_name}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Kategorien", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Kategorien", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return getter = getattr(plugin, "categories", None) @@ -1715,7 +1913,11 @@ def _show_categories(plugin_name: str) -> None: xbmcplugin.endOfDirectory(handle) return try: - categories = list(getter() or []) + categories = _run_with_progress( + "Kategorien", + f"{plugin_name}: Kategorien werden geladen...", + lambda: list(getter() or []), + ) except Exception as exc: _log(f"Kategorien konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Kategorien", "Kategorien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -1739,14 +1941,14 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - handle = _get_handle() plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Kategorien", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Kategorien", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page = max(1, int(page or 1)) paging_getter = getattr(plugin, "titles_for_genre_page", None) if not callable(paging_getter): - xbmcgui.Dialog().notification("Kategorien", "Paging nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Kategorien", "Seitenwechsel nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -1774,7 +1976,11 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - ) try: - titles = list(paging_getter(category, page) or []) + titles = 
_run_with_progress( + "Kategorien", + f"{plugin_name}: {category} Seite {page} wird geladen...", + lambda: list(paging_getter(category, page) or []), + ) except Exception as exc: _log(f"Kategorie-Seite konnte nicht geladen werden ({plugin_name}/{category} p{page}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Kategorien", "Seite konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -1784,16 +1990,16 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - titles = [str(t).strip() for t in titles if t and str(t).strip()] titles.sort(key=lambda value: value.casefold()) - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if titles: - plugin_meta = _collect_plugin_metadata(plugin, titles) - show_tmdb = _tmdb_enabled() + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {} show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) - prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - tmdb_titles = list(titles) - if show_tmdb and prefer_source: + tmdb_titles = list(titles) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: tmdb_titles = [] for title in titles: meta = plugin_meta.get(title) @@ -1802,53 +2008,33 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): tmdb_titles.append(title) if show_tmdb and tmdb_titles: - with _busy_dialog(): + with _busy_dialog("Genre-Liste wird geladen..."): tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) - if show_tmdb: - for title in titles: - tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) - 
meta = plugin_meta.get(title) - info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - display_label, - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: - for title in titles: - playstate = _title_playstate(plugin_name, title) - meta = plugin_meta.get(title) - info_labels, art, cast = _merge_metadata(title, {}, {}, None, meta) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=_apply_playstate_to_info(info_labels, playstate), - art=art, - cast=cast, - ) + for title in titles: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels.setdefault("mediatype", "tvshow") + if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": + info_labels.setdefault("tvshowtitle", title) + 
playstate = _title_playstate(plugin_name, title) + info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = _label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + direct_play = bool( + plugin_name.casefold() == "einschalten" + and _get_setting_bool("einschalten_enable_playback", default=False) + ) + _add_directory_item( + handle, + display_label, + "play_movie" if direct_play else "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=not direct_play, + info_labels=info_labels, + art=art, + cast=cast, + ) show_next = False if total_pages is not None: @@ -1864,7 +2050,7 @@ def _show_category_titles_page(plugin_name: str, category: str, page: int = 1) - if show_next: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "category_titles_page", {"plugin": plugin_name, "category": category, "page": str(page + 1)}, is_folder=True, @@ -1875,14 +2061,14 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None handle = _get_handle() plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Genres", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Genres", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page = max(1, int(page or 1)) paging_getter = getattr(plugin, "titles_for_genre_page", None) if not callable(paging_getter): - xbmcgui.Dialog().notification("Genres", "Paging nicht verfügbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Genres", "Seitenwechsel nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -1910,7 +2096,11 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None ) try: - titles = list(paging_getter(genre, page) or []) + titles = _run_with_progress( + 
"Genres", + f"{plugin_name}: {genre} Seite {page} wird geladen...", + lambda: list(paging_getter(genre, page) or []), + ) except Exception as exc: _log(f"Genre-Seite konnte nicht geladen werden ({plugin_name}/{genre} p{page}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Genres", "Seite konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -1920,16 +2110,16 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None titles = [str(t).strip() for t in titles if t and str(t).strip()] titles.sort(key=lambda value: value.casefold()) - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if titles: - plugin_meta = _collect_plugin_metadata(plugin, titles) - show_tmdb = show_tmdb and _tmdb_enabled() + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {} show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) - prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - tmdb_titles = list(titles) - if show_tmdb and prefer_source: + tmdb_titles = list(titles) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: tmdb_titles = [] for title in titles: meta = plugin_meta.get(title) @@ -1938,7 +2128,7 @@ def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): tmdb_titles.append(title) if show_tmdb and tmdb_titles: - with _busy_dialog(): + with _busy_dialog("Genre-Seite wird geladen..."): tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) for title in titles: tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) @@ -1980,7 +2170,7 @@ 
def _show_genre_titles_page(plugin_name: str, genre: str, page: int = 1) -> None if show_next: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "genre_titles_page", {"plugin": plugin_name, "genre": genre, "page": str(page + 1)}, is_folder=True, @@ -1993,16 +2183,20 @@ def _show_alpha_index(plugin_name: str) -> None: _log(f"A-Z laden: {plugin_name}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("A-Z", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("A-Z", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return getter = getattr(plugin, "alpha_index", None) if not callable(getter): - xbmcgui.Dialog().notification("A-Z", "A-Z nicht verfügbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("A-Z", "A-Z nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return try: - letters = list(getter() or []) + letters = _run_with_progress( + "A-Z", + f"{plugin_name}: Index wird geladen...", + lambda: list(getter() or []), + ) except Exception as exc: _log(f"A-Z konnte nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("A-Z", "A-Z konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -2026,14 +2220,14 @@ def _show_alpha_titles_page(plugin_name: str, letter: str, page: int = 1) -> Non handle = _get_handle() plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("A-Z", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("A-Z", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page = max(1, int(page or 1)) paging_getter = getattr(plugin, "titles_for_alpha_page", None) if not callable(paging_getter): - xbmcgui.Dialog().notification("A-Z", "Paging nicht verfügbar.", xbmcgui.NOTIFICATION_INFO, 
3000) + xbmcgui.Dialog().notification("A-Z", "Seitenwechsel nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2061,7 +2255,11 @@ def _show_alpha_titles_page(plugin_name: str, letter: str, page: int = 1) -> Non ) try: - titles = list(paging_getter(letter, page) or []) + titles = _run_with_progress( + "A-Z", + f"{plugin_name}: {letter} Seite {page} wird geladen...", + lambda: list(paging_getter(letter, page) or []), + ) except Exception as exc: _log(f"A-Z Seite konnte nicht geladen werden ({plugin_name}/{letter} p{page}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("A-Z", "Seite konnte nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -2071,50 +2269,52 @@ def _show_alpha_titles_page(plugin_name: str, letter: str, page: int = 1) -> Non titles = [str(t).strip() for t in titles if t and str(t).strip()] titles.sort(key=lambda value: value.casefold()) - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if titles: - if show_tmdb: - with _busy_dialog(): - tmdb_prefetched = _tmdb_labels_and_art_bulk(titles) - for title in titles: - info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - display_label, - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not 
direct_play, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {} + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + tmdb_titles = list(titles) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: + tmdb_titles = [] for title in titles: - playstate = _title_playstate(plugin_name, title) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=_apply_playstate_to_info({"title": title}, playstate), - ) + meta = plugin_meta.get(title) + meta_labels = meta[0] if meta else {} + meta_art = meta[1] if meta else {} + if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): + tmdb_titles.append(title) + if show_tmdb and tmdb_titles: + with _busy_dialog("A-Z Liste wird geladen..."): + tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) + for title in titles: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) + info_labels.setdefault("mediatype", "tvshow") + if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": + info_labels.setdefault("tvshowtitle", title) + playstate = _title_playstate(plugin_name, title) + 
info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = _label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + direct_play = bool( + plugin_name.casefold() == "einschalten" + and _get_setting_bool("einschalten_enable_playback", default=False) + ) + _add_directory_item( + handle, + display_label, + "play_movie" if direct_play else "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=not direct_play, + info_labels=info_labels, + art=art, + cast=cast, + ) show_next = False if total_pages is not None: @@ -2123,7 +2323,7 @@ def _show_alpha_titles_page(plugin_name: str, letter: str, page: int = 1) -> Non if show_next: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "alpha_titles_page", {"plugin": plugin_name, "letter": letter, "page": str(page + 1)}, is_folder=True, @@ -2136,14 +2336,14 @@ def _show_series_catalog(plugin_name: str, page: int = 1) -> None: plugin_name = (plugin_name or "").strip() plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Serien", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Serien", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page = max(1, int(page or 1)) paging_getter = getattr(plugin, "series_catalog_page", None) if not callable(paging_getter): - xbmcgui.Dialog().notification("Serien", "Serien nicht verfügbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Serien", "Serienkatalog nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2171,7 +2371,11 @@ def _show_series_catalog(plugin_name: str, page: int = 1) -> None: ) try: - titles = list(paging_getter(page) or []) + titles = _run_with_progress( + "Serien", + f"{plugin_name}: Seite {page} wird geladen...", + lambda: 
list(paging_getter(page) or []), + ) except Exception as exc: _log(f"Serien konnten nicht geladen werden ({plugin_name} p{page}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Serien", "Serien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -2181,42 +2385,48 @@ def _show_series_catalog(plugin_name: str, page: int = 1) -> None: titles = [str(t).strip() for t in titles if t and str(t).strip()] titles.sort(key=lambda value: value.casefold()) - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if titles: - if show_tmdb: - with _busy_dialog(): - tmdb_prefetched = _tmdb_labels_and_art_bulk(titles) - for title in titles: - info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - _add_directory_item( - handle, - display_label, - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, titles) if use_source else {} + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + tmdb_titles = list(titles) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: + tmdb_titles = [] for title in titles: - 
playstate = _title_playstate(plugin_name, title) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=_apply_playstate_to_info({"title": title}, playstate), - ) + meta = plugin_meta.get(title) + meta_labels = meta[0] if meta else {} + meta_art = meta[1] if meta else {} + if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): + tmdb_titles.append(title) + if show_tmdb and tmdb_titles: + with _busy_dialog("A-Z Seite wird geladen..."): + tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) + for title in titles: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) + info_labels.setdefault("mediatype", "tvshow") + if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": + info_labels.setdefault("tvshowtitle", title) + playstate = _title_playstate(plugin_name, title) + info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = _label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + _add_directory_item( + handle, + display_label, + "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=True, + info_labels=info_labels, + art=art, + cast=cast, + ) show_next = False if total_pages is not None: @@ -2232,7 +2442,7 @@ def _show_series_catalog(plugin_name: str, page: int = 1) -> None: if show_next: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "series_catalog", {"plugin": plugin_name, "page": str(page + 1)}, is_folder=True, @@ -2370,22 +2580,30 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: if 
plugin_name: plugin = _discover_plugins().get(plugin_name) if plugin is None or not _plugin_has_capability(plugin, "popular_series"): - xbmcgui.Dialog().notification("Beliebte Serien", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(POPULAR_MENU_LABEL, "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return try: popular_getter = getattr(plugin, "popular_series", None) if callable(popular_getter): - titles = list(popular_getter() or []) + titles = _run_with_progress( + POPULAR_MENU_LABEL, + f"{plugin_name}: Liste wird geladen...", + lambda: list(popular_getter() or []), + ) else: label = _popular_genre_label(plugin) if not label: titles = [] else: - titles = list(plugin.titles_for_genre(label) or []) + titles = _run_with_progress( + POPULAR_MENU_LABEL, + f"{plugin_name}: Liste wird geladen...", + lambda: list(plugin.titles_for_genre(label) or []), + ) except Exception as exc: - _log(f"Beliebte Serien konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Beliebte Serien", "Serien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log(f"{POPULAR_MENU_LABEL} konnte nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification(POPULAR_MENU_LABEL, "Serien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2394,7 +2612,7 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: total = len(titles) total_pages = max(1, (total + page_size - 1) // page_size) page = min(page, total_pages) - xbmcplugin.setPluginCategory(handle, f"Beliebte Serien [{plugin_name}] ({page}/{total_pages})") + xbmcplugin.setPluginCategory(handle, f"{POPULAR_MENU_LABEL} [{plugin_name}] ({page}/{total_pages})") _set_content(handle, "tvshows") if total_pages > 1 and page > 1: @@ -2410,16 +2628,16 @@ def _show_popular(plugin_name: str | None = 
None, page: int = 1) -> None: end = start + page_size page_items = titles[start:end] - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if page_items: - plugin_meta = _collect_plugin_metadata(plugin, page_items) - show_tmdb = show_tmdb and _tmdb_enabled() + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, page_items) if use_source else {} show_plot = _get_setting_bool("tmdb_show_plot", default=True) show_art = _get_setting_bool("tmdb_show_art", default=True) - prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} - tmdb_titles = list(page_items) - if show_tmdb and prefer_source: + tmdb_titles = list(page_items) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: tmdb_titles = [] for title in page_items: meta = plugin_meta.get(title) @@ -2428,10 +2646,10 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): tmdb_titles.append(title) if show_tmdb and tmdb_titles: - with _busy_dialog(): + with _busy_dialog(f"{POPULAR_MENU_LABEL} wird geladen..."): tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) for title in page_items: - tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) meta = plugin_meta.get(title) info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) info_labels.setdefault("mediatype", "tvshow") @@ -2455,7 +2673,7 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: if total_pages > 1 and page < total_pages: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "popular", {"plugin": plugin_name, 
"page": str(page + 1)}, is_folder=True, @@ -2465,15 +2683,15 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: sources = _plugins_with_popular() if not sources: - xbmcgui.Dialog().notification("Beliebte Serien", "Keine Quellen gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(POPULAR_MENU_LABEL, "Keine Quellen gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return - xbmcplugin.setPluginCategory(handle, "Beliebte Serien") + xbmcplugin.setPluginCategory(handle, POPULAR_MENU_LABEL) for name, plugin, _label in sources: _add_directory_item( handle, - f"Beliebte Serien [{plugin.name}]", + f"{POPULAR_MENU_LABEL} [{plugin.name}]", "popular", {"plugin": name, "page": "1"}, is_folder=True, @@ -2481,7 +2699,7 @@ def _show_popular(plugin_name: str | None = None, page: int = 1) -> None: xbmcplugin.endOfDirectory(handle) -def _show_new_titles(plugin_name: str, page: int = 1) -> None: +def _show_new_titles(plugin_name: str, page: int = 1, *, action_name: str = "new_titles") -> None: handle = _get_handle() page_size = 10 page = max(1, int(page or 1)) @@ -2489,13 +2707,13 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None: plugin_name = (plugin_name or "").strip() plugin = _discover_plugins().get(plugin_name) if plugin is None or not _plugin_has_capability(plugin, "new_titles"): - xbmcgui.Dialog().notification("Neue Titel", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return getter = getattr(plugin, "new_titles", None) if not callable(getter): - xbmcgui.Dialog().notification("Neue Titel", "Nicht verfügbar.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Diese Liste ist nicht verfuegbar.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2503,31 +2721,39 @@ def 
_show_new_titles(plugin_name: str, page: int = 1) -> None: has_more_getter = getattr(plugin, "new_titles_has_more", None) if callable(paging_getter): - xbmcplugin.setPluginCategory(handle, f"Neue Titel [{plugin_name}] ({page})") + xbmcplugin.setPluginCategory(handle, f"{LATEST_MENU_LABEL} [{plugin_name}] ({page})") _set_content(handle, "movies" if plugin_name.casefold() == "einschalten" else "tvshows") if page > 1: _add_directory_item( handle, "Vorherige Seite", - "new_titles", + action_name, {"plugin": plugin_name, "page": str(page - 1)}, is_folder=True, ) try: - page_items = list(paging_getter(page) or []) + page_items = _run_with_progress( + LATEST_MENU_LABEL, + f"{plugin_name}: Seite {page} wird geladen...", + lambda: list(paging_getter(page) or []), + ) except Exception as exc: - _log(f"Neue Titel konnten nicht geladen werden ({plugin_name} p{page}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Neue Titel", "Titel konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log(f"{LATEST_MENU_LABEL} konnten nicht geladen werden ({plugin_name} p{page}): {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Titel konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return page_items = [str(t).strip() for t in page_items if t and str(t).strip()] page_items.sort(key=lambda value: value.casefold()) else: try: - titles = list(getter() or []) + titles = _run_with_progress( + LATEST_MENU_LABEL, + f"{plugin_name}: Liste wird geladen...", + lambda: list(getter() or []), + ) except Exception as exc: - _log(f"Neue Titel konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Neue Titel", "Titel konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log(f"{LATEST_MENU_LABEL} konnten nicht geladen werden ({plugin_name}): {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Titel konnten nicht geladen 
werden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2536,21 +2762,21 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None: total = len(titles) if total == 0: xbmcgui.Dialog().notification( - "Neue Titel", - "Keine Titel gefunden (Basis-URL/Index prüfen).", + LATEST_MENU_LABEL, + "Keine Titel gefunden. Bitte Basis-URL oder Index pruefen.", xbmcgui.NOTIFICATION_INFO, 4000, ) total_pages = max(1, (total + page_size - 1) // page_size) page = min(page, total_pages) - xbmcplugin.setPluginCategory(handle, f"Neue Titel [{plugin_name}] ({page}/{total_pages})") + xbmcplugin.setPluginCategory(handle, f"{LATEST_MENU_LABEL} [{plugin_name}] ({page}/{total_pages})") _set_content(handle, "movies" if plugin_name.casefold() == "einschalten" else "tvshows") if total_pages > 1 and page > 1: _add_directory_item( handle, "Vorherige Seite", - "new_titles", + action_name, {"plugin": plugin_name, "page": str(page - 1)}, is_folder=True, ) @@ -2558,48 +2784,50 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None: start = (page - 1) * page_size end = start + page_size page_items = titles[start:end] - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) if page_items: - if show_tmdb: - with _busy_dialog(): - tmdb_prefetched = _tmdb_labels_and_art_bulk(page_items) - for title in page_items: - info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "movie") - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - display_label, - "play_movie" if direct_play else 
"seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) + plugin_meta = _collect_plugin_metadata(plugin, page_items) if use_source else {} + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + tmdb_titles = list(page_items) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: + tmdb_titles = [] for title in page_items: - playstate = _title_playstate(plugin_name, title) - direct_play = bool( - plugin_name.casefold() == "einschalten" - and _get_setting_bool("einschalten_enable_playback", default=False) - ) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "play_movie" if direct_play else "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=not direct_play, - info_labels=_apply_playstate_to_info({"title": title}, playstate), - ) + meta = plugin_meta.get(title) + meta_labels = meta[0] if meta else {} + meta_art = meta[1] if meta else {} + if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): + tmdb_titles.append(title) + if show_tmdb and tmdb_titles: + with _busy_dialog(f"{LATEST_MENU_LABEL} wird geladen..."): + tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) + for title in page_items: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) + info_labels.setdefault("mediatype", "movie") + playstate = _title_playstate(plugin_name, 
title) + info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = _label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + direct_play = bool( + plugin_name.casefold() == "einschalten" + and _get_setting_bool("einschalten_enable_playback", default=False) + ) + _add_directory_item( + handle, + display_label, + "play_movie" if direct_play else "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=not direct_play, + info_labels=info_labels, + art=art, + cast=cast, + ) show_next = False if callable(paging_getter) and callable(has_more_getter): @@ -2613,8 +2841,8 @@ def _show_new_titles(plugin_name: str, page: int = 1) -> None: if show_next: _add_directory_item( handle, - "Nächste Seite", - "new_titles", + "Naechste Seite", + action_name, {"plugin": plugin_name, "page": str(page + 1)}, is_folder=True, ) @@ -2626,25 +2854,28 @@ def _show_latest_episodes(plugin_name: str, page: int = 1) -> None: plugin_name = (plugin_name or "").strip() plugin = _discover_plugins().get(plugin_name) if not plugin: - xbmcgui.Dialog().notification("Neueste Folgen", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return getter = getattr(plugin, "latest_episodes", None) if not callable(getter): - xbmcgui.Dialog().notification("Neueste Folgen", "Nicht unterstützt.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Diese Quelle bietet das nicht an.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return - xbmcplugin.setPluginCategory(handle, f"{plugin_name}: Neueste Folgen") + xbmcplugin.setPluginCategory(handle, f"{plugin_name}: {LATEST_MENU_LABEL}") _set_content(handle, "episodes") try: - with _busy_dialog(): - entries = list(getter(page) or []) + entries = 
_run_with_progress( + LATEST_MENU_LABEL, + f"{plugin_name}: Seite {page} wird geladen...", + lambda: list(getter(page) or []), + ) except Exception as exc: - _log(f"Neueste Folgen fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Neueste Folgen", "Abruf fehlgeschlagen.", xbmcgui.NOTIFICATION_INFO, 3000) + _log(f"{LATEST_MENU_LABEL} fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Abruf fehlgeschlagen.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2697,13 +2928,32 @@ def _show_latest_episodes(plugin_name: str, page: int = 1) -> None: xbmcplugin.endOfDirectory(handle) +def _show_latest_titles(plugin_name: str, page: int = 1) -> None: + plugin_name = (plugin_name or "").strip() + plugin = _discover_plugins().get(plugin_name) + if plugin is None: + handle = _get_handle() + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + return + if _plugin_has_capability(plugin, "latest_episodes"): + _show_latest_episodes(plugin_name, page) + return + if _plugin_has_capability(plugin, "new_titles"): + _show_new_titles(plugin_name, page, action_name="latest_titles") + return + handle = _get_handle() + xbmcgui.Dialog().notification(LATEST_MENU_LABEL, "Diese Quelle bietet das nicht an.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcplugin.endOfDirectory(handle) + + def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page: int = 1) -> None: handle = _get_handle() page_size = 10 page = max(1, int(page or 1)) plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Genres", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Genres", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcplugin.endOfDirectory(handle) return @@ -2711,7 +2961,12 @@ def 
_show_genre_series_group(plugin_name: str, genre: str, group_code: str, page grouped_has_more = getattr(plugin, "genre_group_has_more", None) if callable(grouped_paging): try: - page_items = [str(t).strip() for t in list(grouped_paging(genre, group_code, page, page_size) or []) if t and str(t).strip()] + raw_items = _run_with_progress( + "Genres", + f"{plugin_name}: {genre} [{group_code}] Seite {page} wird geladen...", + lambda: list(grouped_paging(genre, group_code, page, page_size) or []), + ) + page_items = [str(t).strip() for t in raw_items if t and str(t).strip()] except Exception as exc: _log(f"Genre-Serien konnten nicht geladen werden ({plugin_name}/{genre}/{group_code} p{page}): {exc}", xbmc.LOGWARNING) xbmcgui.Dialog().notification("Genres", "Serien konnten nicht geladen werden.", xbmcgui.NOTIFICATION_INFO, 3000) @@ -2719,7 +2974,9 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page return xbmcplugin.setPluginCategory(handle, f"{genre} [{group_code}] ({page})") - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) if page > 1: _add_directory_item( handle, @@ -2729,40 +2986,44 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page is_folder=True, ) if page_items: - if show_tmdb: - with _busy_dialog(): - tmdb_prefetched = _tmdb_labels_and_art_bulk(page_items) - for title in page_items: - info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = 
_label_with_playstate(display_label, playstate) - _add_directory_item( - handle, - display_label, - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: + plugin_meta = _collect_plugin_metadata(plugin, page_items) if use_source else {} + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + tmdb_titles = list(page_items) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: + tmdb_titles = [] for title in page_items: - playstate = _title_playstate(plugin_name, title) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=_apply_playstate_to_info({"title": title}, playstate), - ) + meta = plugin_meta.get(title) + meta_labels = meta[0] if meta else {} + meta_art = meta[1] if meta else {} + if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): + tmdb_titles.append(title) + if show_tmdb and tmdb_titles: + with _busy_dialog("Genre-Gruppe wird geladen..."): + tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) + for title in page_items: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) + info_labels.setdefault("mediatype", "tvshow") + if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": + info_labels.setdefault("tvshowtitle", title) + playstate = _title_playstate(plugin_name, title) + info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = 
_label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + _add_directory_item( + handle, + display_label, + "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=True, + info_labels=info_labels, + art=art, + cast=cast, + ) show_next = False if callable(grouped_has_more): try: @@ -2774,7 +3035,7 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page if show_next: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "genre_series_group", {"plugin": plugin_name, "genre": genre, "group": group_code, "page": str(page + 1)}, is_folder=True, @@ -2808,48 +3069,54 @@ def _show_genre_series_group(plugin_name: str, genre: str, group_code: str, page start = (page - 1) * page_size end = start + page_size page_items = filtered[start:end] - show_tmdb = _get_setting_bool("tmdb_genre_metadata", default=False) + use_source, show_tmdb, prefer_source = _metadata_policy( + plugin_name, plugin, allow_tmdb=_tmdb_list_enabled() + ) if page_items: - if show_tmdb: - with _busy_dialog(): - tmdb_prefetched = _tmdb_labels_and_art_bulk(page_items) - for title in page_items: - info_labels, art, cast = tmdb_prefetched.get(title, _tmdb_labels_and_art(title)) - info_labels = dict(info_labels or {}) - info_labels.setdefault("mediatype", "tvshow") - if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": - info_labels.setdefault("tvshowtitle", title) - playstate = _title_playstate(plugin_name, title) - info_labels = _apply_playstate_to_info(dict(info_labels), playstate) - display_label = _label_with_duration(title, info_labels) - display_label = _label_with_playstate(display_label, playstate) - _add_directory_item( - handle, - display_label, - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=info_labels, - art=art, - cast=cast, - ) - else: + plugin_meta = 
_collect_plugin_metadata(plugin, page_items) if use_source else {} + show_plot = _get_setting_bool("tmdb_show_plot", default=True) + show_art = _get_setting_bool("tmdb_show_art", default=True) + tmdb_prefetched: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember]]] = {} + tmdb_titles = list(page_items) if show_tmdb else [] + if show_tmdb and prefer_source and use_source: + tmdb_titles = [] for title in page_items: - playstate = _title_playstate(plugin_name, title) - _add_directory_item( - handle, - _label_with_playstate(title, playstate), - "seasons", - {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, - is_folder=True, - info_labels=_apply_playstate_to_info({"title": title}, playstate), - ) + meta = plugin_meta.get(title) + meta_labels = meta[0] if meta else {} + meta_art = meta[1] if meta else {} + if _needs_tmdb(meta_labels, meta_art, want_plot=show_plot, want_art=show_art): + tmdb_titles.append(title) + if show_tmdb and tmdb_titles: + with _busy_dialog("Genre-Serien werden geladen..."): + tmdb_prefetched = _tmdb_labels_and_art_bulk(tmdb_titles) + for title in page_items: + tmdb_info, tmdb_art, tmdb_cast = tmdb_prefetched.get(title, ({}, {}, [])) if show_tmdb else ({}, {}, []) + meta = plugin_meta.get(title) + info_labels, art, cast = _merge_metadata(title, tmdb_info, tmdb_art, tmdb_cast, meta) + info_labels = dict(info_labels or {}) + info_labels.setdefault("mediatype", "tvshow") + if (info_labels.get("mediatype") or "").strip().casefold() == "tvshow": + info_labels.setdefault("tvshowtitle", title) + playstate = _title_playstate(plugin_name, title) + info_labels = _apply_playstate_to_info(dict(info_labels), playstate) + display_label = _label_with_duration(title, info_labels) + display_label = _label_with_playstate(display_label, playstate) + _add_directory_item( + handle, + display_label, + "seasons", + {"plugin": plugin_name, "title": title, **_series_url_params(plugin, title)}, + is_folder=True, + 
info_labels=info_labels, + art=art, + cast=cast, + ) if total_pages > 1 and page < total_pages: _add_directory_item( handle, - "Nächste Seite", + "Naechste Seite", "genre_series_group", {"plugin": plugin_name, "genre": genre, "group": group_code, "page": str(page + 1)}, is_folder=True, @@ -2865,27 +3132,44 @@ def _open_settings() -> None: addon.openSettings() -def _run_update_check() -> None: - """Stoesst Kodi-Repo- und Addon-Updates an und informiert den Benutzer.""" +def _run_update_check(*, silent: bool = False) -> None: + """Stoesst Kodi-Repo- und Addon-Updates an.""" if xbmc is None: # pragma: no cover - outside Kodi return try: - info_url = _normalize_update_info_url(_get_setting_string("update_repo_url")) - _set_setting_string("update_repo_url", info_url) + info_url = _resolve_update_info_url() _sync_update_version_settings() _update_repository_source(info_url) builtin = getattr(xbmc, "executebuiltin", None) if callable(builtin): builtin("UpdateAddonRepos") builtin("UpdateLocalAddons") - builtin("ActivateWindow(addonbrowser,addons://updates/)") - xbmcgui.Dialog().notification("ViewIT Update", "Update-Pruefung gestartet.", xbmcgui.NOTIFICATION_INFO, 4000) + if not silent: + builtin("ActivateWindow(addonbrowser,addons://updates/)") + if not silent: + xbmcgui.Dialog().notification("Updates", "Update-Check gestartet.", xbmcgui.NOTIFICATION_INFO, 4000) except Exception as exc: _log(f"Update-Pruefung fehlgeschlagen: {exc}", xbmc.LOGWARNING) - try: - xbmcgui.Dialog().notification("ViewIT Update", "Update-Pruefung fehlgeschlagen.", xbmcgui.NOTIFICATION_ERROR, 4000) - except Exception: - pass + if not silent: + try: + xbmcgui.Dialog().notification("Updates", "Update-Check fehlgeschlagen.", xbmcgui.NOTIFICATION_ERROR, 4000) + except Exception: + pass + + +def _maybe_run_auto_update_check(action: str | None) -> None: + action = (action or "").strip() + # Auto-Check nur beim Root-Menue, nicht in jedem Untermenue. 
+ if action: + return + if not _get_setting_bool("auto_update_enabled", default=False): + return + now = int(time.time()) + last = _get_setting_int("auto_update_last_ts", default=0) + if last > 0 and (now - last) < AUTO_UPDATE_INTERVAL_SEC: + return + _set_setting_string("auto_update_last_ts", str(now)) + _run_update_check(silent=True) def _extract_first_int(value: str) -> int | None: @@ -2959,65 +3243,12 @@ def _play_final_link( def _track_playback_and_update_state(key: str) -> None: - if not key: - return - monitor = xbmc.Monitor() if xbmc is not None and hasattr(xbmc, "Monitor") else None - player = xbmc.Player() + return - # Wait for playback start. - started = False - for _ in range(30): - try: - if player.isPlayingVideo(): - started = True - break - except Exception: - pass - if monitor and monitor.waitForAbort(0.5): - return - if not started: - return - last_pos = 0.0 - total = 0.0 - while True: - try: - if not player.isPlayingVideo(): - break - last_pos = float(player.getTime() or 0.0) - total = float(player.getTotalTime() or 0.0) - except Exception: - pass - if monitor and monitor.waitForAbort(1.0): - return - - if total <= 0.0: - return - percent = max(0.0, min(1.0, last_pos / total)) - state: dict[str, object] = {"last_position": int(last_pos), "resume_total": int(total), "percent": percent} - if percent >= WATCHED_THRESHOLD: - state["watched"] = True - state["resume_position"] = 0 - elif last_pos > 0: - state["watched"] = False - state["resume_position"] = int(last_pos) - _set_playstate(key, state) - - # Zusätzlich aggregiert speichern, damit Titel-/Staffel-Listen "gesehen/fortsetzen" - # anzeigen können (für Filme/Serien gleichermaßen). 
- try: - parts = str(key).split("\t") - if len(parts) == 4: - plugin_name, title, season, _episode = parts - plugin_name = (plugin_name or "").strip() - title = (title or "").strip() - season = (season or "").strip() - if plugin_name and title: - _set_playstate(_playstate_key(plugin_name=plugin_name, title=title, season="", episode=""), state) - if season: - _set_playstate(_playstate_key(plugin_name=plugin_name, title=title, season=season, episode=""), state) - except Exception: - pass +def _track_playback_and_update_state_async(key: str) -> None: + # Eigenes Resume/Watched ist deaktiviert; Kodi verwaltet das selbst. + return def _play_episode( @@ -3026,29 +3257,62 @@ def _play_episode( season: str, episode: str, *, + forced_hoster: str = "", + episode_url: str = "", + series_url: str = "", resolve_handle: int | None = None, ) -> None: + episode_url = (episode_url or "").strip() + if episode_url: + _play_episode_url( + plugin_name, + title=title, + season_number=_extract_first_int(season) or 0, + episode_number=_extract_first_int(episode) or 0, + episode_url=episode_url, + season_label_override=season, + episode_label_override=episode, + resolve_handle=resolve_handle, + ) + return + + series_url = (series_url or "").strip() + if series_url: + plugin_for_url = _discover_plugins().get(plugin_name) + remember_series_url = getattr(plugin_for_url, "remember_series_url", None) if plugin_for_url is not None else None + if callable(remember_series_url): + try: + remember_series_url(title, series_url) + except Exception: + pass + _log(f"Play anfordern: {plugin_name} / {title} / {season} / {episode}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Play", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Wiedergabe", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) return available_hosters: list[str] = [] hoster_getter = getattr(plugin, "available_hosters_for", None) if 
callable(hoster_getter): try: - with _busy_dialog(): + with _busy_dialog("Hoster werden geladen..."): available_hosters = list(hoster_getter(title, season, episode) or []) except Exception as exc: _log(f"Hoster laden fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) selected_hoster: str | None = None + forced_hoster = (forced_hoster or "").strip() if available_hosters: - if len(available_hosters) == 1: + if forced_hoster: + for hoster in available_hosters: + if hoster.casefold() == forced_hoster.casefold(): + selected_hoster = hoster + break + if selected_hoster is None and len(available_hosters) == 1: selected_hoster = available_hosters[0] - else: - selected_index = xbmcgui.Dialog().select("Hoster wählen", available_hosters) + elif selected_hoster is None: + selected_index = xbmcgui.Dialog().select("Hoster waehlen", available_hosters) if selected_index is None or selected_index < 0: _log("Play abgebrochen (kein Hoster gewählt).", xbmc.LOGDEBUG) return @@ -3066,11 +3330,12 @@ def _play_episode( try: link = plugin.stream_link_for(title, season, episode) if not link: - _log("Kein Stream-Link gefunden.", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Play", "Kein Stream-Link gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log("Kein Stream gefunden.", xbmc.LOGWARNING) + xbmcgui.Dialog().notification("Wiedergabe", "Kein Stream gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) return _log(f"Stream-Link: {link}", xbmc.LOGDEBUG) final_link = plugin.resolve_stream_link(link) or link + final_link = normalize_resolved_stream_url(final_link, source_url=link) finally: if restore_hosters is not None and callable(preferred_setter): preferred_setter(restore_hosters) @@ -3092,7 +3357,7 @@ def _play_episode( cast=cast, resolve_handle=resolve_handle, ) - _track_playback_and_update_state( + _track_playback_and_update_state_async( _playstate_key(plugin_name=plugin_name, title=title, season=season, episode=episode) ) @@ -3104,21 +3369,25 @@ def _play_episode_url( season_number: 
int, episode_number: int, episode_url: str, + season_label_override: str = "", + episode_label_override: str = "", resolve_handle: int | None = None, ) -> None: - season_label = f"Staffel {season_number}" if season_number > 0 else "" - episode_label = f"Episode {episode_number}" if episode_number > 0 else "" + season_label = (season_label_override or "").strip() or (f"Staffel {season_number}" if season_number > 0 else "") + episode_label = (episode_label_override or "").strip() or ( + f"Episode {episode_number}" if episode_number > 0 else "" + ) _log(f"Play (URL) anfordern: {plugin_name} / {title} / {season_label} / {episode_label} / {episode_url}") plugin = _discover_plugins().get(plugin_name) if plugin is None: - xbmcgui.Dialog().notification("Play", "Plugin nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Wiedergabe", "Quelle nicht gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) return available_hosters: list[str] = [] hoster_getter = getattr(plugin, "available_hosters_for_url", None) if callable(hoster_getter): try: - with _busy_dialog(): + with _busy_dialog("Hoster werden geladen..."): available_hosters = list(hoster_getter(episode_url) or []) except Exception as exc: _log(f"Hoster laden fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) @@ -3128,7 +3397,7 @@ def _play_episode_url( if len(available_hosters) == 1: selected_hoster = available_hosters[0] else: - selected_index = xbmcgui.Dialog().select("Hoster wählen", available_hosters) + selected_index = xbmcgui.Dialog().select("Hoster waehlen", available_hosters) if selected_index is None or selected_index < 0: _log("Play abgebrochen (kein Hoster gewählt).", xbmc.LOGDEBUG) return @@ -3145,15 +3414,16 @@ def _play_episode_url( try: link_getter = getattr(plugin, "stream_link_for_url", None) if not callable(link_getter): - xbmcgui.Dialog().notification("Play", "Nicht unterstützt.", xbmcgui.NOTIFICATION_INFO, 3000) + xbmcgui.Dialog().notification("Wiedergabe", "Diese Funktion 
wird von der Quelle nicht unterstuetzt.", xbmcgui.NOTIFICATION_INFO, 3000) return link = link_getter(episode_url) if not link: - _log("Kein Stream-Link gefunden.", xbmc.LOGWARNING) - xbmcgui.Dialog().notification("Play", "Kein Stream-Link gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) + _log("Kein Stream gefunden.", xbmc.LOGWARNING) + xbmcgui.Dialog().notification("Wiedergabe", "Kein Stream gefunden.", xbmcgui.NOTIFICATION_INFO, 3000) return _log(f"Stream-Link: {link}", xbmc.LOGDEBUG) final_link = plugin.resolve_stream_link(link) or link + final_link = normalize_resolved_stream_url(final_link, source_url=link) finally: if restore_hosters is not None and callable(preferred_setter): preferred_setter(restore_hosters) @@ -3176,7 +3446,7 @@ def _play_episode_url( cast=cast, resolve_handle=resolve_handle, ) - _track_playback_and_update_state( + _track_playback_and_update_state_async( _playstate_key(plugin_name=plugin_name, title=title, season=season_label, episode=episode_label) ) @@ -3193,6 +3463,7 @@ def run() -> None: params = _parse_params() action = params.get("action") _log(f"Action: {action}", xbmc.LOGDEBUG) + _maybe_run_auto_update_check(action) if action == "search": _show_search() elif action == "plugin_menu": @@ -3205,6 +3476,11 @@ def run() -> None: _show_genres(params.get("plugin", "")) elif action == "categories": _show_categories(params.get("plugin", "")) + elif action == "latest_titles": + _show_latest_titles( + params.get("plugin", ""), + _parse_positive_int(params.get("page", "1"), default=1), + ) elif action == "new_titles": _show_new_titles( params.get("plugin", ""), @@ -3276,6 +3552,9 @@ def run() -> None: params.get("title", ""), params.get("season", ""), params.get("episode", ""), + forced_hoster=params.get("hoster", ""), + episode_url=params.get("url", ""), + series_url=params.get("series_url", ""), resolve_handle=_get_handle(), ) elif action == "play_movie": diff --git a/addon/http_session_pool.py b/addon/http_session_pool.py index 725fa43..3abda8e 
100644 --- a/addon/http_session_pool.py +++ b/addon/http_session_pool.py @@ -32,3 +32,12 @@ def get_requests_session(key: str, *, headers: Optional[dict[str, str]] = None): pass return session + +def close_all_sessions() -> None: + """Close and clear all pooled sessions.""" + for session in list(_SESSIONS.values()): + try: + session.close() + except Exception: + pass + _SESSIONS.clear() diff --git a/addon/metadata_utils.py b/addon/metadata_utils.py new file mode 100644 index 0000000..b58f229 --- /dev/null +++ b/addon/metadata_utils.py @@ -0,0 +1,93 @@ +from __future__ import annotations + +import re + +from plugin_interface import BasisPlugin +from tmdb import TmdbCastMember + +METADATA_MODE_AUTO = 0 +METADATA_MODE_SOURCE = 1 +METADATA_MODE_TMDB = 2 +METADATA_MODE_MIX = 3 + + +def metadata_setting_id(plugin_name: str) -> str: + safe = re.sub(r"[^a-z0-9]+", "_", (plugin_name or "").strip().casefold()).strip("_") + return f"{safe}_metadata_source" if safe else "metadata_source" + + +def plugin_supports_metadata(plugin: BasisPlugin) -> bool: + try: + return plugin.__class__.metadata_for is not BasisPlugin.metadata_for + except Exception: + return False + + +def metadata_policy( + plugin_name: str, + plugin: BasisPlugin, + *, + allow_tmdb: bool, + get_setting_int=None, +) -> tuple[bool, bool, bool]: + if not callable(get_setting_int): + return plugin_supports_metadata(plugin), allow_tmdb, bool(getattr(plugin, "prefer_source_metadata", False)) + mode = get_setting_int(metadata_setting_id(plugin_name), default=METADATA_MODE_AUTO) + supports_source = plugin_supports_metadata(plugin) + if mode == METADATA_MODE_SOURCE: + return supports_source, False, True + if mode == METADATA_MODE_TMDB: + return False, allow_tmdb, False + if mode == METADATA_MODE_MIX: + return supports_source, allow_tmdb, True + prefer_source = bool(getattr(plugin, "prefer_source_metadata", False)) + return supports_source, allow_tmdb, prefer_source + + +def collect_plugin_metadata( + plugin: BasisPlugin, 
+ titles: list[str], +) -> dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]]: + getter = getattr(plugin, "metadata_for", None) + if not callable(getter): + return {} + collected: dict[str, tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]] = {} + for title in titles: + try: + labels, art, cast = getter(title) + except Exception: + continue + if isinstance(labels, dict) or isinstance(art, dict) or cast: + label_map = {str(k): str(v) for k, v in dict(labels or {}).items() if v} + art_map = {str(k): str(v) for k, v in dict(art or {}).items() if v} + collected[title] = (label_map, art_map, cast if isinstance(cast, list) else None) + return collected + + +def needs_tmdb(labels: dict[str, str], art: dict[str, str], *, want_plot: bool, want_art: bool) -> bool: + if want_plot and not labels.get("plot"): + return True + if want_art and not (art.get("thumb") or art.get("poster") or art.get("fanart") or art.get("landscape")): + return True + return False + + +def merge_metadata( + title: str, + tmdb_labels: dict[str, str] | None, + tmdb_art: dict[str, str] | None, + tmdb_cast: list[TmdbCastMember] | None, + plugin_meta: tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None] | None, +) -> tuple[dict[str, str], dict[str, str], list[TmdbCastMember] | None]: + labels = dict(tmdb_labels or {}) + art = dict(tmdb_art or {}) + cast = tmdb_cast + if plugin_meta is not None: + meta_labels, meta_art, meta_cast = plugin_meta + labels.update({k: str(v) for k, v in dict(meta_labels or {}).items() if v}) + art.update({k: str(v) for k, v in dict(meta_art or {}).items() if v}) + if meta_cast is not None: + cast = meta_cast + if "title" not in labels: + labels["title"] = title + return labels, art, cast diff --git a/addon/plugin_helpers.py b/addon/plugin_helpers.py index a21c038..31c4d42 100644 --- a/addon/plugin_helpers.py +++ b/addon/plugin_helpers.py @@ -15,7 +15,9 @@ from __future__ import annotations from datetime import datetime 
import hashlib import os +import re from typing import Optional +from urllib.parse import parse_qsl, urlencode try: # pragma: no cover - Kodi runtime import xbmcaddon # type: ignore[import-not-found] @@ -237,3 +239,40 @@ def dump_response_html( max_files = get_setting_int(addon_id, max_files_setting_id, default=200) _prune_dump_files(log_dir, prefix=filename_prefix, max_files=max_files) _append_text_file(path, content) + + +def normalize_resolved_stream_url(final_url: str, *, source_url: str = "") -> str: + """Normalisiert hoster-spezifische Header im finalen Stream-Link. + + `final_url` kann ein Kodi-Header-Suffix enthalten: `url|Key=Value&...`. + Die Funktion passt nur bekannte Problemfaelle an und laesst sonst alles unveraendert. + """ + + url = (final_url or "").strip() + if not url: + return "" + normalized = _normalize_supervideo_serversicuro(url, source_url=source_url) + return normalized + + +def _normalize_supervideo_serversicuro(final_url: str, *, source_url: str = "") -> str: + if "serversicuro.cc/hls/" not in final_url.casefold() or "|" not in final_url: + return final_url + + source = (source_url or "").strip() + code_match = re.search( + r"supervideo\.(?:tv|cc)/(?:e/)?([a-z0-9]+)(?:\\.html)?", + source, + flags=re.IGNORECASE, + ) + if not code_match: + return final_url + + code = (code_match.group(1) or "").strip() + if not code: + return final_url + + media_url, header_suffix = final_url.split("|", 1) + headers = dict(parse_qsl(header_suffix, keep_blank_values=True)) + headers["Referer"] = f"https://supervideo.cc/e/{code}" + return f"{media_url}|{urlencode(headers)}" diff --git a/addon/plugin_interface.py b/addon/plugin_interface.py index f8c266d..83948d7 100644 --- a/addon/plugin_interface.py +++ b/addon/plugin_interface.py @@ -4,7 +4,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import List, Optional, Set +from typing import Any, Callable, Dict, List, Optional, Set, Tuple class BasisPlugin(ABC): @@ -12,9 
+12,14 @@ class BasisPlugin(ABC): name: str version: str = "0.0.0" + prefer_source_metadata: bool = False @abstractmethod - async def search_titles(self, query: str) -> List[str]: + async def search_titles( + self, + query: str, + progress_callback: Optional[Callable[[str, Optional[int]], Any]] = None, + ) -> List[str]: """Liefert eine Liste aller Treffer fuer die Suche.""" @abstractmethod @@ -29,6 +34,10 @@ class BasisPlugin(ABC): """Optional: Liefert den Stream-Link fuer eine konkrete Folge.""" return None + def metadata_for(self, title: str) -> Tuple[Dict[str, str], Dict[str, str], Optional[List[Any]]]: + """Optional: Liefert Info-Labels, Art und Cast fuer einen Titel.""" + return {}, {}, None + def resolve_stream_link(self, link: str) -> Optional[str]: """Optional: Folgt einem Stream-Link und liefert die finale URL.""" return None diff --git a/addon/plugins/__pycache__/__init__.cpython-312.pyc b/addon/plugins/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 26e3918be4fd8347e5d9adbe76f617ee3abfdbbd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 168 zcmX@j%ge<81YZ`#XG#I-#~=<2FhUuhIe?7m3@Hpz43&(UOjW|(`6-zSi76@hc?tzN zrRkY@#d?04jJMe1<5TjJHLMC?=&c%PDd37CXY7aOJMNJ)$Pm5g{mX%P=9 ztx8sDQ`(h|-|^z10K&Wy`D5pOkcWN&bO|Y4caXC0e!!i%?>0z0%y9bwcL2BF54zK0 z_iE@E!Y#)U$%?JOdjftbB`T(ItC+clVAOiO^&Qo1m$_xM61;|%Igcg;QiR0pHS~_{ zV{!-O0x&a;D^*RlurP09B4AY)bZlc&xT@mi5i5U1vln2lcThG}vtKYVQL%1X1j`kv z%5b?tJU*mi6AMGIjVdgwL=nbEE(}i#vTYd^*;38&qOgE9S;4RqTcyhtEX-lpTDOFX zY6-K_ENmp&72zr-%UFk{{qbjW!eyhXVuFivSjW1K1x5AGfu+61B1{3}i-b`KGrENd zaHp0RbA3XsFjx>~E31;JTG->ZP+gsk|1>9Dkxk%umhv&%C(I5GOqHaW$@ODaicEzol#R;Q z?XX{MV9cngnkE;|7ETMjmkmXI(X)R}7^uS1*YMnB)heDnbFOgac~~(#JvQ1WXzCIc zF5&W$k*mFI;T5a6XjQa687R~;(@k-O&YW7=JzLRUeEt0C!V7&Tiziv{XSFT`=Z8vt z3idEq_Ym82zhlxm%Vw7PzBS}r(CP^QlO)AEdylL2^jEhTPFG*GuLod2!H;a!v5c#H{;3Z9loCgU;=yelH)u`GqC$U^;&gS~qc8W}0Kmk{n zG;GL1LX~D1$Ya77)<62NYU@joWMzF$#lR0cw7>|#t2|?sz1jc*(E@=GcqzA;h0Z;w 
zf47iC=h?T4KHzBLhyoKVFMPl`yot4WlWrl9BLJzBsk&-OQmxBB!IiM!BTBH$q#31- z8mFIIDZEHWbDes)9pN-X)_{xGpB#SBb-owvI;Y=}(@$xPJQ5dS-Y%>c-#>{nJdv z^#2Gt&9c@zEQ({_Ep&tT``fL5*LZ^ie0Y1_@74RQEswiFHNNZfL-{s$i@U+@YC9L) zNbMZckwx8veg5ejx&`M;&AdK^7vGzrPSm%Plbp1n{8n&@5QC5+>`Hz@K@xxyS;TOk zAf^+xt5AKh6B#ZI43#FQz>d$b1zu>F)EfjZCX&(uw)~*kMB94e z;G=B!d)8-1p8xpB^AEG<)|2NNiOjp_-#)*d5FT}Qf7W^86E&^u#|OxwG+-@d&bJ>4~9lGSD8iMS#o5Xl z)NQs!0Wo?#F)SJCrlxPFY~8D$nu?*I?v#LZ!ZxHIz)lGgz#{~nDa{+Cc3?06K5mv$ zQnzVl-8I6NUjmj&0Ap^`&2`|nf%(AgATub-elQ@nbrJLQ?4{{BWmWI zXWD1HJ7JeoR|cvwu!8TC*xk{UVs~n6Mq;32MgTEV*}E6lCOj+rpA<|m8UYOlg zANxH*x6aOC(9s-J!YTHW@HN&<4|h9)&DkwFNCNay|Ivq5&4TMDAoGL7ScdYuMFmAD zXku$ml1`SJYWbdUZ${prs!af>qwiW!OWUvQwd7u{Zn-thVE2&c7$e&%q}P1Cv1OvjH8?}3+Nbl5W?eH!~--)Ib+Kezs9@7YQrJye7f7Io7 zx%sW&eDF5E%H;yJ;=lGz>>3C`GtwmfB{U*2TNW)}r8v?l$g(i;O;%_|lGTo}_&3`fR+q15d6{Hd@I( zI=i2O0~8zt;PCXlLwad9S|vsm6Kj>6qG(p}&(YqC6r88v6$&m;KrgR^UKYt^3TVA! zT4lA3^w1#%^zmlW#CZpO6W~M9jU-BT)`vEEE_8MyjKb+}16=6nW)ONCwCBwQLnE9w z$-41K=@mT>Sh>7OMVL5}0T4TL%V z`xe9<`wI2^1HJModS!!;b3LE)ZC~&`8)-d~+|2~Si+?T#ngzNfGfag*h2mqKhuy2D7ze``>xc0l(zelwFVdFIpMcclGH3B4- z`f#ItUwy2x-{tMdr~LjOH)4SMaifzhWxw Z2ZIk{(ZSB(gHG1(jtuq&A3VbW{2yR`Z-D>+ diff --git a/addon/plugins/__pycache__/einschalten_plugin.cpython-312.pyc b/addon/plugins/__pycache__/einschalten_plugin.cpython-312.pyc deleted file mode 100644 index b5da04f3334215cebf7837ef852bbe3ddd9a08a0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 48789 zcmeIb33MFSc_v!bd!rlZ1{#eWL}N!28$p63xDzBm0u&dC0JT611frT`lUV5LhD1UQ zn{u2nC~+cC&I}<-K7wq$2pTbCI`Jf&7rzO$#E~<}oNhD}G&{&xNt`$1yu3M(C?^V? 
zlk>j+R@Z_?0hH`GdFPDl0Jo~E?!9%F|Ni^^>%V4ZW^g#(+WV#8?_cA%zoH-VVU{lV zJBJM%ca0OcK2G2T!w^4g=riziZ5%T88QIy?XTsSuWFEHkS=cl4kagJBXB)Qn*@rXw zGKL*}j$vn?lRdKxWe#WcWevOfT*K}@H_zFn+Oqqy1#4f9VC?gx?&S65pBuef2_NUxQH8*C-VCH3=m`sjyBc6MV0j`kD<~z#>?$ zm%qhJqhR0;aze#dIHB?_j0PQVNhhvvgKpj`hWufuuo_}o~^=up%Ldcp+jiGd7IEIY(SY#p#{0N z3kQUaxVu9*C~U%cr_d#|;=D`f7B=I&TR0?a!Fi8xpRg6@z4+aRbGvX@*oN~y;fSyu z=l!gwJJ9-_fr7V;QYZU5E*dMjKyDQ$y8FDCpKQU_mlN>zc^={L%loMJ%5CZVH2*Yr z!T1RGG@r_E#_SmT@*d%?{I&eVe{}qOXK*AmcxGTI92oK4KQwkaIMS4naqN6JI65*g z6bSh`0w>2#1?4 zm^5_+2g6Cz;b177w88hZj^zcFba6NdsLgQG$qX~pH)(cnnZhKtbopr!{}cB$fP%=t>XswGTsq0deq*&V$o|5wu>Xy1>Z5$)Q01`JsV}CkF=4CCjwcJv|Uk)z%*l z3?olCkjxTjPWw|`4JBRDZ#F<|Ts+c^(3#PvsaO3$Ok9$?%QxS3G!Ktr07A{^!(D5$ z)L2|djT_Hv9ylMAvpf?NCYss%$J>vLjt7T^2AVfFZSd9IH!1{o_#VVKc8C4G_Vee5 z0uKdF-WLovZ{D=EY1067&MpET_cLaX@1V5wTNh z!SZ+d@belM;QDwVZo>&6*nm+m?8IHch$|DjG6QYXFH7o|nfx zz%M8Lx@hxfChbRPO1d$#(O zFdeUk+Bzohs*KIkQv0GTE=3)UPb| z%abbS5^_duciyNrM7D_f!vWb#ylx(Mj!PXAC zVgN=RmY{4GT@jFodnnmUNjoJ3Gt&HTp{q7I>B2?mSCK3OD3ABj!O5Pu$(zW{yVNx~ z7&qla^jHLqzNvZ!c81^)+9aCrh-8* zMmU+_xMyzQEj9O9tF|WTT77SX>*4(7qYS#nGah|npmBUdMAt7lH1HfWqCzECXLh$^XeA4{Xz|dGAL|yZVG+W8M{*h5}7>Ij305Z<#CQ%9T z5FQfR4IMyoiA!YWJoo6-$pvRc%vlk4R$b~`bY{J9;PQc~T{D$4V{vDrdS}b@rWteG zS+n45h&dbL&ZbM935WaAVKxKG;8PoD=-A*N#>F)*tPDAZBXvDth!{frMeypHr@Z|1 zWlk`B!#DwA5HU#C5#v)dM8=~B#Qmt$&$FIjSj=A-3y2rl+Dy8YJ`2FJxQ+ERa4`ht z>0F4~jyP110r7;x0aaWwO6IIiI0v%_M9 z3mHzM(-FRdd%-Y>>p{cgKtB@(6-pyq#GnVM*9|gTAL6eRM+{dDSJs(Oe!|d$lxJ5@ z({M6~!)Ow2^Bcv3IEgeE@(r57q($Ovl4j{lYeX5z{QgrxaL6Kv9C1JWCof72oY;e0 zLHvgZ!Dt|@EaohWJIgP1-gdYXuIx#BqOAH)9ml5v*Vo-N-#YP!kG}orpZXtou5-?D zJZd`r4>*M~P{TJ$+IiFeX+i>cw+ew%17kzsWl*Oq`6|E|pbmwkVo zdQ!Q<1R?B@p%4SDEZT?xGM>8hjS=Ism0`PF!oWpnsi$B&g=!4)@3Rg$pL&*;eh;VC zf~m|@bI4aeYrU0RI7_YrG7)5)2tR0e1OTPTIL4!l&xhDRY<>@r$;fF5AZ$tdg_FaB zjL}G%+lk*u+S)rhjvejq?nqi891MiRqC^%6W|A4Wm8vIc@4P^)Au)iaWQHt61O@Q~ z4KH}M#5`N#p0=p7 zZPAr8wRyo)8S_-m9Ey9I=RBKlY>d0MEVy>YTsz~g-BH`_PZu4zA9K7pAMbrGYsrYK 
zyP*QK=ehQbeLVNJw;t&`yknos_)Zp&R39*jpU|SiumXly$NY6(rYt~ohNEIHa{CRU z3n!6KHm;wbYZ}E6m6Va$@}cD%R0^Z4;&$J1Dlq`M!&^_Uod ztSY@oJcH&wi~mps5&{la)SADTRTM4VIlt?8bZ1YrxHq14B5FIaWaMlG%SjcAa2EWt zj(0Jnq4Ei!?y2PrUO<^ADk2>qO&S!Vhl5oFbe7(dc8mkHTsO)yKEX%8p9C4UKdYb> zwTNJnk%_{`tbtS*ab>ju=_x4Xtbz{IdLzmtP8h=;`F_M0&XuoaIwBe(#vUN_9uA1b zEkBhIOAz#A8DcSyKhOzgW8h3MAclR1dX63S4UC0+Pwos2f?)~!g2Jwfk<*Q!%;7+V zFMRQQU{{4!bR{9RVs}$h)6Qn8#O^1XB;IISs)|RS*!}o=RP$)XV~;&n`?y?L>L+`; zyRK>d9=}opgi-UUp@Gw(AJe@09bzX=NsGiBB{RAY9zAxvbANkJXVTJfthfE};iUPr zI68Ko(Zr;Qnv^s{8Wcj}<5V&u7=oH3JTNjCNSXxbV}R^TOh;g5yhYL|4m^z~;M~)Y zTwy9+FMoj0R&QkhL&_!&67M&`XM8)`OziAYJ{S9vs1A`;g`|)LJH1s&s z1hv$gCXw)Z(n%^Ra9$XM;Q;H4_+{M32+-P;Mj$_J1P&iNNCVIVLPDKq1(p#Ko2>W} zzl!It;y;v!1bEftxY9N6-y1y^h@KmXj$Qmy)A&kUs`Q#D*R^E~*J}M148Vb!egn5C zxDvR<$a9YX78HiXbadRSfr);mjH6*mnmao8oj53d1#kA7)G-igF|qZEDhghG(5 zwX^PGT~2BJAkmA&Wqb?&Ol5M3qji$GXmkC#ZN9N1+H*E~{^=!-`x1Y^@OK<%KWLCS zgUs7DC#{XlyCEk^f4QKTd)Z!V{0+WVT@?fcgtZxD{4Fj>7Jv}+Sw@N{F-XIXD$FJb zu~9HyH>sl-;X1g-Aq0Yby%*e3t!A*DHZ&&fz!M9e5bK?G;EmA1Y2SFI&4gLP+)>xr zp)+^ZqvOj|z>=DmCU4xjooPRICC$Cvy@xy5ERLJD1HF%1eJtNDzcp=AlSaV4q?{qy zMVez{(j0`cFO;+nj*bxbm^8897;#l%aj+?uUN63gC;tKep-Ci{IFdb09n`WR%3@1#Y->(6AsAsLidn9ymkj_|KZIf!j!2qL zjtUo(E*aDGpBDqCf){{hh?$VkOhFo&={RxZ{(jO@%XsGBA?q?clfz~}ejhV9h||dQ zHf1bCLKr7+(dJ$(-jr~<5}DbFf+h@^*&-nw?Kqf_oh>Ms-q?at7$jvxkRx@xl{%-i z56~_|_;YpW2!GKCYn~}$c#0E?RE}3=kMtNk`ArkXkvc3Dnvt|aFbfu$ffB6LGu>Ra z^js5=FSRX0?KPH)Y8TTsRM9cR-L-nNLwwuOLa|b29qJWHpO8 zgU5eFSzbc|ZGgl5+>WVz3mK)cjM8{US&A{qO;|G~hv%$Nxpw7ChB4PxYLqem3WF$A_N0Yum1Fo9>w@k9(?S9*B8r7d(wIPvh+1jg~lsxPh3b zZL%Zba82!+bNCXTe24(f@+BMRbiJ_i^3LhZxT7X&s!=(dv<3KTyQ6&<_x3LPKD+T9 z8;?|>@#q6b3xEOM7m;4$PIKVlQ(DF#x0aT%gPYtk9-{_R{5RIj;XsamE9dZM9!>INu)wM&)+B#Yd>X@};SiuY<&q9E zwzNwYyrfgL+I7RwlClGKkQp#TG6UWZ3-y~8O(pB8&U#83DPbnM%%nxqzl$Q>3sEwI zBx!^)D`^^nHbgQdk=&@!nL!+daWQFuYTjCF51lU7+4e1LK@d}}^AG&*=r>d#fG z*v6EtFbGLDDQZeGOKY6$KgF7sbg4Z=?*1TSPNBP14%u38en5l;RFOv|Wv=);sCW_o zq2B>91=e2YyL2FtQ}|*dWXkN?SaxkZ+kXd!i!5l~T!qusujkIYY7)-eAJLCj{V(}v 
z_P^XT+c@uP!_9=-d(D2;K3(~>^z2Zuzi{#L#p%4b%QtDFnyOwm&AY0pP;SA;Tq)zO zJf$h=G~q4)Tb^0CSiEjJ{OX05F3g%<{$jkiY03r})>|-bde!!lZ6-fn;GZiXeP8Z| zh1@N%+%56kw#m-ho&uCl6qmi)_EOu-k$CaO$-`7zO`@$M+IIiz-kHdJd295+zNxGw z3s+kHYS&9$Gf%`zS|^WCw%SBnXSD5s*DGg+=PO#H4?Q~NLbgoL3r8*=fhsbQUzR8; zTP&}d>3!qj*B+i7j#uxTtKN6Z5HH`q=&J-f*!5c1>|^n&?Q>OoZ=Q_%+7so?iHep_ zGOgJ;KQY=fvp)8tw@ZZ>)1_XXbLLz+^64j=xZF~R;F*O<^xTN^Pr)=ww`yQKdyzDf znKeKCBqMdRHc?UYqKj_U-n|1!fP1I1=s>3F-QBJOrKb1FTnCy=-_NukD6)RPkf*fN zaiHG%{q;PhO^yRQt>53lBh@E&zXoCp9dO&hi(rH;q%EY4(oGjoYOTr4z7P08|LLp6pbxaZH zCJZ15`uP>o@@$cYT-+nCjF>@8Xs+}Lt4fZnn%-GeMW1Gre-yE%h`LRa*S2ybKtPuh zb(`)eD5Obl>4aUQiqtQYPL!N-*|Q2E2H2;Lw^HYcj7SC}>iTtN%5_XQFar*dWQFWT zl=rbwbVl@R$Wm&^L=BnCh`Lfct04`bmH?15n`l zo_vN>wA&wrw)yb}AAB!@Bg}tl`!l#RG4UjP{)k#04}{0WkEGGo!^VlCg@%9tZ-hp`M$!O=uE1{X~Z|r_;_l?GQ<=*+S_GtLRloJA|+q)XO z^Wpz)?9LLgJ0IE1M<(S{k<|*~syDF^jm-CcX zJGwSm-`~hny2a6Lvi`uxBh}}UeyJ4TxX3S+0)R6*eyJ4rS5XQuB0Mgs1tdE!BgUVL z(u!mq5N{w^MRs17PRvs7c2IA_Ln$P_jiUDx5nEG+uKRfj*Dq!0bQwAyW4d(wpF@Tg ze;0G8;Caj?NA3}j0{J`t4eBqgaXZ4NcyUEBORgOPf365n1XCZ}MqqUN1|0D;QnMmY zqSn+Q%Pde98y^3EAvGJrDKjasiBUC@YUDDn#*@sbXD@)POl`*jyGF`}0(qzfie5!h zm7Y_ox|~2_SKn>YeRq%6yAD#gUVVxLEw^IA95KVnv6V*VN;pjHB0E+ouSm=3ipaCs za!K_pkKY7jarE(9tJJAT)Du=nY*t8YsS&~$DT-pIhPACygDN@OQG*>dC~D=1@{HAx zu}Te!{Hw^FY8$k2CpY0-r3NL}L?)D{S(@3()N$l_45kWKBoi`x58%qJ$?evi+g#1e zxOHb%kvl)v%w$KhF*ExkEjBwL0OD3nh?>U5!Bio^^V)S_vXKD;Rs_vdNVJ!@5D z{D0P3(#vv8HyrFkMB`PF9Hhw3mf#B1xKBblL?z{gbeNYc*q8nCmZ3^VUiR=d0%D5%kr?4l>qelq6! 
z(jO!jYPO9LZiCX;nQ81oN&d9>2guEQ?lh{D)TN|RDM@ESB?FNl?PH3f_!mJ`LZ#p| z;9CKY=e8GjO@AToDu*V;>Y8f#@&qjI2~XLKZF)SKz5ddnl~$f@=8QBe&vMnu`gzyp zHLW~Qk=VV91;x{yuXexGJtMq)EMCws*$wlF(>rY=6FuB*+@7h8s9{oHM;26s%ve zqx@0^8Z>!e(N&ai<)fdAx%t!PSM4v^XZ#qhxdN&rchf@dwpi}Acj#iG*b0Qs5DX2sWSom;o-=Eiu@?uDX` zSW!p3=s=>dHj!U>uSO_csEZfW&lSM4G@iS0A-64-+ZNB=evcL;JcXZ>y4)Ebb1t}B zo!|k7OW@L+?oU4%;5m083}be0(wPgrklp*!PjXYYN)rVo&v(Uc(=k+YdLH|G1H-w8e3R zGym}(9_gPL9Y=D^f0E5p>UA8cGXF^>k5rKuu>4e)&^au`YjC9g?Cv6u{}=FS+D$+D z&LX)#qqE3{&*?0p=jd@fVgx^V<}+FMay7QAd)VtzT!11uoK<858dB*{IDJSH#7{yJ zQRD&e%XT@RdWN@Eio=%ABqa#ea~(jD{1pKX6@0|J69ulo?d!@d#gT#B^4OtxSdA-G z(>C6W=86<}WQ5=kTp&Y!WW z9Y9Re`zr6>bLWsQCmXVmebP|(lS{xZ~5pnsyaRHb0 z(z2C40v~f8bLE=U$iBbE`ffFkbPxle9H6vx@OO5z^)7F67kES? zFjg7@S#h@DY{S`#vmIv}&KWq{adrp|LI%!G!67(b;e|#a z6HzrXQ@@&oEd0ts#0?knxNvsk>=rf%**It8cMi@uLW|(R*@NG?IOhr*1uxECVUv)D za~{t5IOhwkLIKVN!e*fm=R#qNP=s@luvI9=xfuCMa4tblN^vd~+JtpDuM@TjWjL1! z+XWxaK4FJYj&r%N6R}B5hPRi>p{OHu z7ybTy$?Q%wq=Xc=Vyp?Hdj$LqS??O9d%jceeoyqlK|Fx%CgD*LJ2bHWB;b2sEP&nl zP2xC;EJMn)gfLP3ompIHw>?F;{uGLUQ5MgnLo^87h%J2^6!NAf8%h&+boZO&{6YGq zs7}}~sEee7{h~$24))8o?3aSL*fRy`N?Wbm@(w89slb_`F!okCk~zvAs`dxlyAQYT zJKULc21AHffW7P#2SYrK7bJ7WMwC0)NHswFT5Q-v5ei|$9wHRLO-SCunaqKQJ?(^N zF1mCZ`_VJyd(uzZdnlmh@{vd}uS!Q`$=r3JjgnI_M*i zLU1G)?(ZKjS^)!1^0OZj{&@(!19Mx_G-;Z*75@49ov&3zJ9_6&oQ?LLi|!kuyJcVs zCQVl|6AeC;f0CI6H!~q z%3-3Vz!un9Txf?$Z$k(+K&G^kdS24q#wm#Qm1L2REm+de6l;{el9Z>P&`lFBale;1~km5(!TL1q)jixoGQE{5n zOk*lSg*1)pXwcaL)6QkSJeNm1AL?h@d3vM?&6g)M?SVS0;>T3u9VDS;)9BLD44TVw zow?MI6=eD8XPD5o`F)SfyZRPfPsUtN#$6}pT%qWx(Er90>eHQ2k369VJC7dkOiiXo zo=nZndrzc8VhloTL6+)Fq9jt#V*@C`y;fxF{O$+lUB?$(564^&$6bAMuG7&cPJg}$ zR0z}obpG=rR-i%yts&J7RfFtkZH#A-PrB4E?1y4Fk1&|{w@8)|6<0rUY6nOU%9m%c z(;$(0bTzakV_{;x?&I{gk;pED=QGv zgehX84UDS(AD*{-P_ewy)<{f;RZDxy&`*Z6#IMuUVI+Q=h7>8#-qYEBe1DgOx)f;F z+)RgdV$i{95XZo3RcVs6)0o5nCQnPG$stGOA$+E{6jELUcQcz^ zP6^XJSI^E=y;1*K{p?`8Vq-jO)1)zxt_Fu*sU3Adgu!h}D`r3STD} zNkb#%>(->?l52{XqyTant(Z|Tv&E{}lc%wnEAU13dCCXau}QE)7K!kggG@g!?VK}$ 
zz%V$H7Idj<-_hvx+n=KLPGlrk)T2V|*34S}S%k0FPz3 z7Yp!Q7JLZ6h|NolA;jDC4U7oB!0<`j4W9}4*z8N%#NdeH2%Yj?Cd|`$VE|Ds1KTqg z8bZ?Odoo3tns)|;-5Q^0#f_Q(RnL!_Uj5|BCepn;>8qzpv^C|n3y*2}4U)E!=%BP} z7|tbtmvQbt(xd+eNzynxa6z+N*y>?x#v(~!Hnv>IvZgK=$zs+oj*f;UBul7CdbfmS zUD6_Bs)tpVj#M#?HD&S9(wRa%@Hful+_@`+frPhk!CM{kR>!@y3*M%fw+Sr2chh9X zZC4?a6>_FpetmGd>g)cQ@~@4=UDaSnfQ8S_Y>MU8TsL0df5Z0Hk$Bzqc-9V}VeC#V zf37XzEtu+@&VFUv%&F^xH>%zWMyuQ6#oMAq+oL%?Ig=SHi8nzxTVE6w=$`9q z1@Q*Nrg#?(vQ;3Qkf6Li(s>%IglU?w4P0etYS0MW_)DB5zi<-DmNW9%QjjoNfkdRQ zQils6Vs{B2H9q0OX`Rcv% zhXV7SQwyGo|_NG$#H85I`ng?wPyAzMQ=fqGJb zC7B};LTw#HKJm{exlV~l3DFCf8z}5mDt4sgH>B05r_mz1DI zUO`FA-~b|d3gU74f#Bq+u%|*gAFV`>W1(NbNsL$M5T8_pIXTY(TKaiFy;%C$c`p`E zS1hXpHH1O|QT2LH`*xG_u~ z`H*;rnwYl+ZX_Rb@HBzn2=+dc>Nfy=W1@b;x7*%qn=&o9%VX~Hnd+FkE|FhC=1pwp zwIcsgmern}C|vjIx|i0)3;hd)&9TDfcwx)rp+sTnLgB_(;l>-A-)@sw8BJDQB| zHS$Q8AtFM-3=th-h)CmZtDq!>&}ki~;V!Kgh41G?LL_RgfqvAb%7{0A5nFRL1W}5+ zgnrNgLaI0?L=RCc75hFevVUm$CN~x~3YnDQtMQUC0m0FMf^AsvuWe!t_Shr@mB-?CC?uP785RO4% zJs_ylL@#KPDXYra_7vhhU=!+>N;0qhp8!f9yVNeIy(tE>fc6B_?^e zu;N(|G!_X?{2$1Yw03p2cXS@_NxH>>Ac7@;5n<8$#zIm!wQ0&^Vlhh}$0dU5{3X@Q zxRFAtFTygAr8Q}y;71|iMYZCJCx!5|4Dfih4w91aqBdJUdLQ~n)CU_ATp}*KI5C4D zIMtK3gu^rSh39rbGGTbCE|y)lklhf=ZkT;Ip1nnjr(S!C;iyDb&O%mIEURkf;doZl zr17>Z2Zfgm4tpNrVNLZ+kA2{*UUcS98|R%Rh^n*Tsfu~3;+`7B+VSLHYrEPuz5jat z&5EhExMvscIo%7+@|d%H-dRcId#^ow_2C!$XFRj5A2>HGI#VGWzW$}zO*c%xYyXaY z-nl)op_Q(F%XRbkWXFQDGUlwD>54fUseXj~d2!E-A?mF7bkT*_5WGF_Hkt=kDDQ5F z$nD!j`*w8Xaqs2XJ1UItl>-~Cf`Q2BqcusYOpy)-l45h}>LxT;g2V+m?xv!tAVvr5 zyNV|?jFJ2%h9?w@KYEcm6g)w!4>7uF2UshGAreg2%?h3%yrSSEwUAsNR=nCi(^r+)^>%VLH%o#!c&DkkZT0iQ^o+#^VR$EmE1Z+3)0}Du!3>q@}fhtQC@Kl zQ=*e<@dP%MuZT_xg)lWl#7B8Jh9<;ybX7(P1R~lK%@Bi^ZhVz;eu)x70ur96qz9zb zQWw5hO+U|57RKF#=xPNTAPgYZP|jLPp2p7=NI>R#GX|D^?2 zRm@ctchy8~LiD77bfuz(uq2zoR$!{nA+ml39DT_g1Xd6X6vP62CBaJCq33|2>TZb} zytES~b>>@>%cL=FDaHlhHVuas(NG3*LVY$H&ZoL9iWqjHjS&kzieQ4((fo>G!U`v1 z8(LzHSR=MnZ*&D<(Y1;QXqCRIgQq&UoR!kKAF>>pLi=@TltWi4W@EM=AD=7fqxz 
z-SRR;6zu0710Xl+*M=`~*o(NeXo#4zR!OvmeKQDo2&JSQ&yvWjb6;>Ug+-N0eZr*M zW`}+Wr8mxTK(YJ_Z)abzK^bzQNIRY_Q0v#vp{XZ}L51%OshvHho&C(9UZ)_ zVr4zzI>CLmdgXon+?qPs_jdrzwJXail-?e(i@$-Ii0e)}#6979`Bua(l;xV08dG+&tj#fF) zs!i%!I6!TA01x8ZzeMn})Fpg3eII??UJ7tsQuuP*f+ zji&P#{l4(14Hug0pZ7lWs|0h_!bT0zJ5%Ao3sgIB8#?@w9px`z0I_N zEHrNPHTw{Noh+`BnKhgID1yVV(lkq<`I_-&bNI~I@JSd>**Ck!J!&3#b6GPK#seeh zN76n7qg7~d;C#SeBq^kdiHQC=njulrI{JBmenu(TP6^v_ww|tj4GBKV6cU1`gJF?v zJ!6t0;}i+{#9frMQ#KQQb5Bxv69kJ6N6>##^hxqD*nxH@ZE}ZKiBjN{=aj@&J?BsD^C=?n&me(2 z{|coS6Zz_5p1N5tzLvoR%qYH*vr4Bu)56pq2!$UyvR~Nq+@3^n`KxR1o^`%KHV(uKPfl49ZqK!hs~Iy7-gqijynEighgDKJ=c-&R zu9#_^t$b~J6r=RuLgyo~&PNtHpNw@r866Pfoq=e_sc7@*c=4Gj8ZHEa69Y{9&H zqx|cc*{*r_X0#Ieg{sLfMQ!VryAxm$sGsz@4uD^crB?_G`M|QzCqZQgHc~pkI8hXA z1)*w$V1?0VqBt?1X@^oXgnV7bk5@zBvhe7sw=%1R!t$w=%Jo4MSKFt{zNJKQFn!C~ zuBa73d~~cHaw$C3Vbe5Y;Fa67N=}S69k4qpB6~z(XPIJ7VU?hmgTPH6s#^9r5X;fk zW$jGy6IA~B#B7G7e}{@O2|HyBh|mpHxF+c*-2@;j*d)ha$)K=?f=!$49m0t>P$vul zlKdRcD$`2O|AP84hBZk;7SG!b%!v<^)y6#=qt1=GGBeHozu(ui-^{&hcI@{U-_7BX z4yL9R4>c}b9D4c7rKK^j|0^yntrkqMXk-FKV^F#FHQln@LkGJar#-;_w!ViSTFXWhGhyh zo#oD=7_LyL6{NK4YTHuw#_{#5D%Doxjnnd9L_@$5k%GJ;mQ@kYs?s6=_b)<} z$!=X#L5Td{@6X%c%)Q&lQ`+pzTb|nkaq1O8WRAVkdQA{_zn*xYcRA3YNL{Dbzd7t&R+K4z!gWP{fmpT&4aBRaTWY zjQX#kIA_#cp}caHWu(MpZE>pxS6jseI_Y~A$ZJ%ikgPlRJEo-}~AYZc-6zz)o zT=RBy3{(Y%eomSCo5N)4Wk*CoK#yUrCoB`z30t^aek#~0!j+H_u9WXYl>OE~SPJKW zk+wct7*-SuYP==D21!+Ehh!!kDMPsZQVonL5u8$yq+Ehyt@06zm=h)`iyheY?Udqi zUC&hEB4UB+bDFuH0DqVe6BL?HHexFJ_PYJ`m-=nJ!DS43+#&+2yk(Ie) zu7-@o|BJH!7bGFll}g4=q>_7P>1vpnIc$zMe|GfD3j7V>=!+Nw+Gn@Y3zHc(SK#f> z=7G6n@cye|_r{)R@1ya?$70U)aK`SM9%5L2_s#IlQ@0NO@&4%j5B}Ht-rp51d1T(% zhnOKzh-2G#-@Nd9O(Z?H#M~`6s^;8V6WOKFY(Kn98{$oSZne%e9ZYQBqvnXYx5|Vf z+QjOPHgtaAJ|Gc{EwIxpI2&PtofTrvO{)_S!b)$uI*K}XaDTAgP3aED0lV@0HXi9R z3Zkg@zk&$^1wldSNPYBHSB)=YO(~Bmy@Bc|65C2MuDr4t)zMSgE7W5G5+e*f3PzUS zM|%?Bt#uaIjvO!)ldIfj;C~Y$zVladfNyoEhkhAZRZ6Uzo&p|tOhd%rQP$9SBPbLL)u_D|v@9pRjLJ!IZZI!zK^lM$#gEp^J8e 
zLkoBuw^nd=t~5lbw=pm+iwzVjG>ghEaeoaJ6LCVyj?Z~`V9^a-n8u;gVgG*RB-k2$lwR(0kQVy+7M%7oMXLf7T4sS{)sn|GEc8aICX;WrT>#l&-!f2~<;%dH@io0_cYr3`q;jC>_D8Y)WMiN&Y)YzCX=^ z>d^p~9LqpYouHGgh~NK$1I?3aEp5({3IhMOw3J`U(>Xtd4dm!#Q7_}W-2}v+XNnHHjF&wO7&a?QP zekiz5(X0VKp%>|gX5N`Q4Dm|f!s%V?_@z6HJQ6Zpf()S1tm!(=q~=4b5>UBte37JB zu9G5T%Jr^rn$y$G$-5(0llb8ba}bq_h*kKrNbzq|*&a$5&uEo+#=~?&hi6P$Q&E#w zgdY;=^L}f1!7TF;uA;@YhOpkTdnYO1-4!cc}N}s|c_0xTCM2 zq7QU1#efd$C)c;k<4)J#LRQ!>SM!6ZQ+)2EH&H6r3Z=Zl@*&*5;_oY?0Pzj@h?v<| z_|A~1uE}i)leUK)a&BxE$km_{i(dcL;Av8O1vR9sqDqkK)wEOko*o5fQ`HFi^(z9s zJX7#-vB4XpgTN8N*=z_J&HPU&I2&}HTL2&G3ze(0mCB`ZA&(lHd*!Zrq3h+KC_Cu8 z%y?I=nqR(>>&4gp?AU+0oZQ*dOin5+D&e7jW>~cUf??6V92PZ-leHk&4wzjpkBxek zp$scQ@zz)+Cr}L?fbM4LiUceL9w37K%g#F6?&6bM0%eaBpqIb$mh68Ht-9Xxf>oY_ zr(E`3>x|Vk?&2Tdz0y8ff;{Fs;V%&XJ6-=1C10gvZQag!$|6$oo0Kr$ROY;JkbW`` z2BOYM^WYGs9Fr7a$_JO;RRN)8b-BpSF;)VRWSbkkRfBlpRzgP@G>v!{SRZLxo z=Xc^B_WruD1E+f4T}$P2UoCj4V6L!X=Aj#{AGq5RMeFFRLNC`(St%gl>DR;ao`ytY z^VETauNILCz0K36#WMeF)pt$vWt$TvRWSb}LQvalZL)~*ezICy;xB_BfN3uwKKEfZ;iw&Hc#&dw*s4g_RCun^-a?UW?H{~cqt$6 z_^5y@Scjly3k6NFf~JLn)>uL7e8CpYoz1a=%{NZX7wk$DtV3*BndOOR!Fz&S`<)n} zEW77l%C&zr%LSBAY`|+`?wSR61Kd64-A$T1jWKuQ?5Pjjo0qUHGLJrq=<~z*zpy%q zAv5BveiG4}bWboV4nBMpK1jAQ2btg7j3BB2G8lUFn_+~4ZZ?JBki}{At6nqXRb|KL)-*;_0hX#gE z3In^xTUIK?Q?iewFKgO~Z^aG_g?8gYDsmZ^@(xYlC2l70#>i_U@tQ4{IHnCiz(wP^ zz(oW{XIeW(4*nOtpQJ5GY$LW`e1H-^J;3)=QW4-4)g3f+qheTA;4#sT6nJ6}W$mTp zIDW2B-eoRpwq6juWf{f@-`_?>|C9Ul68Gn_?rw#98U+Z~)#@>?wDqxUh*iuVjnv)V zhS?K0Iu=?xW38R>R@hLx;C%Kkq1m&$UT|D?#0s`dI_7O#;YT%@`44P_zPx|K{*C-H zI5MHr=sbVto4C-yCc0V}d7UcQVGRrdhD^a7SYbB2Chb@YQwTiPgyK`hHY_Y_K1ek8 zd6$8BlE5Zy5r|Z+p3R;y522NS|002m1o*ec{_u-$e=&aWp-U3riviRHkX4n{6z?@@ zAxOqnV&blnCajFm3Ng4HkH!mDE<&jhSLl_rf;Av2yWYv_<%G?-VDrUnzIj{4q6_hv z7V;Wmc?}DBEwQ|oc;2Siwuk0j4@YeeuT~p` z9(5M-zZhE}BL^mTe#YglVI?S#33IIkvQQWU6~grPL8~5BaN$t2BoR3xlj3zR!>zjK zki~S89XWwVq=SNcEpvy^XhBp$j`r!0f4TR}l;@CZRL{_(D4G%MepFpKus`RlqG(m~ z%k=?`s_nC!Em3pGS1ApTDb1Bbs}WhF6yENWYf{hhh~e#(^4(Q(Vj<8mVGY+xS74B= 
zVS;IP$Tg*mtM&4I)iHy)d)OxI6B(a(KH<<~f3`{^_ba*&%u$p4CiSd=5oNDj#Bhr3 zff`vK$#}L-lSBD{R>W{_D^`3FTu?rDtuXvL!Yy(O7kO_8jKlNpKYBF?j$)f(ihi1KMEtjo-^TjcxdS;%0nP!*cK>*kYJ z07jurEiGR~q&-89vpeKF>RH={D-r0|9L$JNCFfYh(Z!Jpv3L&o5arTAc-KhDYF32< zk{)ASI^RiDSki5<&5fD#lj$dzsqU+&UZd)$q^Ek@aGA1#G#Z+{xXhvDdvx!Il&s0o z!sbCb4w2Ch)(_UB2;E!mV9Y!YRVSA;o|m!At9}Mq97X%bYwt}F8DaZ6(VtqT@Aw&* zUoFqU_F(v99{+-;G3IHU4Nxp1idMw<<|P}`5s;sZ6sqh!zu8$9C?IjuiTFx;PL8)lE_Vw0zR~>8qkNn4?_dgW(KOD2wOj@RF(+#@Te1|9M zC|m68e!uSBC&(OMA9L1A!NYELL>t=Yocj`)h0)CF>y@)Z@%1}yhUeDr|FH9rPS%)n z--qcEgVoMH2vkI7|2c2X%)W)(##nA+JhyqW^R}m8x^t%Yr6XGZfGfu+49tbg7pBXf zi!9bR&5lt_mbZ2-j|3xo34Gx6ExGWHB{%aB`1F(Jl$s+CL!fMxCR+4P(Y~r~FZTnl zqr1}hg9;w$pq3*cwx4k%vj9c#lPU*--|2^f%}6nUSSY|Q+PYHNleZRJW?AfSad7u0r7#6q+E@Ec%?Lo?DLI+cfnl^$=}Q+JM*zaCK0*6h-sQTpje3 z89dw}NY23l6RleE=Wdg(3xsKWzr3$+yDzNg3^CwitTc0jpik8M$ngOl5zD+Fh}AeC zdF|LG8*du+Rox>P%4EosaF#6Ur=dskkq{q3vO?6)Vy)>{TqJxeao+MA|Ba9xc@64q zKw}bq!Dqy11-i?h82Amb>u zlst+WR)rGs?xF}a>Dv{{SX7#-m=i&6=ABK8j*|J()|-J3933C#mQ2CoUYc+hPCgU0 z6|I!w2&du2{2k&@Fom)V!>{QyL)K)SsgtoKLg9fA7aLSJ88uKO+f0_iE^veY%s5&! 
zMtGV&a=@W~e=0m9v>a>#53n%)aC2g_*MZ)MG;!k7w8T>qG))rnOH)GaSA2J!f%C!8 zs1T4QO2;oi`}}WFG;{*RVXVn1c(Gx6-$HhEEW0|MjSuP3#>G9C_rx6~Gx!Q%-F)Nj z`8^LtpA3A^cq-v7dhyZelMA`EvE15tt{>!cUFB5g)Yho8el`b5!j(N~m*$A(MLv`{ z`X>6Kb&*p)JEFD9lFe2bSL2!+6M*Sc9RJ$+3=!jJ%7-~(Z#U>sDg`=NC_gb5>TIIe zX@`=_%wUKgX#}j@J6~Z0?`9zaX;1yV^YbH;FNTiKga!eDc3zV5DO6i+?s6M$YJ3PX zEk`9_WN0R7Iu#rm7RLz;Eh$1~-z{6xc}{)EjYY1vPR>;NL$K z9686Zj7~i5v@CZBeq-ned6pyGdfTn)4_sY9Eh}RJ>cF?B2p`)#xsX#E%c+g$_$RF& z**LeCN~@4>nRC?4Ze6I~5v$)3uitg6V6MI!NGg%(d7=Mue>`*DOx;}O`Wx1po98y{ zy>)P*qc_&k8}E29+IMoU<0Nu^i0brE9d>5tvyjF0F>Dj+!p9-;{YqCMcK%NHMs4M5 zkSGEDy#`+f0m0jcxM6+*E?^o(vBEEjP6M!@3Io!-Mu2(fAC_@tWlGW`K3$9Lp;u~T z+c+ZJNYWtB+`mSLo(lL)$sBls(IrgG{j|3o0kFj{V>+21S<(g*2m7W~3Q;9ZG@Hr% zRH1$rU|p?YIf?Pexiqt^XLZ)emVv&6+8H2FSl9YLcOHQV;>?Za*3UZ{Zo_8_U!B`9 z=V`mq5p}k~vv|_+=_mFSdtRTYSwH8jBtvBVUA9mE%a(HPHAl1YKl071IBt>7@EV(f zGg2@h@je;^YJ%8G$zLGRSc;NPL^v2Y85|0Rg9xLbgO|0>J=7gZL!NM#OrMFmYNNJV z2`2CwFDT40Q!t8)Ckg(fmky87F}!Z zT10%2Ii;vz>H?;+P)k`PjR?D|_uM35tF%%k$>OHIi}Yk@0r5wP( zeJU8}MpR{bNm^Ha4Xs&m_pY6lqxY?NCXz&fyJX!jKokTciMGyTHT`=!%jga)jcz_zHL1)b^pH;8MpOlij?R|2WTN zF1_P4n0n*S#s+iQ9f5b@Mg@CRf~%cwBvm$Z7k?+0-Q>;d?^rgY zwh^BFwwNE`@3>9K<=lwm49Y(QGrz-Je5c54&bgE2HCNnevf_O?yt(cZi^VMARMtyO zd^h!21zfrUr=tSA3TzC-7+`9jKS1SanMgQ@Ef|aVgi26O$SJ{invlWB9CQzeu!K3M|=)tE7XzxiL62 z5JDVT5mb&gN?kmU$VnpWyoGWTGcVdGu~P;oc&MQueWC+fOhw`-q=-aLx(ZOj=*yXn4;y%=D2|FrW8lJ|NF+b$K0h>u?|a~Tde}#EYG5!R64#K-O5Hva7((!;keEf4 zk@`z?Q<6+l=M(CK*=B_XDGQq$(JbkDkaZT@**YEl*~}Vf{FY-UA;|7loG;3 z5^`FOu;7d{JSvP01$K)sqZ#<)Lcfm`d>zk!WU%n2pSU@`@~=3@pK3z8UzD@8R(YmF+u!!e#LJOGVs{y}-{rzn{NjGw=@r z`R4E&@2uycIDyfjwuzoSgJ&i_^G;SSk|rx}xs&7KYwuJw@Huy$;P>(T`a6dV_4x4^ zzlV3<=`iH;IiIxdv-7pnj-POJ{&eYB7H9Jy06Omj|9z List[str]: + async def search_titles( + self, + query: str, + progress_callback: Optional[Callable[[str, Optional[int]], Any]] = None, + ) -> List[str]: """TODO: Suche auf der Zielseite implementieren.""" - _ = query + _ = (query, progress_callback) return [] def seasons_for(self, title: str) -> List[str]: 
diff --git a/addon/plugins/aniworld_plugin.py b/addon/plugins/aniworld_plugin.py index 7271a7a..943e865 100644 --- a/addon/plugins/aniworld_plugin.py +++ b/addon/plugins/aniworld_plugin.py @@ -13,7 +13,8 @@ import hashlib import json import re import time -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple +from urllib.parse import quote try: # pragma: no cover - optional dependency import requests @@ -43,8 +44,8 @@ if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any SETTING_BASE_URL = "aniworld_base_url" @@ -69,6 +70,16 @@ HEADERS = { SESSION_CACHE_TTL_SECONDS = 300 SESSION_CACHE_PREFIX = "viewit.aniworld" SESSION_CACHE_MAX_TITLE_URLS = 800 +ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return @dataclass @@ -126,7 +137,7 @@ def _latest_episodes_url() -> str: def _search_url(query: str) -> str: - return f"{_get_base_url()}/search?q={query}" + return f"{_get_base_url()}/search?q={quote((query or '').strip())}" def _search_api_url() -> str: @@ -289,37 +300,56 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif _ensure_requests() _log_visit(url) sess = session or get_requests_session("aniworld", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: _log_error(f"GET {url} failed: {exc}") raise - if response.url and 
response.url != url: - _log_url(response.url, kind="REDIRECT") - _log_response_html(url, response.text) - if _looks_like_cloudflare_challenge(response.text): - raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") - return BeautifulSoup(response.text, "html.parser") + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url(final_url, kind="REDIRECT") + _log_response_html(url, body) + if _looks_like_cloudflare_challenge(body): + raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") + return BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _get_html_simple(url: str) -> str: _ensure_requests() _log_visit(url) sess = get_requests_session("aniworld", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: _log_error(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url(response.url, kind="REDIRECT") - body = response.text - _log_response_html(url, body) - if _looks_like_cloudflare_challenge(body): - raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") - return body + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url(final_url, kind="REDIRECT") + _log_response_html(url, body) + if _looks_like_cloudflare_challenge(body): + raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. 
nicht aus.") + return body + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _get_soup_simple(url: str) -> BeautifulSoupT: @@ -351,17 +381,27 @@ def _post_json(url: str, *, payload: Dict[str, str], session: Optional[RequestsS _ensure_requests() _log_visit(url) sess = session or get_requests_session("aniworld", headers=HEADERS) - response = sess.post(url, data=payload, headers=HEADERS, timeout=DEFAULT_TIMEOUT) - response.raise_for_status() - if response.url and response.url != url: - _log_url(response.url, kind="REDIRECT") - _log_response_html(url, response.text) - if _looks_like_cloudflare_challenge(response.text): - raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") + response = None try: - return response.json() - except Exception: - return None + response = sess.post(url, data=payload, headers=HEADERS, timeout=DEFAULT_TIMEOUT) + response.raise_for_status() + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url(final_url, kind="REDIRECT") + _log_response_html(url, body) + if _looks_like_cloudflare_challenge(body): + raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. 
nicht aus.") + try: + return response.json() + except Exception: + return None + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _extract_canonical_url(soup: BeautifulSoupT, fallback: str) -> str: @@ -555,10 +595,18 @@ def resolve_redirect(target_url: str) -> Optional[str]: _log_visit(normalized_url) session = get_requests_session("aniworld", headers=HEADERS) _get_soup(_get_base_url(), session=session) - response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True) - if response.url: - _log_url(response.url, kind="RESOLVED") - return response.url if response.url else None + response = None + try: + response = session.get(normalized_url, headers=HEADERS, timeout=DEFAULT_TIMEOUT, allow_redirects=True) + if response.url: + _log_url(response.url, kind="RESOLVED") + return response.url if response.url else None + finally: + if response is not None: + try: + response.close() + except Exception: + pass def fetch_episode_hoster_names(episode_url: str) -> List[str]: @@ -629,11 +677,12 @@ def fetch_episode_stream_link( return resolved -def search_animes(query: str) -> List[SeriesResult]: +def search_animes(query: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]: _ensure_requests() query = (query or "").strip() if not query: return [] + _emit_progress(progress_callback, "AniWorld API-Suche", 15) session = get_requests_session("aniworld", headers=HEADERS) try: session.get(_get_base_url(), headers=HEADERS, timeout=DEFAULT_TIMEOUT) @@ -643,7 +692,9 @@ def search_animes(query: str) -> List[SeriesResult]: results: List[SeriesResult] = [] seen: set[str] = set() if isinstance(data, list): - for entry in data: + for idx, entry in enumerate(data, start=1): + if idx == 1 or idx % 50 == 0: + _emit_progress(progress_callback, f"API auswerten {idx}/{len(data)}", 35) if not isinstance(entry, dict): continue title = _strip_html((entry.get("title") or "").strip()) @@ -665,10 +716,16 
@@ def search_animes(query: str) -> List[SeriesResult]: seen.add(key) description = (entry.get("description") or "").strip() results.append(SeriesResult(title=title, description=description, url=url)) + _emit_progress(progress_callback, f"API-Treffer: {len(results)}", 85) return results - soup = _get_soup_simple(_search_url(requests.utils.quote(query))) - for anchor in soup.select("a[href^='/anime/stream/'][href]"): + _emit_progress(progress_callback, "HTML-Suche (Fallback)", 55) + soup = _get_soup_simple(_search_url(query)) + anchors = soup.select("a[href^='/anime/stream/'][href]") + total_anchors = max(1, len(anchors)) + for idx, anchor in enumerate(anchors, start=1): + if idx == 1 or idx % 100 == 0: + _emit_progress(progress_callback, f"HTML auswerten {idx}/{total_anchors}", 70) href = (anchor.get("href") or "").strip() if not href or "/staffel-" in href or "/episode-" in href: continue @@ -686,6 +743,7 @@ def search_animes(query: str) -> List[SeriesResult]: continue seen.add(key) results.append(SeriesResult(title=title, description="", url=url)) + _emit_progress(progress_callback, f"HTML-Treffer: {len(results)}", 85) return results @@ -696,6 +754,7 @@ class AniworldPlugin(BasisPlugin): def __init__(self) -> None: self._anime_results: Dict[str, SeriesResult] = {} self._title_url_cache: Dict[str, str] = self._load_title_url_cache() + self._title_meta: Dict[str, tuple[str, str]] = {} self._genre_names_cache: Optional[List[str]] = None self._season_cache: Dict[str, List[SeasonInfo]] = {} self._season_links_cache: Dict[str, List[SeasonInfo]] = {} @@ -760,8 +819,135 @@ class AniworldPlugin(BasisPlugin): changed = True if changed and persist: self._save_title_url_cache() + if description: + old_plot, old_poster = self._title_meta.get(title, ("", "")) + self._title_meta[title] = (description.strip() or old_plot, old_poster) return changed + def _store_title_meta(self, title: str, *, plot: str = "", poster: str = "") -> None: + title = (title or "").strip() + if not 
title: + return + old_plot, old_poster = self._title_meta.get(title, ("", "")) + merged_plot = (plot or old_plot or "").strip() + merged_poster = (poster or old_poster or "").strip() + self._title_meta[title] = (merged_plot, merged_poster) + + @staticmethod + def _is_series_image_url(url: str) -> bool: + value = (url or "").strip().casefold() + if not value: + return False + blocked = ( + "/public/img/facebook", + "/public/img/logo", + "aniworld-logo", + "favicon", + "/public/img/german.svg", + "/public/img/japanese-", + ) + return not any(marker in value for marker in blocked) + + @staticmethod + def _extract_style_url(style_value: str) -> str: + style_value = (style_value or "").strip() + if not style_value: + return "" + match = re.search(r"url\((['\"]?)(.*?)\1\)", style_value, flags=re.IGNORECASE) + if not match: + return "" + return (match.group(2) or "").strip() + + def _extract_series_metadata(self, soup: BeautifulSoupT) -> tuple[str, str, str]: + if not soup: + return "", "", "" + plot = "" + poster = "" + fanart = "" + + root = soup.select_one("#series") or soup + + description_node = root.select_one("p.seri_des") + if description_node is not None: + full_text = (description_node.get("data-full-description") or "").strip() + short_text = (description_node.get_text(" ", strip=True) or "").strip() + plot = full_text or short_text + + if not plot: + for selector in ("meta[property='og:description']", "meta[name='description']"): + node = soup.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + plot = content + break + if not plot: + for selector in (".series-description", ".seri_des", ".description", "article p"): + node = soup.select_one(selector) + if node is None: + continue + text = (node.get_text(" ", strip=True) or "").strip() + if text: + plot = text + break + + cover = root.select_one("div.seriesCoverBox img[itemprop='image'], div.seriesCoverBox img") + if cover is not None: + for attr in 
("data-src", "src"): + value = (cover.get(attr) or "").strip() + if value: + candidate = _absolute_url(value) + if self._is_series_image_url(candidate): + poster = candidate + break + + if not poster: + for selector in ("meta[property='og:image']", "meta[name='twitter:image']"): + node = soup.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + candidate = _absolute_url(content) + if self._is_series_image_url(candidate): + poster = candidate + break + if not poster: + for selector in ("img.seriesCoverBox", ".seriesCoverBox img"): + image = soup.select_one(selector) + if image is None: + continue + value = (image.get("data-src") or image.get("src") or "").strip() + if value: + candidate = _absolute_url(value) + if self._is_series_image_url(candidate): + poster = candidate + break + + backdrop_node = root.select_one("section.title .backdrop, .SeriesSection .backdrop, .backdrop") + if backdrop_node is not None: + raw_style = (backdrop_node.get("style") or "").strip() + style_url = self._extract_style_url(raw_style) + if style_url: + candidate = _absolute_url(style_url) + if self._is_series_image_url(candidate): + fanart = candidate + + if not fanart: + for selector in ("meta[property='og:image']",): + node = soup.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + candidate = _absolute_url(content) + if self._is_series_image_url(candidate): + fanart = candidate + break + + return plot, poster, fanart + @staticmethod def _season_links_cache_name(series_url: str) -> str: digest = hashlib.sha1((series_url or "").encode("utf-8")).hexdigest()[:20] @@ -893,6 +1079,43 @@ class AniworldPlugin(BasisPlugin): return None + def metadata_for(self, title: str) -> tuple[dict[str, str], dict[str, str], list[object] | None]: + title = (title or "").strip() + if not title: + return {}, {}, None + + info: dict[str, str] = {"title": title} + art: dict[str, str] = 
{} + cached_plot, cached_poster = self._title_meta.get(title, ("", "")) + if cached_plot: + info["plot"] = cached_plot + if cached_poster: + art = {"thumb": cached_poster, "poster": cached_poster} + if "plot" in info and art: + return info, art, None + + series = self._find_series_by_title(title) + if series is None or not series.url: + return info, art, None + if series.description and "plot" not in info: + info["plot"] = series.description + + try: + soup = _get_soup(series.url, session=get_requests_session("aniworld", headers=HEADERS)) + plot, poster, fanart = self._extract_series_metadata(soup) + except Exception: + plot, poster, fanart = "", "", "" + + if plot: + info["plot"] = plot + if poster: + art = {"thumb": poster, "poster": poster} + if fanart: + art["fanart"] = fanart + art["landscape"] = fanart + self._store_title_meta(title, plot=info.get("plot", ""), poster=poster) + return info, art, None + def _ensure_popular(self) -> List[SeriesResult]: if self._popular_cache is not None: return list(self._popular_cache) @@ -1151,7 +1374,7 @@ class AniworldPlugin(BasisPlugin): return self._episode_label_cache.get(cache_key, {}).get(episode_label) return None - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: query = (query or "").strip() if not query: self._anime_results.clear() @@ -1163,7 +1386,8 @@ class AniworldPlugin(BasisPlugin): if not self._requests_available: raise RuntimeError("AniworldPlugin kann ohne requests/bs4 nicht suchen.") try: - results = search_animes(query) + _emit_progress(progress_callback, "AniWorld Suche startet", 10) + results = search_animes(query, progress_callback=progress_callback) except Exception as exc: # pragma: no cover self._anime_results.clear() self._season_cache.clear() @@ -1178,6 +1402,7 @@ class AniworldPlugin(BasisPlugin): self._season_cache.clear() self._season_links_cache.clear() self._episode_label_cache.clear() 
+ _emit_progress(progress_callback, f"Treffer aufbereitet: {len(results)}", 95) return [result.title for result in results] def _ensure_seasons(self, title: str) -> List[SeasonInfo]: @@ -1213,6 +1438,18 @@ class AniworldPlugin(BasisPlugin): _log_url(link, kind="FOUND") return link + def episode_url_for(self, title: str, season: str, episode: str) -> str: + cache_key = (title, season) + cached = self._episode_label_cache.get(cache_key) + if cached: + info = cached.get(episode) + if info and info.url: + return info.url + episode_info = self._lookup_episode(title, season, episode) + if episode_info and episode_info.url: + return episode_info.url + return "" + def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]: if not self._requests_available: raise RuntimeError("AniworldPlugin kann ohne requests/bs4 keine Hoster laden.") diff --git a/addon/plugins/dokustreams_plugin.py b/addon/plugins/dokustreams_plugin.py index 047a652..58fffc8 100644 --- a/addon/plugins/dokustreams_plugin.py +++ b/addon/plugins/dokustreams_plugin.py @@ -5,7 +5,7 @@ from __future__ import annotations from dataclasses import dataclass import re from urllib.parse import quote -from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypeAlias +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional try: # pragma: no cover - optional dependency import requests @@ -27,8 +27,8 @@ if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any ADDON_ID = "plugin.video.viewit" @@ -44,6 +44,16 @@ SETTING_LOG_URLS = "log_urls_dokustreams" SETTING_DUMP_HTML = "dump_html_dokustreams" SETTING_SHOW_URL_INFO = "show_url_info_dokustreams" SETTING_LOG_ERRORS = "log_errors_dokustreams" +ProgressCallback = Optional[Callable[[str, 
Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return HEADERS = { "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", @@ -213,16 +223,26 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif raise RuntimeError("requests/bs4 sind nicht verfuegbar.") _log_visit(url) sess = session or get_requests_session("dokustreams", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: _log_error_message(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url_event(response.url, kind="REDIRECT") - _log_response_html(url, response.text) - return BeautifulSoup(response.text, "html.parser") + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url_event(final_url, kind="REDIRECT") + _log_response_html(url, body) + return BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass class DokuStreamsPlugin(BasisPlugin): @@ -247,14 +267,17 @@ class DokuStreamsPlugin(BasisPlugin): if REQUESTS_IMPORT_ERROR: print(f"DokuStreamsPlugin Importfehler: {REQUESTS_IMPORT_ERROR}") - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: + _emit_progress(progress_callback, "Doku-Streams Suche", 15) hits = self._search_hits(query) + _emit_progress(progress_callback, f"Treffer verarbeiten ({len(hits)})", 70) self._title_to_url = 
{hit.title: hit.url for hit in hits if hit.title and hit.url} for hit in hits: if hit.title: self._title_meta[hit.title] = (hit.plot, hit.poster) titles = [hit.title for hit in hits if hit.title] titles.sort(key=lambda value: value.casefold()) + _emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95) return titles def _search_hits(self, query: str) -> List[SearchHit]: diff --git a/addon/plugins/einschalten_plugin.py b/addon/plugins/einschalten_plugin.py index 62f35b5..b6aea00 100644 --- a/addon/plugins/einschalten_plugin.py +++ b/addon/plugins/einschalten_plugin.py @@ -11,7 +11,7 @@ from __future__ import annotations import json import re from dataclasses import dataclass -from typing import Any, Dict, List, Optional, Set +from typing import Any, Callable, Dict, List, Optional, Set from urllib.parse import urlencode, urljoin, urlsplit try: # pragma: no cover - optional dependency (Kodi dependency) @@ -43,7 +43,7 @@ SETTING_DUMP_HTML = "dump_html_einschalten" SETTING_SHOW_URL_INFO = "show_url_info_einschalten" SETTING_LOG_ERRORS = "log_errors_einschalten" -DEFAULT_BASE_URL = "" +DEFAULT_BASE_URL = "https://einschalten.in" DEFAULT_INDEX_PATH = "/" DEFAULT_NEW_TITLES_PATH = "/movies/new" DEFAULT_SEARCH_PATH = "/search" @@ -56,6 +56,16 @@ HEADERS = { "Accept-Language": "de-DE,de;q=0.9,en;q=0.8", "Connection": "keep-alive", } +ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return @dataclass(frozen=True) @@ -526,6 +536,34 @@ class EinschaltenPlugin(BasisPlugin): self._session = requests.Session() return self._session + def _http_get_text(self, url: str, *, timeout: int = 20) -> tuple[str, str]: + _log_url(url, kind="GET") + _notify_url(url) + sess = self._get_session() + response = None + try: + 
response = sess.get(url, headers=HEADERS, timeout=timeout) + response.raise_for_status() + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + _log_url(final_url, kind="OK") + _log_response_html(final_url, body) + return final_url, body + finally: + if response is not None: + try: + response.close() + except Exception: + pass + + def _http_get_json(self, url: str, *, timeout: int = 20) -> tuple[str, Any]: + final_url, body = self._http_get_text(url, timeout=timeout) + try: + payload = json.loads(body or "{}") + except Exception: + payload = {} + return final_url, payload + def _get_base_url(self) -> str: base = _get_setting_text(SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip() return base.rstrip("/") @@ -646,15 +684,9 @@ class EinschaltenPlugin(BasisPlugin): if not url: return "" try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - self._detail_html_by_id[movie_id] = resp.text or "" - return resp.text or "" + _, body = self._http_get_text(url, timeout=20) + self._detail_html_by_id[movie_id] = body + return body except Exception as exc: _log_error(f"GET {url} failed: {exc}") return "" @@ -667,16 +699,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return {} try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - # Some backends may return JSON with a JSON content-type; for debugging we still dump text. 
- _log_response_html(resp.url or url, resp.text) - data = resp.json() - return dict(data) if isinstance(data, dict) else {} + _, data = self._http_get_json(url, timeout=20) + return data except Exception as exc: _log_error(f"GET {url} failed: {exc}") return {} @@ -741,14 +765,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return [] try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) return _parse_ng_state_movies(payload) except Exception: return [] @@ -759,14 +777,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return [] try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) movies = _parse_ng_state_movies(payload) _log_debug_line(f"parse_ng_state_movies:count={len(movies)}") if movies: @@ -784,14 +796,8 @@ class EinschaltenPlugin(BasisPlugin): if page > 1: url = f"{url}?{urlencode({'page': str(page)})}" try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) movies, has_more, current_page = _parse_ng_state_movies_with_pagination(payload) 
_log_debug_line(f"parse_ng_state_movies_page:page={page} count={len(movies)}") if has_more is not None: @@ -844,14 +850,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return [] try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) results = _parse_ng_state_search_results(payload) return _filter_movies_by_title(query, results) except Exception: @@ -867,13 +867,7 @@ class EinschaltenPlugin(BasisPlugin): api_url = self._api_genres_url() if api_url: try: - _log_url(api_url, kind="GET") - _notify_url(api_url) - sess = self._get_session() - resp = sess.get(api_url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or api_url, kind="OK") - payload = resp.json() + _, payload = self._http_get_json(api_url, timeout=20) if isinstance(payload, list): parsed: Dict[str, int] = {} for item in payload: @@ -900,14 +894,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) parsed = _parse_ng_state_genres(payload) if parsed: self._genre_id_by_name.clear() @@ -915,7 +903,7 @@ class EinschaltenPlugin(BasisPlugin): except Exception: return - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: if not REQUESTS_AVAILABLE: return [] 
query = (query or "").strip() @@ -924,9 +912,12 @@ class EinschaltenPlugin(BasisPlugin): if not self._get_base_url(): return [] + _emit_progress(progress_callback, "Einschalten Suche", 15) movies = self._fetch_search_movies(query) if not movies: + _emit_progress(progress_callback, "Fallback: Index filtern", 45) movies = _filter_movies_by_title(query, self._load_movies()) + _emit_progress(progress_callback, f"Treffer verarbeiten ({len(movies)})", 75) titles: List[str] = [] seen: set[str] = set() for movie in movies: @@ -936,6 +927,7 @@ class EinschaltenPlugin(BasisPlugin): self._id_by_title[movie.title] = movie.id titles.append(movie.title) titles.sort(key=lambda value: value.casefold()) + _emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95) return titles def genres(self) -> List[str]: @@ -971,14 +963,8 @@ class EinschaltenPlugin(BasisPlugin): if not url: return [] try: - _log_url(url, kind="GET") - _notify_url(url) - sess = self._get_session() - resp = sess.get(url, headers=HEADERS, timeout=20) - resp.raise_for_status() - _log_url(resp.url or url, kind="OK") - _log_response_html(resp.url or url, resp.text) - payload = _extract_ng_state_payload(resp.text) + _, body = self._http_get_text(url, timeout=20) + payload = _extract_ng_state_payload(body) except Exception: return [] if not isinstance(payload, dict): @@ -1079,3 +1065,7 @@ class EinschaltenPlugin(BasisPlugin): return [] # Backwards compatible: first page only. UI uses paging via `new_titles_page`. return self.new_titles_page(1) + + +# Alias für die automatische Plugin-Erkennung. 
+Plugin = EinschaltenPlugin diff --git a/addon/plugins/filmpalast_plugin.py b/addon/plugins/filmpalast_plugin.py index 82c6509..cbabffd 100644 --- a/addon/plugins/filmpalast_plugin.py +++ b/addon/plugins/filmpalast_plugin.py @@ -11,7 +11,7 @@ from dataclasses import dataclass import re from urllib.parse import quote, urlencode from urllib.parse import urljoin -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple try: # pragma: no cover - optional dependency import requests @@ -33,8 +33,8 @@ if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any ADDON_ID = "plugin.video.viewit" @@ -53,6 +53,16 @@ SETTING_LOG_URLS = "log_urls_filmpalast" SETTING_DUMP_HTML = "dump_html_filmpalast" SETTING_SHOW_URL_INFO = "show_url_info_filmpalast" SETTING_LOG_ERRORS = "log_errors_filmpalast" +ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return HEADERS = { "User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", @@ -206,16 +216,26 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif raise RuntimeError("requests/bs4 sind nicht verfuegbar.") _log_visit(url) sess = session or get_requests_session("filmpalast", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() 
except Exception as exc: _log_error_message(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url_event(response.url, kind="REDIRECT") - _log_response_html(url, response.text) - return BeautifulSoup(response.text, "html.parser") + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url_event(final_url, kind="REDIRECT") + _log_response_html(url, body) + return BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass class FilmpalastPlugin(BasisPlugin): @@ -224,6 +244,7 @@ class FilmpalastPlugin(BasisPlugin): def __init__(self) -> None: self._title_to_url: Dict[str, str] = {} + self._title_meta: Dict[str, tuple[str, str]] = {} self._series_entries: Dict[str, Dict[int, Dict[int, EpisodeEntry]]] = {} self._hoster_cache: Dict[str, Dict[str, str]] = {} self._genre_to_url: Dict[str, str] = {} @@ -352,6 +373,7 @@ class FilmpalastPlugin(BasisPlugin): seen_titles: set[str] = set() seen_urls: set[str] = set() for base_url, params in search_requests: + response = None try: request_url = base_url if not params else f"{base_url}?{urlencode(params)}" _log_url_event(request_url, kind="GET") @@ -365,6 +387,12 @@ class FilmpalastPlugin(BasisPlugin): except Exception as exc: _log_error_message(f"search request failed ({base_url}): {exc}") continue + finally: + if response is not None: + try: + response.close() + except Exception: + pass anchors = soup.select("article.liste h2 a[href], article.liste h3 a[href]") if not anchors: @@ -466,9 +494,13 @@ class FilmpalastPlugin(BasisPlugin): titles.sort(key=lambda value: value.casefold()) return titles - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: + _emit_progress(progress_callback, "Filmpalast Suche", 15) hits = 
self._search_hits(query) - return self._apply_hits_to_title_index(hits) + _emit_progress(progress_callback, f"Treffer verarbeiten ({len(hits)})", 70) + titles = self._apply_hits_to_title_index(hits) + _emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95) + return titles def _parse_genres(self, soup: BeautifulSoupT) -> Dict[str, str]: genres: Dict[str, str] = {} @@ -691,6 +723,64 @@ class FilmpalastPlugin(BasisPlugin): return hit.url return "" + def _store_title_meta(self, title: str, *, plot: str = "", poster: str = "") -> None: + title = (title or "").strip() + if not title: + return + old_plot, old_poster = self._title_meta.get(title, ("", "")) + merged_plot = (plot or old_plot or "").strip() + merged_poster = (poster or old_poster or "").strip() + self._title_meta[title] = (merged_plot, merged_poster) + + def _extract_detail_metadata(self, soup: BeautifulSoupT) -> tuple[str, str]: + if not soup: + return "", "" + root = soup.select_one("div#content[role='main']") or soup + detail = root.select_one("article.detail") or root + plot = "" + poster = "" + + # Filmpalast Detailseite: bevorzugt den dedizierten Filmhandlung-Block. + plot_node = detail.select_one( + "li[itemtype='http://schema.org/Movie'] span[itemprop='description']" + ) + if plot_node is not None: + plot = (plot_node.get_text(" ", strip=True) or "").strip() + if not plot: + hidden_plot = detail.select_one("cite span.hidden") + if hidden_plot is not None: + plot = (hidden_plot.get_text(" ", strip=True) or "").strip() + if not plot: + for selector in ("meta[property='og:description']", "meta[name='description']"): + node = root.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + plot = content + break + + # Filmpalast Detailseite: Cover liegt stabil in `img.cover2`. 
+ cover = detail.select_one("img.cover2") + if cover is not None: + value = (cover.get("data-src") or cover.get("src") or "").strip() + if value: + candidate = _absolute_url(value) + lower = candidate.casefold() + if "/themes/" not in lower and "spacer.gif" not in lower and "/files/movies/" in lower: + poster = candidate + if not poster: + thumb_node = detail.select_one("li[itemtype='http://schema.org/Movie'] img[itemprop='image']") + if thumb_node is not None: + value = (thumb_node.get("data-src") or thumb_node.get("src") or "").strip() + if value: + candidate = _absolute_url(value) + lower = candidate.casefold() + if "/themes/" not in lower and "spacer.gif" not in lower and "/files/movies/" in lower: + poster = candidate + + return plot, poster + def remember_series_url(self, title: str, series_url: str) -> None: title = (title or "").strip() series_url = (series_url or "").strip() @@ -711,6 +801,52 @@ class FilmpalastPlugin(BasisPlugin): return _series_hint_value(series_key) return "" + def metadata_for(self, title: str) -> tuple[dict[str, str], dict[str, str], list[object] | None]: + title = (title or "").strip() + if not title: + return {}, {}, None + + info: dict[str, str] = {"title": title} + art: dict[str, str] = {} + cached_plot, cached_poster = self._title_meta.get(title, ("", "")) + if cached_plot: + info["plot"] = cached_plot + if cached_poster: + art = {"thumb": cached_poster, "poster": cached_poster} + if "plot" in info and art: + return info, art, None + + detail_url = self._ensure_title_url(title) + if not detail_url: + series_key = self._series_key_for_title(title) or self._ensure_series_entries_for_title(title) + if series_key: + seasons = self._series_entries.get(series_key, {}) + first_entry: Optional[EpisodeEntry] = None + for season_number in sorted(seasons.keys()): + episodes = seasons.get(season_number, {}) + for episode_number in sorted(episodes.keys()): + first_entry = episodes.get(episode_number) + if first_entry is not None: + break + if 
first_entry is not None: + break + detail_url = first_entry.url if first_entry is not None else "" + if not detail_url: + return info, art, None + + try: + soup = _get_soup(detail_url, session=get_requests_session("filmpalast", headers=HEADERS)) + plot, poster = self._extract_detail_metadata(soup) + except Exception: + plot, poster = "", "" + + if plot: + info["plot"] = plot + if poster: + art = {"thumb": poster, "poster": poster} + self._store_title_meta(title, plot=info.get("plot", ""), poster=poster) + return info, art, None + def is_movie(self, title: str) -> bool: title = (title or "").strip() if not title: @@ -820,11 +956,23 @@ class FilmpalastPlugin(BasisPlugin): def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]: detail_url = self._detail_url_for_selection(title, season, episode) - hosters = self._hosters_for_detail_url(detail_url) - return list(hosters.keys()) + return self.available_hosters_for_url(detail_url) def stream_link_for(self, title: str, season: str, episode: str) -> Optional[str]: detail_url = self._detail_url_for_selection(title, season, episode) + return self.stream_link_for_url(detail_url) + + def episode_url_for(self, title: str, season: str, episode: str) -> str: + detail_url = self._detail_url_for_selection(title, season, episode) + return (detail_url or "").strip() + + def available_hosters_for_url(self, episode_url: str) -> List[str]: + detail_url = (episode_url or "").strip() + hosters = self._hosters_for_detail_url(detail_url) + return list(hosters.keys()) + + def stream_link_for_url(self, episode_url: str) -> Optional[str]: + detail_url = (episode_url or "").strip() if not detail_url: return None hosters = self._hosters_for_detail_url(detail_url) @@ -901,6 +1049,7 @@ class FilmpalastPlugin(BasisPlugin): redirected = link if self._requests_available: + response = None try: session = get_requests_session("filmpalast", headers=HEADERS) response = session.get(link, headers=HEADERS, 
timeout=DEFAULT_TIMEOUT, allow_redirects=True) @@ -908,6 +1057,12 @@ class FilmpalastPlugin(BasisPlugin): redirected = (response.url or link).strip() or link except Exception: redirected = link + finally: + if response is not None: + try: + response.close() + except Exception: + pass # 2) Danach optional die Redirect-URL nochmals auflösen. if callable(resolve_with_resolveurl) and redirected and redirected != link: @@ -922,3 +1077,7 @@ class FilmpalastPlugin(BasisPlugin): _log_url_event(redirected, kind="FINAL") return redirected return None + + +# Alias für die automatische Plugin-Erkennung. +Plugin = FilmpalastPlugin diff --git a/addon/plugins/serienstream_plugin.py b/addon/plugins/serienstream_plugin.py index d2f67f3..cfdb1f0 100644 --- a/addon/plugins/serienstream_plugin.py +++ b/addon/plugins/serienstream_plugin.py @@ -17,7 +17,8 @@ import os import re import time import unicodedata -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypeAlias +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple +from urllib.parse import quote try: # pragma: no cover - optional dependency import requests @@ -49,14 +50,15 @@ if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any SETTING_BASE_URL = "serienstream_base_url" DEFAULT_BASE_URL = "https://s.to" DEFAULT_PREFERRED_HOSTERS = ["voe"] DEFAULT_TIMEOUT = 20 +SEARCH_TIMEOUT = 8 ADDON_ID = "plugin.video.viewit" GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" @@ -75,6 +77,19 @@ HEADERS = { SESSION_CACHE_TTL_SECONDS = 300 SESSION_CACHE_PREFIX = "viewit.serienstream" SESSION_CACHE_MAX_TITLE_URLS = 800 +CATALOG_SEARCH_TTL_SECONDS = 600 +CATALOG_SEARCH_CACHE_KEY = "catalog_index" +_CATALOG_INDEX_MEMORY: 
tuple[float, List["SeriesResult"]] = (0.0, []) +ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return @dataclass @@ -111,6 +126,57 @@ class SeasonInfo: episodes: List[EpisodeInfo] +def _extract_series_metadata(soup: BeautifulSoupT) -> Tuple[Dict[str, str], Dict[str, str]]: + info: Dict[str, str] = {} + art: Dict[str, str] = {} + if not soup: + return info, art + + title_tag = soup.select_one("h1") + title = (title_tag.get_text(" ", strip=True) if title_tag else "").strip() + if title: + info["title"] = title + + description = "" + desc_tag = soup.select_one(".series-description .description-text") + if desc_tag: + description = (desc_tag.get_text(" ", strip=True) or "").strip() + if not description: + meta_desc = soup.select_one("meta[property='og:description'], meta[name='description']") + if meta_desc: + description = (meta_desc.get("content") or "").strip() + if description: + info["plot"] = description + + poster = "" + poster_tag = soup.select_one( + ".show-cover-mobile img[data-src], .show-cover-mobile img[src], .col-3 img[data-src], .col-3 img[src]" + ) + if poster_tag: + poster = (poster_tag.get("data-src") or poster_tag.get("src") or "").strip() + if not poster: + for candidate in soup.select("img[data-src], img[src]"): + url = (candidate.get("data-src") or candidate.get("src") or "").strip() + if "/media/images/channel/" in url: + poster = url + break + if poster: + poster = _absolute_url(poster) + art["poster"] = poster + art["thumb"] = poster + + fanart = "" + fanart_tag = soup.select_one("meta[property='og:image']") + if fanart_tag: + fanart = (fanart_tag.get("content") or "").strip() + if fanart: + fanart = _absolute_url(fanart) + art["fanart"] = fanart + art["landscape"] = fanart + + return 
info, art + + def _get_base_url() -> str: base = get_setting_string(ADDON_ID, SETTING_BASE_URL, default=DEFAULT_BASE_URL).strip() if not base: @@ -342,37 +408,56 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif _ensure_requests() _log_visit(url) sess = session or get_requests_session("serienstream", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: _log_error(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url(response.url, kind="REDIRECT") - _log_response_html(url, response.text) - if _looks_like_cloudflare_challenge(response.text): - raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") - return BeautifulSoup(response.text, "html.parser") + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url(final_url, kind="REDIRECT") + _log_response_html(url, body) + if _looks_like_cloudflare_challenge(body): + raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") + return BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _get_html_simple(url: str) -> str: _ensure_requests() _log_visit(url) sess = get_requests_session("serienstream", headers=HEADERS) + response = None try: response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: _log_error(f"GET {url} failed: {exc}") raise - if response.url and response.url != url: - _log_url(response.url, kind="REDIRECT") - body = response.text - _log_response_html(url, body) - if _looks_like_cloudflare_challenge(body): - raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. 
nicht aus.") - return body + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + if final_url != url: + _log_url(final_url, kind="REDIRECT") + _log_response_html(url, body) + if _looks_like_cloudflare_challenge(body): + raise RuntimeError("Cloudflare-Schutz erkannt. requests reicht ggf. nicht aus.") + return body + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _get_soup_simple(url: str) -> BeautifulSoupT: @@ -400,20 +485,238 @@ def _extract_genre_names_from_html(body: str) -> List[str]: return names -def search_series(query: str) -> List[SeriesResult]: - """Sucht Serien im (/serien)-Katalog (Genre-liste) nach Titel/Alt-Titel.""" +def _strip_tags(value: str) -> str: + return re.sub(r"<[^>]+>", " ", value or "") + + +def _search_series_api(query: str) -> List[SeriesResult]: + query = (query or "").strip() + if not query: + return [] + _ensure_requests() + sess = get_requests_session("serienstream", headers=HEADERS) + terms = [query] + if " " in query: + # Fallback: einzelne Tokens liefern in der API oft bessere Treffer. 
+ terms.extend([token for token in query.split() if token]) + seen_urls: set[str] = set() + for term in terms: + response = None + try: + response = sess.get( + f"{_get_base_url()}/api/search/suggest", + params={"term": term}, + headers=HEADERS, + timeout=SEARCH_TIMEOUT, + ) + response.raise_for_status() + except Exception: + continue + try: + payload = response.json() + except Exception: + continue + finally: + if response is not None: + try: + response.close() + except Exception: + pass + shows = payload.get("shows") if isinstance(payload, dict) else None + if not isinstance(shows, list): + continue + results: List[SeriesResult] = [] + for item in shows: + if not isinstance(item, dict): + continue + title = (item.get("name") or "").strip() + href = (item.get("url") or "").strip() + if not title or not href: + continue + url_abs = _absolute_url(href) + if not url_abs or url_abs in seen_urls: + continue + if "/staffel-" in url_abs or "/episode-" in url_abs: + continue + seen_urls.add(url_abs) + results.append(SeriesResult(title=title, description="", url=url_abs)) + if not results: + continue + filtered = [entry for entry in results if _matches_query(query, title=entry.title)] + if filtered: + return filtered + # Falls nur Token-Suche möglich war, zumindest die Ergebnisse liefern. 
+ if term != query: + return results + return [] + + +def _search_series_server(query: str) -> List[SeriesResult]: + if not query: + return [] + api_results = _search_series_api(query) + if api_results: + return api_results + base = _get_base_url() + search_url = f"{base}/search?q={quote(query)}" + alt_url = f"{base}/suche?q={quote(query)}" + for url in (search_url, alt_url): + try: + body = _get_html_simple(url) + except Exception: + continue + if not body: + continue + soup = BeautifulSoup(body, "html.parser") + root = soup.select_one(".search-results-list") + if root is None: + continue + seen_urls: set[str] = set() + results: List[SeriesResult] = [] + for card in root.select(".cover-card"): + anchor = card.select_one("a[href*='/serie/']") + if not anchor: + continue + href = (anchor.get("href") or "").strip() + url_abs = _absolute_url(href) + if not url_abs or url_abs in seen_urls: + continue + if "/staffel-" in url_abs or "/episode-" in url_abs: + continue + title_tag = card.select_one(".show-title") or card.select_one("h3") or card.select_one("h4") + title = (title_tag.get_text(" ", strip=True) if title_tag else anchor.get_text(" ", strip=True)).strip() + if not title: + continue + seen_urls.add(url_abs) + results.append(SeriesResult(title=title, description="", url=url_abs)) + if results: + return results + return [] + + +def _extract_catalog_index_from_html(body: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]: + items: List[SeriesResult] = [] + if not body: + return items + seen_urls: set[str] = set() + item_re = re.compile( + r"]*class=[\"'][^\"']*series-item[^\"']*[\"'][^>]*>(.*?)", + re.IGNORECASE | re.DOTALL, + ) + anchor_re = re.compile(r"]+href=[\"']([^\"']+)[\"'][^>]*>(.*?)", re.IGNORECASE | re.DOTALL) + data_search_re = re.compile(r"data-search=[\"']([^\"']*)[\"']", re.IGNORECASE) + for idx, match in enumerate(item_re.finditer(body), start=1): + if idx == 1 or idx % 200 == 0: + _emit_progress(progress_callback, f"Katalog 
parsen {idx}", 62) + block = match.group(0) + inner = match.group(1) or "" + anchor_match = anchor_re.search(inner) + if not anchor_match: + continue + href = (anchor_match.group(1) or "").strip() + url = _absolute_url(href) + if not url or "/serie/" not in url or "/staffel-" in url or "/episode-" in url: + continue + if url in seen_urls: + continue + seen_urls.add(url) + title_raw = anchor_match.group(2) or "" + title = unescape(re.sub(r"\s+", " ", _strip_tags(title_raw))).strip() + if not title: + continue + search_match = data_search_re.search(block) + description = (search_match.group(1) or "").strip() if search_match else "" + items.append(SeriesResult(title=title, description=description, url=url)) + return items + + +def _catalog_index_from_soup(soup: BeautifulSoupT) -> List[SeriesResult]: + items: List[SeriesResult] = [] + if not soup: + return items + seen_urls: set[str] = set() + for item in soup.select("li.series-item"): + anchor = item.find("a", href=True) + if not anchor: + continue + href = (anchor.get("href") or "").strip() + url = _absolute_url(href) + if not url or "/serie/" not in url or "/staffel-" in url or "/episode-" in url: + continue + if url in seen_urls: + continue + seen_urls.add(url) + title = (anchor.get_text(" ", strip=True) or "").strip() + if not title: + continue + description = (item.get("data-search") or "").strip() + items.append(SeriesResult(title=title, description=description, url=url)) + return items + + +def _load_catalog_index_from_cache() -> Optional[List[SeriesResult]]: + global _CATALOG_INDEX_MEMORY + expires_at, cached = _CATALOG_INDEX_MEMORY + if cached and expires_at > time.time(): + return list(cached) + raw = _session_cache_get(CATALOG_SEARCH_CACHE_KEY) + if not isinstance(raw, list): + return None + items: List[SeriesResult] = [] + for entry in raw: + if not isinstance(entry, list) or len(entry) < 2: + continue + title = str(entry[0] or "").strip() + url = str(entry[1] or "").strip() + description = str(entry[2] or 
"") if len(entry) > 2 else "" + if title and url: + items.append(SeriesResult(title=title, description=description, url=url)) + if items: + _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(items)) + return items or None + + +def _store_catalog_index_in_cache(items: List[SeriesResult]) -> None: + global _CATALOG_INDEX_MEMORY + if not items: + return + _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(items)) + payload: List[List[str]] = [] + for entry in items: + if not entry.title or not entry.url: + continue + payload.append([entry.title, entry.url, entry.description]) + _session_cache_set(CATALOG_SEARCH_CACHE_KEY, payload, ttl_seconds=CATALOG_SEARCH_TTL_SECONDS) + + +def search_series(query: str, *, progress_callback: ProgressCallback = None) -> List[SeriesResult]: + """Sucht Serien im (/serien)-Katalog nach Titel. Nutzt Cache + Ein-Pass-Filter.""" _ensure_requests() if not _normalize_search_text(query): return [] - # Direkter Abruf wie in fetch_serien.py. 
+ _emit_progress(progress_callback, "Server-Suche", 15) + server_results = _search_series_server(query) + if server_results: + _emit_progress(progress_callback, f"Server-Treffer: {len(server_results)}", 35) + return [entry for entry in server_results if entry.title and _matches_query(query, title=entry.title)] + _emit_progress(progress_callback, "Pruefe Such-Cache", 42) + cached = _load_catalog_index_from_cache() + if cached is not None: + _emit_progress(progress_callback, f"Cache-Treffer: {len(cached)}", 52) + return [entry for entry in cached if entry.title and _matches_query(query, title=entry.title)] + + _emit_progress(progress_callback, "Lade Katalogseite", 58) catalog_url = f"{_get_base_url()}/serien?by=genre" - soup = _get_soup_simple(catalog_url) - results: List[SeriesResult] = [] - for series in parse_series_catalog(soup).values(): - for entry in series: - if entry.title and _matches_query(query, title=entry.title): - results.append(entry) - return results + body = _get_html_simple(catalog_url) + items = _extract_catalog_index_from_html(body, progress_callback=progress_callback) + if not items: + _emit_progress(progress_callback, "Fallback-Parser", 70) + soup = BeautifulSoup(body, "html.parser") + items = _catalog_index_from_soup(soup) + if items: + _store_catalog_index_in_cache(items) + _emit_progress(progress_callback, f"Filtere Treffer ({len(items)})", 85) + return [entry for entry in items if entry.title and _matches_query(query, title=entry.title)] def parse_series_catalog(soup: BeautifulSoupT) -> Dict[str, List[SeriesResult]]: @@ -731,15 +1034,23 @@ def resolve_redirect(target_url: str) -> Optional[str]: _get_soup(_get_base_url(), session=session) except Exception: pass - response = session.get( - normalized_url, - headers=HEADERS, - timeout=DEFAULT_TIMEOUT, - allow_redirects=True, - ) - if response.url: - _log_url(response.url, kind="RESOLVED") - return response.url if response.url else None + response = None + try: + response = session.get( + 
normalized_url, + headers=HEADERS, + timeout=DEFAULT_TIMEOUT, + allow_redirects=True, + ) + if response.url: + _log_url(response.url, kind="RESOLVED") + return response.url if response.url else None + finally: + if response is not None: + try: + response.close() + except Exception: + pass def scrape_series_detail( @@ -785,7 +1096,7 @@ class SerienstreamPlugin(BasisPlugin): name = "Serienstream" version = "1.0.0" - POPULAR_GENRE_LABEL = "⭐ Beliebte Serien" + POPULAR_GENRE_LABEL = "Haeufig gesehen" def __init__(self) -> None: self._series_results: Dict[str, SeriesResult] = {} @@ -805,6 +1116,7 @@ class SerienstreamPlugin(BasisPlugin): self._hoster_cache: Dict[Tuple[str, str, str], List[str]] = {} self._latest_cache: Dict[int, List[LatestEpisode]] = {} self._latest_hoster_cache: Dict[str, List[str]] = {} + self._series_metadata_cache: Dict[str, Tuple[Dict[str, str], Dict[str, str]]] = {} self.is_available = True self.unavailable_reason: Optional[str] = None if not self._requests_available: # pragma: no cover - optional dependency @@ -851,12 +1163,30 @@ class SerienstreamPlugin(BasisPlugin): cache_key = title.casefold() if self._title_url_cache.get(cache_key) != url: self._title_url_cache[cache_key] = url - self._save_title_url_cache() + self._save_title_url_cache() + if url: return current = self._series_results.get(title) if current is None: self._series_results[title] = SeriesResult(title=title, description=description, url="") + @staticmethod + def _metadata_cache_key(title: str) -> str: + return (title or "").strip().casefold() + + def _series_for_title(self, title: str) -> Optional[SeriesResult]: + direct = self._series_results.get(title) + if direct and direct.url: + return direct + lookup_key = (title or "").strip().casefold() + for item in self._series_results.values(): + if item.title.casefold().strip() == lookup_key and item.url: + return item + cached_url = self._title_url_cache.get(lookup_key, "") + if cached_url: + return SeriesResult(title=title, 
description="", url=cached_url) + return None + @staticmethod def _season_links_cache_name(series_url: str) -> str: digest = hashlib.sha1((series_url or "").encode("utf-8")).hexdigest()[:20] @@ -1274,7 +1604,28 @@ class SerienstreamPlugin(BasisPlugin): self._season_links_cache[title] = list(session_links) return list(session_links) try: - seasons = scrape_series_detail(series.url, load_episodes=False) + series_soup = _get_soup(series.url, session=get_requests_session("serienstream", headers=HEADERS)) + info_labels, art = _extract_series_metadata(series_soup) + if series.description and "plot" not in info_labels: + info_labels["plot"] = series.description + cache_key = self._metadata_cache_key(title) + if info_labels or art: + self._series_metadata_cache[cache_key] = (info_labels, art) + + base_series_url = _series_root_url(_extract_canonical_url(series_soup, series.url)) + season_links = _extract_season_links(series_soup) + season_count = _extract_number_of_seasons(series_soup) + if season_count and (not season_links or len(season_links) < season_count): + existing = {number for number, _ in season_links} + for number in range(1, season_count + 1): + if number in existing: + continue + season_url = f"{base_series_url}/staffel-{number}" + _log_parsed_url(season_url) + season_links.append((number, season_url)) + season_links.sort(key=lambda item: item[0]) + seasons = [SeasonInfo(number=number, url=url, episodes=[]) for number, url in season_links] + seasons.sort(key=lambda s: s.number) except Exception as exc: # pragma: no cover - defensive logging raise RuntimeError(f"Serienstream-Staffeln konnten nicht geladen werden: {exc}") from exc self._season_links_cache[title] = list(seasons) @@ -1288,6 +1639,41 @@ class SerienstreamPlugin(BasisPlugin): return self._remember_series_result(title, series_url) + def metadata_for(self, title: str) -> Tuple[Dict[str, str], Dict[str, str], Optional[List[Any]]]: + title = (title or "").strip() + if not title or not 
self._requests_available: + return {}, {}, None + + cache_key = self._metadata_cache_key(title) + cached = self._series_metadata_cache.get(cache_key) + if cached is not None: + info, art = cached + return dict(info), dict(art), None + + series = self._series_for_title(title) + if series is None or not series.url: + info = {"title": title} + self._series_metadata_cache[cache_key] = (dict(info), {}) + return info, {}, None + + info: Dict[str, str] = {"title": title} + art: Dict[str, str] = {} + if series.description: + info["plot"] = series.description + + try: + soup = _get_soup(series.url, session=get_requests_session("serienstream", headers=HEADERS)) + parsed_info, parsed_art = _extract_series_metadata(soup) + if parsed_info: + info.update(parsed_info) + if parsed_art: + art.update(parsed_art) + except Exception: + pass + + self._series_metadata_cache[cache_key] = (dict(info), dict(art)) + return info, art, None + def series_url_for_title(self, title: str) -> str: title = (title or "").strip() if not title: @@ -1348,7 +1734,7 @@ class SerienstreamPlugin(BasisPlugin): return self._episode_label_cache.get(cache_key, {}).get(episode_label) return None - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: query = query.strip() if not query: self._series_results.clear() @@ -1362,7 +1748,8 @@ class SerienstreamPlugin(BasisPlugin): try: # Nutzt den Katalog (/serien), der jetzt nach Genres gruppiert ist. # Alternativ gäbe es ein Ajax-Endpoint, aber der ist nicht immer zuverlässig erreichbar. 
- results = search_series(query) + _emit_progress(progress_callback, "Serienstream Suche startet", 10) + results = search_series(query, progress_callback=progress_callback) except Exception as exc: # pragma: no cover - defensive logging self._series_results.clear() self._season_cache.clear() @@ -1375,6 +1762,7 @@ class SerienstreamPlugin(BasisPlugin): self._season_cache.clear() self._season_links_cache.clear() self._episode_label_cache.clear() + _emit_progress(progress_callback, f"Treffer aufbereitet: {len(results)}", 95) return [result.title for result in results] def _ensure_seasons(self, title: str) -> List[SeasonInfo]: @@ -1443,6 +1831,18 @@ class SerienstreamPlugin(BasisPlugin): except Exception as exc: # pragma: no cover - defensive logging raise RuntimeError(f"Stream-Link konnte nicht geladen werden: {exc}") from exc + def episode_url_for(self, title: str, season: str, episode: str) -> str: + cache_key = (title, season) + cached = self._episode_label_cache.get(cache_key) + if cached: + info = cached.get(episode) + if info and info.url: + return info.url + episode_info = self._lookup_episode(title, season, episode) + if episode_info and episode_info.url: + return episode_info.url + return "" + def available_hosters_for(self, title: str, season: str, episode: str) -> List[str]: if not self._requests_available: raise RuntimeError("SerienstreamPlugin kann ohne requests/bs4 keine Hoster laden.") diff --git a/addon/plugins/topstreamfilm_plugin.py b/addon/plugins/topstreamfilm_plugin.py index 97c9e4b..ab71fd6 100644 --- a/addon/plugins/topstreamfilm_plugin.py +++ b/addon/plugins/topstreamfilm_plugin.py @@ -19,8 +19,8 @@ import hashlib import os import re import json -from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypeAlias -from urllib.parse import urlencode, urljoin +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional +from urllib.parse import urljoin try: # pragma: no cover - optional dependency import requests @@ -51,13 +51,13 @@ 
if TYPE_CHECKING: # pragma: no cover from requests import Session as RequestsSession from bs4 import BeautifulSoup as BeautifulSoupT # type: ignore[import-not-found] else: # pragma: no cover - RequestsSession: TypeAlias = Any - BeautifulSoupT: TypeAlias = Any + RequestsSession = Any + BeautifulSoupT = Any ADDON_ID = "plugin.video.viewit" SETTING_BASE_URL = "topstream_base_url" -DEFAULT_BASE_URL = "https://www.meineseite" +DEFAULT_BASE_URL = "https://topstreamfilm.live" GLOBAL_SETTING_LOG_URLS = "debug_log_urls" GLOBAL_SETTING_DUMP_HTML = "debug_dump_html" GLOBAL_SETTING_SHOW_URL_INFO = "debug_show_url_info" @@ -78,6 +78,16 @@ HEADERS = { "Accept-Language": "de-DE,de;q=0.9,en;q=0.8", "Connection": "keep-alive", } +ProgressCallback = Optional[Callable[[str, Optional[int]], Any]] + + +def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None: + if not callable(callback): + return + try: + callback(str(message or ""), None if percent is None else int(percent)) + except Exception: + return @dataclass(frozen=True) @@ -87,6 +97,7 @@ class SearchHit: title: str url: str description: str = "" + poster: str = "" def _normalize_search_text(value: str) -> str: @@ -139,6 +150,7 @@ class TopstreamfilmPlugin(BasisPlugin): self._season_to_episode_numbers: Dict[tuple[str, str], List[int]] = {} self._episode_title_by_number: Dict[tuple[str, int, int], str] = {} self._detail_html_cache: Dict[str, str] = {} + self._title_meta: Dict[str, tuple[str, str]] = {} self._popular_cache: List[str] | None = None self._default_preferred_hosters: List[str] = list(DEFAULT_PREFERRED_HOSTERS) self._preferred_hosters: List[str] = list(self._default_preferred_hosters) @@ -419,6 +431,7 @@ class TopstreamfilmPlugin(BasisPlugin): continue seen.add(hit.title) self._title_to_url[hit.title] = hit.url + self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster) titles.append(hit.title) if titles: self._save_title_url_cache() @@ -477,6 +490,69 @@ class 
TopstreamfilmPlugin(BasisPlugin): except Exception: return "" + def _pick_image_from_node(self, node: Any) -> str: + if node is None: + return "" + image = node.select_one("img") + if image is None: + return "" + for attr in ("data-src", "src"): + value = (image.get(attr) or "").strip() + if value and "lazy_placeholder" not in value.casefold(): + return self._absolute_external_url(value, base=self._get_base_url()) + srcset = (image.get("data-srcset") or image.get("srcset") or "").strip() + if srcset: + first = srcset.split(",")[0].strip().split(" ", 1)[0].strip() + if first: + return self._absolute_external_url(first, base=self._get_base_url()) + return "" + + def _store_title_meta(self, title: str, *, plot: str = "", poster: str = "") -> None: + title = (title or "").strip() + if not title: + return + old_plot, old_poster = self._title_meta.get(title, ("", "")) + merged_plot = (plot or old_plot or "").strip() + merged_poster = (poster or old_poster or "").strip() + self._title_meta[title] = (merged_plot, merged_poster) + + def _extract_detail_metadata(self, soup: BeautifulSoupT) -> tuple[str, str]: + if not soup: + return "", "" + plot = "" + poster = "" + for selector in ("meta[property='og:description']", "meta[name='description']"): + node = soup.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + plot = content + break + if not plot: + candidates: list[str] = [] + for paragraph in soup.select("article p, .TPost p, .Description p, .entry-content p"): + text = (paragraph.get_text(" ", strip=True) or "").strip() + if len(text) >= 60: + candidates.append(text) + if candidates: + plot = max(candidates, key=len) + + for selector in ("meta[property='og:image']", "meta[name='twitter:image']"): + node = soup.select_one(selector) + if node is None: + continue + content = (node.get("content") or "").strip() + if content: + poster = self._absolute_external_url(content, base=self._get_base_url()) + break + if 
not poster: + for selector in ("article", ".TPost", ".entry-content"): + poster = self._pick_image_from_node(soup.select_one(selector)) + if poster: + break + return plot, poster + def _clear_stream_index_for_title(self, title: str) -> None: for key in list(self._season_to_episode_numbers.keys()): if key[0] == title: @@ -584,15 +660,25 @@ class TopstreamfilmPlugin(BasisPlugin): session = self._get_session() self._log_url(url, kind="VISIT") self._notify_url(url) + response = None try: response = session.get(url, timeout=DEFAULT_TIMEOUT) response.raise_for_status() except Exception as exc: self._log_error(f"GET {url} failed: {exc}") raise - self._log_url(response.url, kind="OK") - self._log_response_html(response.url, response.text) - return BeautifulSoup(response.text, "html.parser") + try: + final_url = (response.url or url) if response is not None else url + body = (response.text or "") if response is not None else "" + self._log_url(final_url, kind="OK") + self._log_response_html(final_url, body) + return BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass def _get_detail_soup(self, title: str) -> Optional[BeautifulSoupT]: title = (title or "").strip() @@ -701,7 +787,17 @@ class TopstreamfilmPlugin(BasisPlugin): continue if is_movie_hint: self._movie_title_hint.add(title) - hits.append(SearchHit(title=title, url=self._absolute_url(href), description="")) + description_tag = item.select_one(".TPMvCn .Description, .Description, .entry-summary") + description = (description_tag.get_text(" ", strip=True) or "").strip() if description_tag else "" + poster = self._pick_image_from_node(item) + hits.append( + SearchHit( + title=title, + url=self._absolute_url(href), + description=description, + poster=poster, + ) + ) return hits def is_movie(self, title: str) -> bool: @@ -774,6 +870,7 @@ class TopstreamfilmPlugin(BasisPlugin): continue seen.add(hit.title) self._title_to_url[hit.title] = hit.url + 
self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster) titles.append(hit.title) if titles: self._save_title_url_cache() @@ -814,7 +911,7 @@ class TopstreamfilmPlugin(BasisPlugin): # Sonst: Serie via Streams-Accordion parsen (falls vorhanden). self._parse_stream_accordion(soup, title=title) - async def search_titles(self, query: str) -> List[str]: + async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]: """Sucht Titel ueber eine HTML-Suche. Erwartetes HTML (Snippet): @@ -827,6 +924,7 @@ class TopstreamfilmPlugin(BasisPlugin): query = (query or "").strip() if not query: return [] + _emit_progress(progress_callback, "Topstreamfilm Suche", 15) session = self._get_session() url = self._get_base_url() + "/" @@ -834,6 +932,7 @@ class TopstreamfilmPlugin(BasisPlugin): request_url = f"{url}?{urlencode(params)}" self._log_url(request_url, kind="GET") self._notify_url(request_url) + response = None try: response = session.get( url, @@ -844,15 +943,28 @@ class TopstreamfilmPlugin(BasisPlugin): except Exception as exc: self._log_error(f"GET {request_url} failed: {exc}") raise - self._log_url(response.url, kind="OK") - self._log_response_html(response.url, response.text) + try: + final_url = (response.url or request_url) if response is not None else request_url + body = (response.text or "") if response is not None else "" + self._log_url(final_url, kind="OK") + self._log_response_html(final_url, body) - if BeautifulSoup is None: - return [] - soup = BeautifulSoup(response.text, "html.parser") + if BeautifulSoup is None: + return [] + soup = BeautifulSoup(body, "html.parser") + finally: + if response is not None: + try: + response.close() + except Exception: + pass hits: List[SearchHit] = [] - for item in soup.select("li.TPostMv"): + items = soup.select("li.TPostMv") + total_items = max(1, len(items)) + for idx, item in enumerate(items, start=1): + if idx == 1 or idx % 20 == 0: + _emit_progress(progress_callback, 
f"Treffer pruefen {idx}/{total_items}", 55) anchor = item.select_one("a[href]") if not anchor: continue @@ -870,7 +982,8 @@ class TopstreamfilmPlugin(BasisPlugin): self._movie_title_hint.add(title) description_tag = item.select_one(".TPMvCn .Description") description = description_tag.get_text(" ", strip=True) if description_tag else "" - hit = SearchHit(title=title, url=self._absolute_url(href), description=description) + poster = self._pick_image_from_node(item) + hit = SearchHit(title=title, url=self._absolute_url(href), description=description, poster=poster) if _matches_query(query, title=hit.title, description=hit.description): hits.append(hit) @@ -883,10 +996,41 @@ class TopstreamfilmPlugin(BasisPlugin): continue seen.add(hit.title) self._title_to_url[hit.title] = hit.url + self._store_title_meta(hit.title, plot=hit.description, poster=hit.poster) titles.append(hit.title) self._save_title_url_cache() + _emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95) return titles + def metadata_for(self, title: str) -> tuple[dict[str, str], dict[str, str], list[object] | None]: + title = (title or "").strip() + if not title: + return {}, {}, None + + info: dict[str, str] = {"title": title} + art: dict[str, str] = {} + + cached_plot, cached_poster = self._title_meta.get(title, ("", "")) + if cached_plot: + info["plot"] = cached_plot + if cached_poster: + art = {"thumb": cached_poster, "poster": cached_poster} + + if "plot" in info and art: + return info, art, None + + soup = self._get_detail_soup(title) + if soup is None: + return info, art, None + + plot, poster = self._extract_detail_metadata(soup) + if plot: + info["plot"] = plot + if poster: + art = {"thumb": poster, "poster": poster} + self._store_title_meta(title, plot=plot, poster=poster) + return info, art, None + def genres(self) -> List[str]: if not REQUESTS_AVAILABLE or BeautifulSoup is None: return [] diff --git a/addon/resources/settings.xml b/addon/resources/settings.xml index 
d9f7c76..1092fac 100644 --- a/addon/resources/settings.xml +++ b/addon/resources/settings.xml @@ -1,79 +1,90 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - + + + - + + - + + - + + - + + - + + - - - - + + + + - - - + + + - - - + + + - - - - - - - - - - + + + + + + + + + + + + + + + diff --git a/addon/tmdb.py b/addon/tmdb.py index 830e770..7ae2d15 100644 --- a/addon/tmdb.py +++ b/addon/tmdb.py @@ -14,6 +14,7 @@ except ImportError: # pragma: no cover TMDB_API_BASE = "https://api.themoviedb.org/3" TMDB_IMAGE_BASE = "https://image.tmdb.org/t/p" +MAX_CAST_MEMBERS = 30 _TMDB_THREAD_LOCAL = threading.local() @@ -73,53 +74,17 @@ def _fetch_credits( return [] params = {"api_key": api_key, "language": (language or "de-DE").strip()} url = f"{TMDB_API_BASE}/{kind}/{tmdb_id}/credits?{urlencode(params)}" - if callable(log): - log(f"TMDB GET {url}") - try: - response = requests.get(url, timeout=timeout) - except Exception as exc: # pragma: no cover - if callable(log): - log(f"TMDB ERROR /{kind}/{{id}}/credits request_failed error={exc!r}") - return [] - status = getattr(response, "status_code", None) + status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses) if callable(log): log(f"TMDB RESPONSE /{kind}/{{id}}/credits status={status}") - if status != 200: + if log_responses and payload is None and body_text: + log(f"TMDB RESPONSE_BODY /{kind}/{{id}}/credits body={body_text[:2000]}") + if status != 200 or not isinstance(payload, dict): return [] - try: - payload = response.json() or {} - except Exception: - return [] - if callable(log) and log_responses: - try: - dumped = json.dumps(payload, ensure_ascii=False) - except Exception: - dumped = str(payload) - log(f"TMDB RESPONSE_BODY /{kind}/{{id}}/credits body={dumped[:2000]}") - cast_payload = payload.get("cast") or [] if callable(log): log(f"TMDB CREDITS /{kind}/{{id}}/credits cast={len(cast_payload)}") - 
with_images: List[TmdbCastMember] = [] - without_images: List[TmdbCastMember] = [] - for entry in cast_payload: - name = (entry.get("name") or "").strip() - role = (entry.get("character") or "").strip() - thumb = _image_url(entry.get("profile_path") or "", size="w185") - if not name: - continue - member = TmdbCastMember(name=name, role=role, thumb=thumb) - if thumb: - with_images.append(member) - else: - without_images.append(member) - - # Viele Kodi-Skins zeigen bei fehlendem Thumbnail Platzhalter-Köpfe. - # Bevorzugt daher Cast-Einträge mit Bild; nur wenn gar keine Bilder existieren, - # geben wir Namen ohne Bild zurück. - if with_images: - return with_images[:30] - return without_images[:30] + return _parse_cast_payload(cast_payload) def _parse_cast_payload(cast_payload: object) -> List[TmdbCastMember]: @@ -141,8 +106,8 @@ def _parse_cast_payload(cast_payload: object) -> List[TmdbCastMember]: else: without_images.append(member) if with_images: - return with_images[:30] - return without_images[:30] + return with_images[:MAX_CAST_MEMBERS] + return without_images[:MAX_CAST_MEMBERS] def _tmdb_get_json( @@ -163,23 +128,29 @@ def _tmdb_get_json( if callable(log): log(f"TMDB GET {url}") sess = session or _get_tmdb_session() or requests.Session() + response = None try: response = sess.get(url, timeout=timeout) + status = getattr(response, "status_code", None) + payload: object | None = None + body_text = "" + try: + payload = response.json() + except Exception: + try: + body_text = (response.text or "").strip() + except Exception: + body_text = "" except Exception as exc: # pragma: no cover if callable(log): log(f"TMDB ERROR request_failed url={url} error={exc!r}") return None, None, "" - - status = getattr(response, "status_code", None) - payload: object | None = None - body_text = "" - try: - payload = response.json() - except Exception: - try: - body_text = (response.text or "").strip() - except Exception: - body_text = "" + finally: + if response is not None: + try: 
+ response.close() + except Exception: + pass if callable(log): log(f"TMDB RESPONSE status={status} url={url}") @@ -214,49 +185,17 @@ def fetch_tv_episode_credits( return [] params = {"api_key": api_key, "language": (language or "de-DE").strip()} url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}/episode/{episode_number}/credits?{urlencode(params)}" - if callable(log): - log(f"TMDB GET {url}") - try: - response = requests.get(url, timeout=timeout) - except Exception as exc: # pragma: no cover - if callable(log): - log(f"TMDB ERROR /tv/{{id}}/season/{{n}}/episode/{{e}}/credits request_failed error={exc!r}") - return [] - status = getattr(response, "status_code", None) + status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses) if callable(log): log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}}/episode/{{e}}/credits status={status}") - if status != 200: + if log_responses and payload is None and body_text: + log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}}/episode/{{e}}/credits body={body_text[:2000]}") + if status != 200 or not isinstance(payload, dict): return [] - try: - payload = response.json() or {} - except Exception: - return [] - if callable(log) and log_responses: - try: - dumped = json.dumps(payload, ensure_ascii=False) - except Exception: - dumped = str(payload) - log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}}/episode/{{e}}/credits body={dumped[:2000]}") - cast_payload = payload.get("cast") or [] if callable(log): log(f"TMDB CREDITS /tv/{{id}}/season/{{n}}/episode/{{e}}/credits cast={len(cast_payload)}") - with_images: List[TmdbCastMember] = [] - without_images: List[TmdbCastMember] = [] - for entry in cast_payload: - name = (entry.get("name") or "").strip() - role = (entry.get("character") or "").strip() - thumb = _image_url(entry.get("profile_path") or "", size="w185") - if not name: - continue - member = TmdbCastMember(name=name, role=role, thumb=thumb) - if thumb: - with_images.append(member) - 
else: - without_images.append(member) - if with_images: - return with_images[:30] - return without_images[:30] + return _parse_cast_payload(cast_payload) def lookup_tv_show( @@ -546,27 +485,13 @@ def lookup_tv_season_summary( params = {"api_key": api_key, "language": (language or "de-DE").strip()} url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}?{urlencode(params)}" - if callable(log): - log(f"TMDB GET {url}") - try: - response = requests.get(url, timeout=timeout) - except Exception: - return None - status = getattr(response, "status_code", None) + status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses) if callable(log): log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}} status={status}") - if status != 200: + if log_responses and payload is None and body_text: + log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}} body={body_text[:2000]}") + if status != 200 or not isinstance(payload, dict): return None - try: - payload = response.json() or {} - except Exception: - return None - if callable(log) and log_responses: - try: - dumped = json.dumps(payload, ensure_ascii=False) - except Exception: - dumped = str(payload) - log(f"TMDB RESPONSE_BODY /tv/{{id}}/season/{{n}} body={dumped[:2000]}") plot = (payload.get("overview") or "").strip() poster_path = (payload.get("poster_path") or "").strip() @@ -594,27 +519,9 @@ def lookup_tv_season( return None params = {"api_key": api_key, "language": (language or "de-DE").strip()} url = f"{TMDB_API_BASE}/tv/{tmdb_id}/season/{season_number}?{urlencode(params)}" - if callable(log): - log(f"TMDB GET {url}") - try: - response = requests.get(url, timeout=timeout) - except Exception as exc: # pragma: no cover - if callable(log): - log(f"TMDB ERROR /tv/{{id}}/season/{{n}} request_failed error={exc!r}") - return None - - status = getattr(response, "status_code", None) - payload = None - body_text = "" - try: - payload = response.json() or {} - except Exception: - try: - body_text = 
(response.text or "").strip() - except Exception: - body_text = "" - - episodes = (payload or {}).get("episodes") or [] + status, payload, body_text = _tmdb_get_json(url=url, timeout=timeout, log=log, log_responses=log_responses) + episodes = (payload or {}).get("episodes") if isinstance(payload, dict) else [] + episodes = episodes or [] if callable(log): log(f"TMDB RESPONSE /tv/{{id}}/season/{{n}} status={status} episodes={len(episodes)}") if log_responses: diff --git a/docs/DEFAULT_ROUTER.md b/docs/DEFAULT_ROUTER.md index 61a2aed..4503d58 100644 --- a/docs/DEFAULT_ROUTER.md +++ b/docs/DEFAULT_ROUTER.md @@ -1,54 +1,49 @@ -# ViewIT – Hauptlogik (`addon/default.py`) +# ViewIT Hauptlogik (`addon/default.py`) -Dieses Dokument beschreibt den Einstiegspunkt des Addons und die zentrale Steuerlogik. +Diese Datei ist der Router des Addons. +Sie verbindet Kodi UI, Plugin Calls und Playback. -## Aufgabe der Datei -`addon/default.py` ist der Router des Addons. Er: -- lädt die Plugin‑Module dynamisch, -- stellt die Kodi‑Navigation bereit, -- übersetzt UI‑Aktionen in Plugin‑Aufrufe, -- startet die Wiedergabe und verwaltet Playstate/Resume. +## Kernaufgabe +- Plugins laden +- Menues bauen +- Aktionen auf Plugin Methoden mappen +- Playback starten +- Playstate speichern -## Ablauf (high level) -1. **Plugin‑Discovery**: Lädt alle `addon/plugins/*.py` (ohne `_`‑Prefix) und instanziiert Klassen, die von `BasisPlugin` erben. -2. **Navigation**: Baut Kodi‑Listen (Serien/Staffeln/Episoden) auf Basis der Plugin‑Antworten. -3. **Playback**: Holt Stream‑Links aus dem Plugin und startet die Wiedergabe. -4. **Playstate**: Speichert Resume‑Daten lokal (`playstate.json`) und setzt `playcount`/Resume‑Infos. +## Ablauf +1. Plugin Discovery fuer `addon/plugins/*.py` ohne `_` Prefix. +2. Navigation fuer Titel, Staffeln und Episoden. +3. Playback: Link holen, optional aufloesen, abspielen. +4. Playstate: watched und resume in `playstate.json` schreiben. 
-## Routing & Aktionen -Die Datei arbeitet mit URL‑Parametern (Kodi‑Plugin‑Standard). Typische Aktionen: -- `search` → Suche über ein Plugin -- `seasons` → Staffeln für einen Titel -- `episodes` → Episoden für eine Staffel -- `play` → Stream‑Link auflösen und abspielen +## Routing +Der Router liest Query Parameter aus `sys.argv[2]`. +Typische Aktionen: +- `search` +- `seasons` +- `episodes` +- `play_episode` +- `play_movie` +- `play_episode_url` -Die genaue Aktion wird aus den Query‑Parametern gelesen und an das entsprechende Plugin delegiert. +## Playstate +- Speicherort: Addon Profilordner, Datei `playstate.json` +- Key: Plugin + Titel + Staffel + Episode +- Werte: watched, playcount, resume_position, resume_total -## Playstate (Resume/Watched) -- **Speicherort**: `playstate.json` im Addon‑Profilordner. -- **Key**: Kombination aus Plugin‑Name, Titel, Staffel, Episode. -- **Verwendung**: - - `playcount` wird gesetzt, wenn „gesehen“ markiert ist. - - `resume_position`/`resume_total` werden gesetzt, wenn vorhanden. +## Wichtige Helper +- Plugin Loader und Discovery +- UI Builder fuer ListItems +- Playstate Load/Save/Merge +- TMDB Merge mit Source Fallback -## Wichtige Hilfsfunktionen -- **Plugin‑Loader**: findet & instanziiert Plugins. -- **UI‑Helper**: setzt Content‑Type, baut Verzeichnisseinträge. -- **Playstate‑Helper**: `_load_playstate`, `_save_playstate`, `_apply_playstate_to_info`. +## Fehlerverhalten +- Importfehler pro Plugin werden isoliert behandelt. +- Fehler in einem Plugin sollen das Addon nicht stoppen. +- User bekommt kurze Fehlermeldungen in Kodi. -## Fehlerbehandlung -- Plugin‑Importfehler werden isoliert behandelt, damit das Addon nicht komplett ausfällt. -- Netzwerk‑Fehler werden in Plugins abgefangen, `default.py` sollte nur saubere Fehlermeldungen weitergeben. - -## Debugging -- Globale Debug‑Settings werden über `addon/resources/settings.xml` gesteuert. -- Plugins loggen URLs/HTML optional (siehe jeweilige Plugin‑Doku). 
- -## Änderungen & Erweiterungen -Für neue Aktionen: -1. Neue Aktion im Router registrieren. -2. UI‑Einträge passend anlegen. -3. Entsprechende Plugin‑Methode definieren oder erweitern. - -## Hinweis zur Erstellung -Teile dieser Dokumentation wurden KI‑gestützt erstellt und bei Bedarf manuell angepasst. +## Erweiterung +Fuer neue Aktion im Router: +1. Action im `run()` Handler registrieren. +2. ListItem mit passenden Parametern bauen. +3. Zielmethode im Plugin bereitstellen. diff --git a/docs/PLUGIN_DEVELOPMENT.md b/docs/PLUGIN_DEVELOPMENT.md index 84e8c96..b167134 100644 --- a/docs/PLUGIN_DEVELOPMENT.md +++ b/docs/PLUGIN_DEVELOPMENT.md @@ -1,109 +1,85 @@ -# ViewIT – Entwicklerdoku Plugins (`addon/plugins/*_plugin.py`) +# ViewIT Plugin Entwicklung (`addon/plugins/*_plugin.py`) -Diese Doku beschreibt, wie Plugins im ViewIT‑Addon aufgebaut sind und wie neue Provider‑Integrationen entwickelt werden. +Diese Datei zeigt, wie Plugins im Projekt aufgebaut sind und wie sie mit dem Router zusammenarbeiten. ## Grundlagen -- Jedes Plugin ist eine einzelne Datei unter `addon/plugins/`. -- Dateinamen **ohne** `_`‑Prefix werden automatisch geladen. -- Jede Datei enthält eine Klasse, die von `BasisPlugin` erbt. +- Ein Plugin ist eine Python Datei in `addon/plugins/`. +- Dateien mit `_` Prefix werden nicht geladen. +- Plugin Klasse erbt von `BasisPlugin`. +- Optional: `Plugin = ` als klarer Einstiegspunkt. -## Pflicht‑Methoden (BasisPlugin) -Jedes Plugin muss diese Methoden implementieren: +## Pflichtmethoden +Jedes Plugin implementiert: - `async search_titles(query: str) -> list[str]` - `seasons_for(title: str) -> list[str]` - `episodes_for(title: str, season: str) -> list[str]` -## Vertrag Plugin ↔ Hauptlogik (`default.py`) -Die Hauptlogik ruft Plugin-Methoden auf und verarbeitet ausschließlich deren Rückgaben. 
+## Wichtige optionale Methoden +- `stream_link_for(...)` +- `resolve_stream_link(...)` +- `metadata_for(...)` +- `available_hosters_for(...)` +- `series_url_for_title(...)` +- `remember_series_url(...)` +- `episode_url_for(...)` +- `available_hosters_for_url(...)` +- `stream_link_for_url(...)` -Wesentliche Rückgaben an die Hauptlogik: -- `search_titles(...)` → Liste von Titel-Strings für die Trefferliste -- `seasons_for(...)` → Liste von Staffel-Labels -- `episodes_for(...)` → Liste von Episoden-Labels -- `stream_link_for(...)` → Hoster-/Player-Link (nicht zwingend finale Media-URL) -- `resolve_stream_link(...)` → finale/spielbare URL nach Redirect/Resolver -- Optional `available_hosters_for(...)` → auswählbare Hoster-Namen im Dialog -- Optional `series_url_for_title(...)` → stabile Detail-URL pro Titel für Folgeaufrufe -- Optional `remember_series_url(...)` → Übernahme einer bereits bekannten Detail-URL +## Film Provider Standard +Wenn keine echten Staffeln existieren: +- `seasons_for(title)` gibt `['Film']` +- `episodes_for(title, 'Film')` gibt `['Stream']` -Standard für Film-Provider (ohne echte Staffeln): -- `seasons_for(title)` gibt `["Film"]` zurück -- `episodes_for(title, "Film")` gibt `["Stream"]` zurück +## Capabilities +Ein Plugin kann Features melden ueber `capabilities()`. 
+Bekannte Werte: +- `popular_series` +- `genres` +- `latest_episodes` +- `new_titles` +- `alpha` +- `series_catalog` -## Optionale Features (Capabilities) -Über `capabilities()` kann das Plugin zusätzliche Funktionen anbieten: -- `popular_series` → `popular_series()` -- `genres` → `genres()` + `titles_for_genre(genre)` -- `latest_episodes` → `latest_episodes(page=1)` +## Suche +Aktuelle Regeln fuer Suchtreffer: +- Match auf Titel +- Wortbasiert +- Keine Teilwort Treffer im selben Wort +- Beschreibungen nicht fuer Match nutzen -## Empfohlene Struktur -- Konstanten für URLs/Endpoints (BASE_URL, Pfade, Templates) -- `requests` + `bs4` optional (fehlt beides, Plugin sollte sauber deaktivieren) -- Helper‑Funktionen für Parsing und Normalisierung -- Caches für Such‑, Staffel‑ und Episoden‑Daten +## Settings +Pro Plugin meist `*_base_url`. +Beispiele: +- `serienstream_base_url` +- `aniworld_base_url` +- `einschalten_base_url` +- `topstream_base_url` +- `filmpalast_base_url` +- `doku_streams_base_url` -## Suche (aktuelle Policy) -- **Nur Titel‑Matches** -- **Wortbasierter Match** nach Normalisierung (Lowercase + Nicht‑Alnum → Leerzeichen) -- Keine Teilwort-Treffer innerhalb eines Wortes (Beispiel: `hund` matcht nicht `thunder`) -- Keine Beschreibung/Plot/Meta für Matches +## Playback Flow +1. Episode oder Film auswaehlen. +2. Optional Hosterliste anzeigen. +3. `stream_link_for` oder `stream_link_for_url` aufrufen. +4. `resolve_stream_link` aufrufen. +5. Finale URL an Kodi geben. -## Namensgebung -- Plugin‑Klassenname: `XxxPlugin` -- Anzeigename (Property `name`): **mit Großbuchstaben beginnen** (z. B. `Serienstream`, `Einschalten`) - -## Settings pro Plugin -Standard: `*_base_url` (Domain / BASE_URL) -- Beispiele: - - `serienstream_base_url` - - `aniworld_base_url` - - `einschalten_base_url` - - `topstream_base_url` - - `filmpalast_base_url` - -## Playback -- `stream_link_for(...)` implementieren (liefert bevorzugten Hoster-Link). 
-- `available_hosters_for(...)` bereitstellen, wenn die Seite mehrere Hoster anbietet. -- `resolve_stream_link(...)` nach einheitlichem Flow umsetzen: - 1. Redirects auflösen (falls vorhanden) - 2. ResolveURL (`resolveurl_backend.resolve`) versuchen - 3. Bei Fehlschlag auf den besten verfügbaren Link zurückfallen -- Optional `set_preferred_hosters(...)` unterstützen, damit die Hoster-Auswahl aus der Hauptlogik direkt greift. - -## Standard‑Flow (empfohlen) -1. **Suche**: nur Titel liefern und Titel→Detail-URL mappen. -2. **Navigation**: `series_url_for_title`/`remember_series_url` unterstützen, damit URLs zwischen Aufrufen stabil bleiben. -3. **Auswahl Hoster**: Hoster-Namen aus der Detailseite extrahieren und anbieten. -4. **Playback**: Hoster-Link liefern, danach konsistent über `resolve_stream_link` finalisieren. -5. **Fallbacks**: bei Layout-Unterschieden defensiv parsen und Logging aktivierbar halten. - -## Debugging -Global gesteuert über Settings: -- `debug_log_urls` -- `debug_dump_html` -- `debug_show_url_info` - -Plugins sollten die Helper aus `addon/plugin_helpers.py` nutzen: +## Logging +Nutze Helper aus `addon/plugin_helpers.py`: - `log_url(...)` - `dump_response_html(...)` - `notify_url(...)` -## Template -`addon/plugins/_template_plugin.py` dient als Startpunkt für neue Provider. 
+## Build und Checks +- ZIP: `./scripts/build_kodi_zip.sh` +- Addon Ordner: `./scripts/build_install_addon.sh` +- Manifest: `python3 scripts/generate_plugin_manifest.py` +- Snapshot Checks: `python3 qa/run_plugin_snapshots.py` -## Build & Test -- ZIP bauen: `./scripts/build_kodi_zip.sh` -- Addon‑Ordner: `./scripts/build_install_addon.sh` - -## Beispiel‑Checkliste -- [ ] `name` korrekt gesetzt -- [ ] `*_base_url` in Settings vorhanden -- [ ] Suche matcht nur Titel und wortbasiert -- [ ] `stream_link_for` + `resolve_stream_link` folgen dem Standard-Flow -- [ ] Optional: `available_hosters_for` + `set_preferred_hosters` vorhanden -- [ ] Optional: `series_url_for_title` + `remember_series_url` vorhanden -- [ ] Fehlerbehandlung und Timeouts vorhanden -- [ ] Optional: Caches für Performance - -## Hinweis zur Erstellung -Teile dieser Dokumentation wurden KI‑gestützt erstellt und bei Bedarf manuell angepasst. +## Kurze Checkliste +- `name` gesetzt und korrekt +- `*_base_url` in Settings vorhanden +- Suche liefert nur passende Titel +- Playback Methoden vorhanden +- Fehler und Timeouts behandelt +- Cache nur da, wo er Zeit spart diff --git a/docs/PLUGIN_MANIFEST.json b/docs/PLUGIN_MANIFEST.json new file mode 100644 index 0000000..07f73d4 --- /dev/null +++ b/docs/PLUGIN_MANIFEST.json @@ -0,0 +1,104 @@ +{ + "schema_version": 1, + "plugins": [ + { + "file": "addon/plugins/aniworld_plugin.py", + "module": "aniworld_plugin", + "name": "Aniworld", + "class": "AniworldPlugin", + "version": "1.0.0", + "capabilities": [ + "genres", + "latest_episodes", + "popular_series" + ], + "prefer_source_metadata": false, + "base_url_setting": "aniworld_base_url", + "available": true, + "unavailable_reason": null, + "error": null + }, + { + "file": "addon/plugins/dokustreams_plugin.py", + "module": "dokustreams_plugin", + "name": "Doku-Streams", + "class": "DokuStreamsPlugin", + "version": "1.0.0", + "capabilities": [ + "genres", + "popular_series" + ], + "prefer_source_metadata": true, + 
"base_url_setting": "doku_streams_base_url", + "available": true, + "unavailable_reason": null, + "error": null + }, + { + "file": "addon/plugins/einschalten_plugin.py", + "module": "einschalten_plugin", + "name": "Einschalten", + "class": "EinschaltenPlugin", + "version": "1.0.0", + "capabilities": [ + "genres", + "new_titles" + ], + "prefer_source_metadata": false, + "base_url_setting": "einschalten_base_url", + "available": true, + "unavailable_reason": null, + "error": null + }, + { + "file": "addon/plugins/filmpalast_plugin.py", + "module": "filmpalast_plugin", + "name": "Filmpalast", + "class": "FilmpalastPlugin", + "version": "1.0.0", + "capabilities": [ + "alpha", + "genres", + "series_catalog" + ], + "prefer_source_metadata": false, + "base_url_setting": "filmpalast_base_url", + "available": true, + "unavailable_reason": null, + "error": null + }, + { + "file": "addon/plugins/serienstream_plugin.py", + "module": "serienstream_plugin", + "name": "Serienstream", + "class": "SerienstreamPlugin", + "version": "1.0.0", + "capabilities": [ + "genres", + "latest_episodes", + "popular_series" + ], + "prefer_source_metadata": false, + "base_url_setting": "serienstream_base_url", + "available": true, + "unavailable_reason": null, + "error": null + }, + { + "file": "addon/plugins/topstreamfilm_plugin.py", + "module": "topstreamfilm_plugin", + "name": "Topstreamfilm", + "class": "TopstreamfilmPlugin", + "version": "1.0.0", + "capabilities": [ + "genres", + "popular_series" + ], + "prefer_source_metadata": false, + "base_url_setting": "topstream_base_url", + "available": true, + "unavailable_reason": null, + "error": null + } + ] +} diff --git a/docs/PLUGIN_SYSTEM.md b/docs/PLUGIN_SYSTEM.md index d4ac5b7..37394ac 100644 --- a/docs/PLUGIN_SYSTEM.md +++ b/docs/PLUGIN_SYSTEM.md @@ -1,96 +1,71 @@ -## ViewIt Plugin-System +# ViewIT Plugin System -Dieses Dokument beschreibt, wie das Plugin-System von **ViewIt** funktioniert und wie die Community neue Integrationen hinzufügen 
kann. +Dieses Dokument beschreibt Laden, Vertrag und Betrieb der Plugins. -### Überblick +## Ueberblick +Der Router laedt Provider Integrationen aus `addon/plugins/*.py`. +Aktive Plugins werden instanziiert und im UI genutzt. -ViewIt lädt Provider-Integrationen dynamisch aus `addon/plugins/*.py`. Jede Datei enthält eine Klasse, die von `BasisPlugin` erbt. Beim Start werden alle Plugins instanziiert und nur aktiv genutzt, wenn sie verfügbar sind. +Relevante Dateien: +- `addon/default.py` +- `addon/plugin_interface.py` +- `docs/DEFAULT_ROUTER.md` +- `docs/PLUGIN_DEVELOPMENT.md` -Weitere Details: -- `docs/DEFAULT_ROUTER.md` (Hauptlogik in `addon/default.py`) -- `docs/PLUGIN_DEVELOPMENT.md` (Entwicklerdoku für Plugins) +## Aktuelle Plugins +- `serienstream_plugin.py` +- `topstreamfilm_plugin.py` +- `einschalten_plugin.py` +- `aniworld_plugin.py` +- `filmpalast_plugin.py` +- `dokustreams_plugin.py` +- `_template_plugin.py` (Vorlage) -### Aktuelle Plugins +## Discovery Ablauf +In `addon/default.py`: +1. Finde `*.py` in `addon/plugins/` +2. Ueberspringe Dateien mit `_` Prefix +3. Importiere Modul +4. Nutze `Plugin = `, falls vorhanden +5. Sonst instanziiere `BasisPlugin` Subklassen deterministisch +6. Ueberspringe Plugins mit `is_available = False` -- `serienstream_plugin.py` – Serienstream (s.to) -- `topstreamfilm_plugin.py` – Topstreamfilm -- `einschalten_plugin.py` – Einschalten -- `aniworld_plugin.py` – Aniworld -- `filmpalast_plugin.py` – Filmpalast -- `_template_plugin.py` – Vorlage für neue Plugins +## Basis Interface +`BasisPlugin` definiert den Kern: +- `search_titles` +- `seasons_for` +- `episodes_for` -### Plugin-Discovery (Ladeprozess) +Weitere Methoden sind optional und werden nur genutzt, wenn vorhanden. -Der Loader in `addon/default.py`: +## Capabilities +Plugins koennen Features aktiv melden. +Typische Werte: +- `popular_series` +- `genres` +- `latest_episodes` +- `new_titles` +- `alpha` +- `series_catalog` -1. Sucht alle `*.py` in `addon/plugins/` -2. 
Überspringt Dateien, die mit `_` beginnen -3. Lädt Module dynamisch -4. Instanziert Klassen, die von `BasisPlugin` erben -5. Ignoriert Plugins mit `is_available = False` +Das UI zeigt nur Menues fuer aktiv gemeldete Features. -Damit bleiben fehlerhafte Plugins isoliert und blockieren nicht das gesamte Add-on. +## Metadaten Quelle +`prefer_source_metadata = True` bedeutet: +- Quelle zuerst +- TMDB nur Fallback -### BasisPlugin – verpflichtende Methoden +## Stabilitaet +- Keine Netz Calls im Import Block. +- Fehler im Plugin muessen lokal behandelt werden. +- Ein defektes Plugin darf andere Plugins nicht blockieren. -Definiert in `addon/plugin_interface.py`: +## Build +Kodi ZIP bauen: -- `async search_titles(query: str) -> list[str]` -- `seasons_for(title: str) -> list[str]` -- `episodes_for(title: str, season: str) -> list[str]` - -### Optionale Features (Capabilities) - -Plugins können zusätzliche Features anbieten: - -- `capabilities() -> set[str]` - - `popular_series`: liefert beliebte Serien - - `genres`: Genre-Liste verfügbar - - `latest_episodes`: neue Episoden verfügbar -- `popular_series() -> list[str]` -- `genres() -> list[str]` -- `titles_for_genre(genre: str) -> list[str]` -- `latest_episodes(page: int = 1) -> list[LatestEpisode]` (wenn angeboten) - -ViewIt zeigt im UI nur die Features an, die ein Plugin tatsächlich liefert. - -### Plugin-Struktur (empfohlen) - -Eine Integration sollte typischerweise bieten: - -- Konstante `BASE_URL` -- `search_titles()` mit Provider-Suche -- `seasons_for()` und `episodes_for()` mit HTML-Parsing -- `stream_link_for()` optional für direkte Playback-Links -- Optional: `available_hosters_for()` oder Provider-spezifische Helfer - -Als Startpunkt dient `addon/plugins/_template_plugin.py`. - -### Community-Erweiterungen (Workflow) - -1. Fork/Branch erstellen -2. Neue Datei unter `addon/plugins/` hinzufügen (z. B. `meinprovider_plugin.py`) -3. Klasse erstellen, die `BasisPlugin` implementiert -4. 
In Kodi testen (ZIP bauen, installieren) -5. PR öffnen - -### Qualitätsrichtlinien - -- Keine Netzwerkzugriffe im Import-Top-Level -- Netzwerkzugriffe nur in Methoden (z. B. `search_titles`) -- Fehler sauber abfangen und verständliche Fehlermeldungen liefern -- Kein globaler Zustand, der across instances überrascht -- Provider-spezifische Parser in Helper-Funktionen kapseln - -### Debugging & Logs - -Hilfreiche Logs werden nach `userdata/addon_data/plugin.video.viewit/logs/` geschrieben. -Provider sollten URL-Logging optional halten (Settings). - -### ZIP-Build - -``` +```bash ./scripts/build_kodi_zip.sh ``` -Das ZIP liegt anschließend unter `dist/plugin.video.viewit-.zip`. +Ergebnis: +`dist/plugin.video.viewit-.zip` diff --git a/docs/RELEASE.md b/docs/RELEASE.md new file mode 100644 index 0000000..fbb8de5 --- /dev/null +++ b/docs/RELEASE.md @@ -0,0 +1,44 @@ +# Release Flow (Main + Nightly) + +This project uses two release channels: + +- `nightly`: integration and test channel +- `main`: stable channel + +## Rules + +- Feature work goes to `nightly` only. +- Promote from `nightly` to `main` with `--squash` only. +- `main` version has no suffix (`0.1.60`). +- `nightly` version uses `-nightly` and is always at least one patch higher than `main` (`0.1.61-nightly`). +- Keep changelogs split: + - `CHANGELOG-NIGHTLY.md` + - `CHANGELOG.md` + +## Nightly publish + +1) Finish changes on `nightly`. +2) Bump addon version in `addon/addon.xml` to `X.Y.Z-nightly`. +3) Build and publish nightly repo artifacts. +4) Push `nightly`. + +## Promote nightly to main + +```bash +git checkout main +git pull origin main +git merge --squash nightly +git commit -m "release: X.Y.Z" +``` + +Then: + +1) Set `addon/addon.xml` version to `X.Y.Z` (without `-nightly`). +2) Build and publish main repo artifacts. +3) Push `main`. +4) Optional tag: `vX.Y.Z`. 
+ +## Local ZIPs (separated) + +- Main ZIP output: `dist/local_zips/main/` +- Nightly ZIP output: `dist/local_zips/nightly/` diff --git a/qa/plugin_snapshots.json b/qa/plugin_snapshots.json new file mode 100644 index 0000000..71d5c4d --- /dev/null +++ b/qa/plugin_snapshots.json @@ -0,0 +1,73 @@ +{ + "snapshots": { + "Serienstream::search_titles::trek": [ + "Star Trek: Lower Decks", + "Star Trek: Prodigy", + "Star Trek: The Animated Series", + "Inside Star Trek", + "Raumschiff Enterprise - Star Trek: The Original Series", + "Star Trek: Deep Space Nine", + "Star Trek: Discovery", + "Star Trek: Enterprise", + "Star Trek: Picard", + "Star Trek: Raumschiff Voyager", + "Star Trek: Short Treks", + "Star Trek: Starfleet Academy", + "Star Trek: Strange New Worlds", + "Star Trek: The Next Generation" + ], + "Aniworld::search_titles::naruto": [ + "Naruto", + "Naruto Shippuden", + "Boruto: Naruto Next Generations", + "Naruto Spin-Off: Rock Lee & His Ninja Pals" + ], + "Topstreamfilm::search_titles::matrix": [ + "Darkdrive – Verschollen in der Matrix", + "Matrix Reloaded", + "Armitage III: Poly Matrix", + "Matrix Resurrections", + "Matrix", + "Matrix Revolutions", + "Matrix Fighters" + ], + "Einschalten::new_titles_page::1": [ + "Miracle: Das Eishockeywunder von 1980", + "No Escape - Grizzly Night", + "Kidnapped: Der Fall Elizabeth Smart", + "The Internship", + "The Rip", + "Die Toten vom Bodensee – Schicksalsrad", + "People We Meet on Vacation", + "Anaconda", + "Even If This Love Disappears Tonight", + "Die Stunde der Mutigen", + "10DANCE", + "SpongeBob Schwammkopf: Piraten Ahoi!", + "Ella McCay", + "Merv", + "Elmo and Mark Rober's Merry Giftmas", + "Als mein Vater Weihnachten rettete 2", + "Die Fraggles: Der erste Schnee", + "Gregs Tagebuch 3: Jetzt reicht's!", + "Not Without Hope", + "Five Nights at Freddy's 2" + ], + "Filmpalast::search_titles::trek": [ + "Star Trek", + "Star Trek - Der Film", + "Star Trek 2 - Der Zorn des Khan", + "Star Trek 9 Der Aufstand", + "Star Trek: 
#!/usr/bin/env python3
"""Run live snapshot checks for plugins.

Each CONFIG entry calls one plugin method and compares the normalized
title list against the stored snapshot in qa/plugin_snapshots.json.
Use --update to refresh stored snapshots.
"""
from __future__ import annotations

import argparse
import asyncio
import importlib.util
import inspect
import json
import sys
from pathlib import Path
from typing import Any

ROOT_DIR = Path(__file__).resolve().parents[1]
PLUGIN_DIR = ROOT_DIR / "addon" / "plugins"
SNAPSHOT_PATH = ROOT_DIR / "qa" / "plugin_snapshots.json"

# Make the addon modules importable without installing the addon.
sys.path.insert(0, str(ROOT_DIR / "addon"))

try:
    from plugin_interface import BasisPlugin  # type: ignore
except Exception as exc:  # pragma: no cover
    raise SystemExit(f"Failed to import BasisPlugin: {exc}")

# One live probe per entry: plugin display name, method to call, its
# positional args, and how many normalized titles to keep per snapshot.
CONFIG = [
    {"plugin": "Serienstream", "method": "search_titles", "args": ["trek"], "max_items": 20},
    {"plugin": "Aniworld", "method": "search_titles", "args": ["naruto"], "max_items": 20},
    {"plugin": "Topstreamfilm", "method": "search_titles", "args": ["matrix"], "max_items": 20},
    {"plugin": "Einschalten", "method": "new_titles_page", "args": [1], "max_items": 20},
    {"plugin": "Filmpalast", "method": "search_titles", "args": ["trek"], "max_items": 20},
    {"plugin": "Doku-Streams", "method": "search_titles", "args": ["japan"], "max_items": 20},
]


def _import_module(path: Path):
    """Import a plugin module directly from its file path."""
    spec = importlib.util.spec_from_file_location(path.stem, path)
    if spec is None or spec.loader is None:
        raise ImportError(f"Missing spec for {path}")
    module = importlib.util.module_from_spec(spec)
    sys.modules[spec.name] = module
    spec.loader.exec_module(module)
    return module


def _discover_plugins() -> dict[str, BasisPlugin]:
    """Instantiate every BasisPlugin subclass found under PLUGIN_DIR.

    An explicit module-level ``Plugin`` export wins; otherwise classes
    are taken in case-insensitive name order.  The first instance that
    claims a given display name keeps it.
    """
    plugins: dict[str, BasisPlugin] = {}
    for file_path in sorted(PLUGIN_DIR.glob("*.py")):
        if file_path.name.startswith("_"):
            continue
        module = _import_module(file_path)
        preferred = getattr(module, "Plugin", None)
        if inspect.isclass(preferred) and issubclass(preferred, BasisPlugin) and preferred is not BasisPlugin:
            classes = [preferred]
        else:
            classes = [
                obj
                for obj in module.__dict__.values()
                if inspect.isclass(obj) and issubclass(obj, BasisPlugin) and obj is not BasisPlugin
            ]
        classes.sort(key=lambda cls: cls.__name__.casefold())
        for cls in classes:
            instance = cls()
            name = str(getattr(instance, "name", "") or "").strip()
            if name and name not in plugins:
                plugins[name] = instance
    return plugins


def _normalize_titles(value: Any, max_items: int) -> list[str]:
    """Trim, de-duplicate (case-insensitively) and cap a title list."""
    if not value:
        return []
    titles = [str(item).strip() for item in list(value) if item and str(item).strip()]
    seen: set[str] = set()
    normalized: list[str] = []
    for title in titles:
        key = title.casefold()
        if key in seen:
            continue
        seen.add(key)
        normalized.append(title)
        if len(normalized) >= max_items:
            break
    return normalized


def _snapshot_key(entry: dict[str, Any]) -> str:
    """Build the stable snapshot-dict key for one CONFIG entry."""
    args = entry.get("args", [])
    return f"{entry['plugin']}::{entry['method']}::{','.join(str(a) for a in args)}"


def _call_method(plugin: BasisPlugin, method_name: str, args: list[Any]):
    """Invoke a plugin method, transparently driving awaitable results.

    Raises:
        RuntimeError: if the plugin does not expose ``method_name``.
    """
    method = getattr(plugin, method_name, None)
    if not callable(method):
        raise RuntimeError(f"Method missing: {method_name}")
    result = method(*args)
    # Fix: accept any awaitable (futures, custom awaitables), not only
    # plain coroutine objects.  asyncio.run() itself only takes a
    # coroutine, so wrap the awaitable in one.
    if inspect.isawaitable(result):
        async def _drive() -> Any:
            return await result
        return asyncio.run(_drive())
    return result


def main() -> int:
    """Run (or, with --update, refresh) all configured snapshot checks."""
    parser = argparse.ArgumentParser(description="Run live snapshot checks for plugins.")
    parser.add_argument("--update", action="store_true", help="refresh the stored snapshots")
    args = parser.parse_args()

    snapshots: dict[str, Any] = {}
    if SNAPSHOT_PATH.exists():
        snapshots = json.loads(SNAPSHOT_PATH.read_text(encoding="utf-8"))
    data = snapshots.get("snapshots", {}) if isinstance(snapshots, dict) else {}
    if args.update:
        # A refresh rebuilds the store from scratch so that entries for
        # removed CONFIG probes do not linger.
        data = {}

    plugins = _discover_plugins()
    errors: list[str] = []

    for entry in CONFIG:
        plugin_name = entry["plugin"]
        plugin = plugins.get(plugin_name)
        if plugin is None:
            errors.append(f"Plugin missing: {plugin_name}")
            continue
        key = _snapshot_key(entry)
        try:
            result = _call_method(plugin, entry["method"], entry.get("args", []))
            normalized = _normalize_titles(result, entry.get("max_items", 20))
        except Exception as exc:
            errors.append(f"Snapshot error: {key} ({exc})")
            if args.update:
                data[key] = {"error": str(exc)}
            continue
        if args.update:
            data[key] = normalized
        else:
            expected = data.get(key)
            if expected != normalized:
                errors.append(f"Snapshot mismatch: {key}\nExpected: {expected}\nActual: {normalized}")

    if args.update:
        SNAPSHOT_PATH.parent.mkdir(parents=True, exist_ok=True)
        SNAPSHOT_PATH.write_text(
            json.dumps({"snapshots": data}, indent=2, ensure_ascii=False) + "\n",
            encoding="utf-8",
        )

    if errors:
        # Fix: diagnostics belong on stderr so stdout stays clean for
        # tooling that captures this script's output.
        for err in errors:
            print(err, file=sys.stderr)
        return 1
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
zaw6_k#2KX@OXsa!n!%91nOO9dw~y}eANKC<->r`hscnMj8$RU~#PW(5FD+>1P&6Tn z9$IBCfp+V5y5s!mP!l^@4)p7_6jA2ebwSo2A zA=1PQDeIOTDO*zwF7co|iPTak!vxcV@{T#6BD0Fw*eOuP%DZHm{5kaJ0B-{Yx=KMQ z`yc|i2qqPnq{)cg;8pFb@;m?sfZH6EHlcs=OtN|;*&L|r#cK+l2^jd$m1T1W|kGOT+M#p`}hcEB)XKntXqv6Am#S;a#JI8kC z*`E2~N30tVGTZX5-mI;6ooQU`&5V8IU)uS3-)DWxq5SrJd1he#quRy-LxD?UgPdwVdunL-)Wxwu zMm2GqY64Ibb6XJyR-sep)(W}ZfpBBB-$OJijSHYqt+ ziZEf32Mk#=!#bEe-T=9SM&q!c*&yYRrzIr}PJ+fMhUSpNiRqxRiJB5;HS)^&&b_Q=9W!O@j-bY;-ej-}S6iRIR1>oUrA@B7*@@Pq=~ zZ)?TMw)U;j-i@LJ?p*ok${O8MWZkz9FB~p8sf@73?${_z%h$GTPbk8%6Scv4D{cqI zRkvdTmjZ6ba7jcxw__L`jzINYkYHM&jEDl}Tpx>8DsDUyLoU2x-$1oq zL}P$AYhcZp(xwQD54f=j4#BzNs$fApt9uT2jf4#wB&<)hQD~~6N~h_%RGT;5PJLP} zLTe2zbK3l~Xw}EsBDAfv8>iLpw~i}KS89Vfi{du!tF9v|FV0!h)<~0qS?wd(+BP)> z2VcF_xG_tl-GEkm!J$_(7bU2!(w0bPb;TH=UG?EY!_+X;LG4Pnfmt8VkRWXnI*tIU z(o=)a8RKd%bf#_PjEq>>YUt^lJ^n|aU+)Bs`U(6$Nw-Vr-XQDtDohk$;MDiICiIJ5A5`zj#i9pE0>jj2Jg<=BU zD?lNO@Yyv>sW??z&zuTy{ZP;LmjGNZl++&BO5=)LwUmm@@34=C*r?ZuYENZP32Qvw#X$39Hur(#%65FXj))`J||_ zHPXTgPDIME61Ha6#ZjG?;#2X&bew~;4{~rKYxyQ&6IMfnlUz@SX5-N$a>C?^n)Z5LOv<054JSG|-qkK0cC5%W6queI)3bDGHIQX`^334;$;Zxy+t(MaXU=B(PTablcb=@}VY@G{ z4(Xc@*5IAPc;{>J?90PC*51cWt#|xTGIeJUytU}hH(j28tw_5IbXSh<%3N7($V(dgBk>>)qT^%jX@Rb$opN*6GEA3!}KU zYq{5bw|m+7=No^Ce@U;o&M#0!yYqJYLi^(7hxYDTjOo8qUk?9c=o z_AlC(nQX^E-nBn#+y5A{yF&|y7Ej)9UJ87E^|Pz_7xvvclxGL#hu7)m#o_x3rhPQ$ zJ(}Nj>}&dXk#*>Ko@NUMv8n`vXaE$&LymfEC3QXtQ-tF;(nq@0whB^6`8BA$7k;rr z*SIt{nuv=~wd+61>4+YSu9a#d_K4V?{1?kV^p$M*G^Y4Ed^8&G_0Z@HOjLVRl6A`0 z^hNXv06h(wH5e2Up3lLu s613x%`jF~)#I$@A$n3b+d$)IK@6x`EH{ZG|&-7;LUdX`dPF;HL*|xi}AA$rQBy5`2+N3m5YEv7NUGVL)q#|xG2+u7ts>R2b#Gau1lUn8zJ8%;eL`~2Ib(b zeAvetFdu2)J_hNQ{SfJqjp#bq&ewf)#QuklxZb*}qwPQFNa*eQzK#9=uqEc%)x7%! 
zc~Mvr^6_Slhub|Y;ZyYv*6!o^{XW)!8=by|0$S{1y9EX>A81JV*xvepn)D?aknOew z@Ov6`AKN&vzLWKf;*w}bn{4j(YXfXbHqj3@=uOm~B{ARYzuJHcf-OSv%h2QxBvHf5 zrklNRQ)%4rv4-s8p_DRKId(=f%cM{sRGB0;Q)bG5v7-K3rnIQ^pDkESIjWqsY({7* z!`n$G_B1t0yq}x^GgyJ#6P|W(R-<#3gA;@r!~`lSDE(P7qm>J6P~p5(N^%0GMsv0k z(mjTQXGx({38d3=WYLLEp2-fYLuXH^r$^2^sM5#J3Y?YG1T3+1u#1=%lxDG1N~FAo zC@+*sFr6-FX3mMZBF_$ujXpOtI#LPfB%K|p?3*Z2ZF-g*Q!1I^jN%bn;fj?@Q(~83 zJP_NFZDvuTGFm2O9`_-W&0CbEpP3jRQ~Gmc?yJvfg{2g@l_O?Kbb{J64~1vJ@(D7_ z9dn3eXk+PDm>mDi3*>hpVjRh$DeNP7F46Hxw0!yJ0Gml+4UP=m(TJ zI1=RyVOlz?F=A$J^D$Tn$>&BUo_lt(5-35j3>tTVS_zfhiW#J?-h>l|#FsQ`6DK}I zbLAq`$XR}*P86oC3S5`wh!&IwWZ9rlT~dxPM|Q7q3EPpl(SZ^*Oy)@Tq78GKL0Bl| zzXi`X%w3}0+)d2wae|&DP<=u7CF)@JEnEagY(5wvr1zd4n6-*zU>e51nH!jJ{{{mW z3^H#pwee4Y;FPRA)8pWxW|%Y$&$Ys5AB7-XLbqZaH)DM_Vtuva>Q?O4ODF1BzKOeT z;I5BDtM=6swV_vPxN8fmP4tcSE6KIwmEN`9Yw_#iX6kh9+=45#*boYD9KLd7?Z}nP zTISlJ>yK<6IafPxxap4gamh3FNaALs=SHMwGtzr0`%|R#{m0*Z{8l)& zEsC*G;cgJMcT|%r$v;{GE%J60wIzOubl2nU@8{pi*W312!zqNCcq(tRtMsCOoB zcJ|-s?7tfcMuV5Kzr|6s4c@B7AKeNcueY}Sf?M7`vV}Wt0eoaRTRpXM>Vxss_SMPN z>DoSE9OaDE@?`bJl^3`0gY`)C4#W&OVO8Z(R8`(ql&~^|8h~7<5{|5@Gi4wURi!*< z^dR4OgVH{JwbT27XBxT6@Wycq!)X~=m?qx9!X=H(Iza<^oiYdKEyILyWKdHp60V>3 zimE!;z09-8iJS>Ud14W!U~y!@U~7nGxb9=H-3vcawDfX;JVl>^Bku|O4cN9NK@fgL zhkr(`-=miAk-Uwu(Dws=>`wHM5ZlfQ2uI(pd@FT-C+}aG{P6irxu=GEwk3pwa7cX32 k+U$C13%~p$erP)YH}Bc}C-qVAIQnd;XShxJvQ>cnZ%GPvLI3~& diff --git a/scripts/build_kodi_zip.sh b/scripts/build_kodi_zip.sh index 4ae5971..96277de 100755 --- a/scripts/build_kodi_zip.sh +++ b/scripts/build_kodi_zip.sh @@ -37,6 +37,6 @@ ZIP_PATH="${INSTALL_DIR}/${ZIP_NAME}" ADDON_DIR="$("${ROOT_DIR}/scripts/build_install_addon.sh" >/dev/null; echo "${INSTALL_DIR}/${ADDON_ID}")" rm -f "${ZIP_PATH}" -(cd "${INSTALL_DIR}" && zip -r "${ZIP_NAME}" "$(basename "${ADDON_DIR}")" >/dev/null) +python3 "${ROOT_DIR}/scripts/zip_deterministic.py" "${ZIP_PATH}" "${ADDON_DIR}" >/dev/null echo "${ZIP_PATH}" diff --git a/scripts/build_local_kodi_repo.sh b/scripts/build_local_kodi_repo.sh index 
ddedb92..861970d 100755 --- a/scripts/build_local_kodi_repo.sh +++ b/scripts/build_local_kodi_repo.sh @@ -21,8 +21,20 @@ fi mkdir -p "${REPO_DIR}" +read -r ADDON_ID ADDON_VERSION < <(python3 - "${PLUGIN_ADDON_XML}" <<'PY' +import sys +import xml.etree.ElementTree as ET + +root = ET.parse(sys.argv[1]).getroot() +print(root.attrib.get("id", "plugin.video.viewit"), root.attrib.get("version", "0.0.0")) +PY +) + PLUGIN_ZIP="$("${ROOT_DIR}/scripts/build_kodi_zip.sh")" -cp -f "${PLUGIN_ZIP}" "${REPO_DIR}/" +PLUGIN_ZIP_NAME="$(basename "${PLUGIN_ZIP}")" +PLUGIN_ADDON_DIR_IN_REPO="${REPO_DIR}/${ADDON_ID}" +mkdir -p "${PLUGIN_ADDON_DIR_IN_REPO}" +cp -f "${PLUGIN_ZIP}" "${PLUGIN_ADDON_DIR_IN_REPO}/${PLUGIN_ZIP_NAME}" read -r REPO_ADDON_ID REPO_ADDON_VERSION < <(python3 - "${REPO_ADDON_XML}" <<'PY' import sys @@ -73,7 +85,10 @@ PY REPO_ZIP_NAME="${REPO_ADDON_ID}-${REPO_ADDON_VERSION}.zip" REPO_ZIP_PATH="${REPO_DIR}/${REPO_ZIP_NAME}" rm -f "${REPO_ZIP_PATH}" -(cd "${TMP_DIR}" && zip -r "${REPO_ZIP_PATH}" "${REPO_ADDON_ID}" >/dev/null) +python3 "${ROOT_DIR}/scripts/zip_deterministic.py" "${REPO_ZIP_PATH}" "${TMP_REPO_ADDON_DIR}" >/dev/null +REPO_ADDON_DIR_IN_REPO="${REPO_DIR}/${REPO_ADDON_ID}" +mkdir -p "${REPO_ADDON_DIR_IN_REPO}" +cp -f "${REPO_ZIP_PATH}" "${REPO_ADDON_DIR_IN_REPO}/${REPO_ZIP_NAME}" python3 - "${PLUGIN_ADDON_XML}" "${TMP_REPO_ADDON_DIR}/addon.xml" "${REPO_DIR}/addons.xml" <<'PY' import sys @@ -107,4 +122,5 @@ echo "Repo built:" echo " ${REPO_DIR}/addons.xml" echo " ${REPO_DIR}/addons.xml.md5" echo " ${REPO_ZIP_PATH}" -echo " ${REPO_DIR}/$(basename "${PLUGIN_ZIP}")" +echo " ${PLUGIN_ADDON_DIR_IN_REPO}/${PLUGIN_ZIP_NAME}" +echo " ${REPO_ADDON_DIR_IN_REPO}/${REPO_ZIP_NAME}" diff --git a/scripts/generate_plugin_manifest.py b/scripts/generate_plugin_manifest.py new file mode 100755 index 0000000..fe8d3b9 --- /dev/null +++ b/scripts/generate_plugin_manifest.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 +"""Generate a JSON manifest for addon plugins.""" +from 
from __future__ import annotations

import importlib.util
import inspect
import json
import sys
from pathlib import Path

ROOT_DIR = Path(__file__).resolve().parents[1]
PLUGIN_DIR = ROOT_DIR / "addon" / "plugins"
OUTPUT_PATH = ROOT_DIR / "docs" / "PLUGIN_MANIFEST.json"

sys.path.insert(0, str(ROOT_DIR / "addon"))

try:
    from plugin_interface import BasisPlugin  # type: ignore
except Exception as exc:  # pragma: no cover
    raise SystemExit(f"Failed to import BasisPlugin: {exc}")


def _import_module(path: Path):
    """Load a module straight from its file, bypassing package imports."""
    spec = importlib.util.spec_from_file_location(path.stem, path)
    if spec is None or spec.loader is None:
        raise ImportError(f"Missing spec for {path}")
    mod = importlib.util.module_from_spec(spec)
    sys.modules[spec.name] = mod
    spec.loader.exec_module(mod)
    return mod


def _plugin_classes(mod):
    """Return candidate plugin classes for a module.

    An explicit module-level ``Plugin`` export takes precedence; any
    other BasisPlugin subclasses are returned in case-insensitive
    name order.
    """
    explicit = getattr(mod, "Plugin", None)
    if inspect.isclass(explicit) and issubclass(explicit, BasisPlugin) and explicit is not BasisPlugin:
        return [explicit]
    found = [
        member
        for member in mod.__dict__.values()
        if inspect.isclass(member) and issubclass(member, BasisPlugin) and member is not BasisPlugin
    ]
    found.sort(key=lambda c: c.__name__.casefold())
    return found


def _collect_plugins():
    """Build one manifest record per plugin file under PLUGIN_DIR."""
    records = []
    for source in sorted(PLUGIN_DIR.glob("*.py")):
        # Underscore-prefixed files (templates, helpers) are not plugins.
        if source.name.startswith("_"):
            continue
        record = {
            "file": str(source.relative_to(ROOT_DIR)),
            "module": source.stem,
            "name": None,
            "class": None,
            "version": None,
            "capabilities": [],
            "prefer_source_metadata": False,
            "base_url_setting": None,
            "available": None,
            "unavailable_reason": None,
            "error": None,
        }
        try:
            mod = _import_module(source)
            candidates = _plugin_classes(mod)
            if not candidates:
                record["error"] = "No plugin classes found"
            else:
                plugin_cls = candidates[0]
                plugin = plugin_cls()
                record["class"] = plugin_cls.__name__
                record["name"] = str(getattr(plugin, "name", "") or "") or None
                record["version"] = str(getattr(plugin, "version", "0.0.0") or "0.0.0")
                record["prefer_source_metadata"] = bool(getattr(plugin, "prefer_source_metadata", False))
                record["available"] = bool(getattr(plugin, "is_available", True))
                record["unavailable_reason"] = getattr(plugin, "unavailable_reason", None)
                try:
                    caps = plugin.capabilities()  # type: ignore[call-arg]
                    record["capabilities"] = sorted(str(c) for c in caps) if caps else []
                except Exception:
                    record["capabilities"] = []
                record["base_url_setting"] = getattr(mod, "SETTING_BASE_URL", None)
        except Exception as exc:  # pragma: no cover
            # Import or instantiation failure: keep the record, note the error.
            record["error"] = str(exc)
        records.append(record)

    records.sort(key=lambda rec: (rec.get("name") or rec["module"]).casefold())
    return records


def main() -> int:
    """Write docs/PLUGIN_MANIFEST.json and print its path."""
    if not PLUGIN_DIR.exists():
        raise SystemExit("Plugin directory missing")
    manifest = {
        "schema_version": 1,
        "plugins": _collect_plugins(),
    }
    OUTPUT_PATH.parent.mkdir(parents=True, exist_ok=True)
    OUTPUT_PATH.write_text(json.dumps(manifest, indent=2, ensure_ascii=False) + "\n", encoding="utf-8")
    print(str(OUTPUT_PATH))
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
#!/usr/bin/env python3
"""Create deterministic zip archives.

Usage:
    zip_deterministic.py <zip_path> <root_dir>

The archive will include the root directory itself and all files under
it.  Entries are added in sorted order with a fixed timestamp and fixed
permissions, so rebuilding from identical input yields byte-identical
archives.  ``__pycache__`` directories and ``.pyc`` files are skipped.
"""
import os
import sys
import time
import zipfile
from pathlib import Path

# Earliest timestamp the zip format can represent: 1980-01-01T00:00:00Z.
_ZIP_EPOCH_MIN = 315532800


def _timestamp() -> tuple:
    """Return the fixed (Y, M, D, h, m, s) timestamp for every entry.

    Honours SOURCE_DATE_EPOCH (the reproducible-builds convention),
    clamped to the zip format's 1980 minimum — zipfile would otherwise
    raise ValueError for earlier dates.  Falls back to a constant.
    """
    epoch = os.environ.get("SOURCE_DATE_EPOCH")
    if epoch:
        try:
            value = max(int(epoch), _ZIP_EPOCH_MIN)
            return time.gmtime(value)[:6]
        except Exception:
            pass
    return (2000, 1, 1, 0, 0, 0)


def _iter_files(root: Path):
    """Yield all files under root in sorted order, skipping caches.

    Prunes ``__pycache__`` directories in place and ignores ``.pyc``
    files so compiled artifacts never enter the archive.
    """
    for dirpath, dirnames, filenames in os.walk(root):
        dirnames[:] = sorted(d for d in dirnames if d != "__pycache__")
        for filename in sorted(filenames):
            if filename.endswith(".pyc"):
                continue
            yield Path(dirpath) / filename


def _add_file(zf: zipfile.ZipFile, file_path: Path, arcname: str) -> None:
    """Add one file with fully deterministic metadata."""
    info = zipfile.ZipInfo(arcname, date_time=_timestamp())
    info.compress_type = zipfile.ZIP_DEFLATED
    # Unix mode in the upper 16 bits: regular file (S_IFREG) with
    # rw-r--r-- permissions, so unzip tools extract sensible modes.
    info.external_attr = (0o100644 & 0xFFFF) << 16
    zf.writestr(info, file_path.read_bytes(), compress_type=zipfile.ZIP_DEFLATED)


def main() -> int:
    """CLI entry point.

    Returns 0 on success and 2 on usage or missing-directory errors;
    the created archive's path is printed on stdout for callers.
    """
    if len(sys.argv) != 3:
        print("Usage: zip_deterministic.py <zip_path> <root_dir>", file=sys.stderr)
        return 2

    zip_path = Path(sys.argv[1]).resolve()
    root = Path(sys.argv[2]).resolve()
    if not root.is_dir():
        print(f"Missing root dir: {root}", file=sys.stderr)
        return 2

    # Archive names are relative to the root's parent so the root
    # directory itself appears as the top-level folder in the zip.
    base = root.parent
    zip_path.parent.mkdir(parents=True, exist_ok=True)
    if zip_path.exists():
        zip_path.unlink()

    with zipfile.ZipFile(zip_path, "w") as zf:
        for file_path in sorted(_iter_files(root)):
            arcname = str(file_path.relative_to(base)).replace(os.sep, "/")
            _add_file(zf, file_path, arcname)

    print(str(zip_path))
    return 0


if __name__ == "__main__":
    raise SystemExit(main())