Compare commits

...

5 Commits

9 changed files with 504 additions and 357 deletions

View File

@@ -1,3 +1,23 @@
## 0.1.83.0-dev - 2026-03-15
- dev: Trakt Performance, Suchfilter Phrase-Match, Debug-Settings Expert-Level
## 0.1.82.5-dev - 2026-03-15
- dev: Update-Versionsvergleich numerisch korrigiert
## 0.1.82.0-dev - 2026-03-14
- dev: HDFilme Plot in Rubrik Neuste anzeigen
## 0.1.81.5-dev - 2026-03-14
- dev: YouTube HD via inputstream.adaptive, DokuStreams Suche fix
## 0.1.81.0-dev - 2026-03-14
- dev: YouTube Fixes, Trakt Credentials fest, Upcoming Ansicht, Watchlist Kontextmenue
## 0.1.80.5-dev - 2026-03-13
- dev: YouTube: yt-dlp ZIP-Installation von GitHub, kein yesno-Dialog

View File

@@ -1,5 +1,5 @@
<?xml version='1.0' encoding='utf-8'?> <?xml version='1.0' encoding='utf-8'?>
<addon id="plugin.video.viewit" name="ViewIt" version="0.1.81.0-dev" provider-name="ViewIt"> <addon id="plugin.video.viewit" name="ViewIt" version="0.1.83.5-dev" provider-name="ViewIt">
<requires> <requires>
<import addon="xbmc.python" version="3.0.0" /> <import addon="xbmc.python" version="3.0.0" />
<import addon="script.module.requests" /> <import addon="script.module.requests" />

View File

@@ -1811,7 +1811,7 @@ def _show_root_menu() -> None:
# Update-Hinweis ganz oben wenn neuere Version verfügbar # Update-Hinweis ganz oben wenn neuere Version verfügbar
installed = _get_setting_string("update_installed_version").strip() installed = _get_setting_string("update_installed_version").strip()
available = _get_setting_string("update_available_selected").strip() available = _get_setting_string("update_available_selected").strip()
if installed and available and available not in ("-", "", "0.0.0") and installed != available: if installed and available and available not in ("-", "", "0.0.0") and _version_sort_key(available) > _version_sort_key(installed):
_add_directory_item( _add_directory_item(
handle, handle,
f"Update verfuegbar: {installed} -> {available}", f"Update verfuegbar: {installed} -> {available}",
@@ -1938,6 +1938,8 @@ def _show_plugin_search_results(plugin_name: str, query: str) -> None:
pass pass
raise raise
results = _clean_search_titles([str(t).strip() for t in (results or []) if t and str(t).strip()]) results = _clean_search_titles([str(t).strip() for t in (results or []) if t and str(t).strip()])
from search_utils import matches_query as _mq
results = [r for r in results if _mq(query, title=r)]
results.sort(key=lambda value: value.casefold()) results.sort(key=lambda value: value.casefold())
use_source, show_tmdb, prefer_source = _metadata_policy( use_source, show_tmdb, prefer_source = _metadata_policy(
@@ -2208,6 +2210,8 @@ def _show_search_results(query: str) -> None:
_log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING) _log(f"Suche fehlgeschlagen ({plugin_name}): {exc}", xbmc.LOGWARNING)
continue continue
results = _clean_search_titles([str(t).strip() for t in (results or []) if t and str(t).strip()]) results = _clean_search_titles([str(t).strip() for t in (results or []) if t and str(t).strip()])
from search_utils import matches_query as _mq
results = [r for r in results if _mq(query, title=r)]
_log(f"Treffer ({plugin_name}): {len(results)}", xbmc.LOGDEBUG) _log(f"Treffer ({plugin_name}): {len(results)}", xbmc.LOGDEBUG)
use_source, show_tmdb, prefer_source = _metadata_policy( use_source, show_tmdb, prefer_source = _metadata_policy(
plugin_name, plugin, allow_tmdb=_tmdb_enabled() plugin_name, plugin, allow_tmdb=_tmdb_enabled()
@@ -3965,6 +3969,190 @@ def _resolve_stream_with_retry(plugin: BasisPlugin, link: str) -> str | None:
return final_link return final_link
def _is_inputstream_adaptive_available() -> bool:
"""Prueft ob inputstream.adaptive in Kodi installiert ist."""
try:
import xbmcaddon # type: ignore
xbmcaddon.Addon("inputstream.adaptive")
return True
except Exception:
return False
# ---------------------------------------------------------------------------
# Local MPD manifest server for inputstream.adaptive
# ---------------------------------------------------------------------------

# Lazily-created singleton TCPServer and the port it bound to.
_mpd_server_instance = None
_mpd_server_port = 0


def _ensure_mpd_server() -> int:
    """Start a local HTTP server that serves MPD manifests.

    Returns the bound port. The server runs in a daemon thread; it is
    created on first call and subsequent calls return the cached port.
    """
    global _mpd_server_instance, _mpd_server_port
    if _mpd_server_instance is not None:
        return _mpd_server_port
    import http.server
    import socketserver
    import threading

    # Manifests registered via _register_mpd_manifest, keyed by content hash.
    # Each entry is consumed (popped) by the first matching GET.
    _pending_manifests: dict[str, str] = {}

    class _ManifestHandler(http.server.BaseHTTPRequestHandler):
        def do_GET(self) -> None:
            # Serve a registered manifest exactly once; everything else is 404.
            if "/manifest" in self.path:
                key = self.path.split("key=")[-1].split("&")[0] if "key=" in self.path else ""
                content = _pending_manifests.pop(key, "")
                if content:
                    data = content.encode("utf-8")
                    self.send_response(200)
                    self.send_header("Content-Type", "application/dash+xml")
                    self.send_header("Content-Length", str(len(data)))
                    self.end_headers()
                    self.wfile.write(data)
                    return
            self.send_error(404)

        def log_message(self, *_args: object) -> None:
            pass  # no logging

    # Port 0 lets the OS pick a free port; bind to loopback only.
    server = socketserver.TCPServer(("127.0.0.1", 0), _ManifestHandler)
    _mpd_server_port = server.server_address[1]
    _mpd_server_instance = server
    # Store the pending-manifest dict as an attribute on the server so
    # _register_mpd_manifest can reach the same object.
    server._pending_manifests = _pending_manifests  # type: ignore[attr-defined]
    t = threading.Thread(target=server.serve_forever, daemon=True)
    t.start()
    _log(f"MPD-Server gestartet auf Port {_mpd_server_port}", xbmc.LOGDEBUG)
    return _mpd_server_port
def _register_mpd_manifest(mpd_xml: str) -> str:
    """Register an MPD manifest with the local server and return its URL."""
    import hashlib

    port = _ensure_mpd_server()
    digest = hashlib.md5(mpd_xml.encode()).hexdigest()
    key = digest[:12]
    server = _mpd_server_instance
    if server is not None:
        server._pending_manifests[key] = mpd_xml  # type: ignore[attr-defined]
    return "http://127.0.0.1:%s/plugin.video.viewit/manifest?key=%s" % (port, key)
def _play_dual_stream(
    video_url: str,
    audio_url: str,
    *,
    meta: dict[str, str] | None = None,
    display_title: str | None = None,
    info_labels: dict[str, str] | None = None,
    art: dict[str, str] | None = None,
    cast: list[TmdbCastMember] | None = None,
    resolve_handle: int | None = None,
    trakt_media: dict[str, object] | None = None,
) -> None:
    """Play separate video+audio streams via inputstream.adaptive.

    Starts a local HTTP server that serves a generated MPD manifest
    (per the inputstream.adaptive wiki: Integration + Custom Manifest).
    Falls back to video-only playback when inputstream.adaptive is not
    installed.
    """
    if not _is_inputstream_adaptive_available():
        _log("inputstream.adaptive nicht verfuegbar Video-only Wiedergabe", xbmc.LOGWARNING)
        _play_final_link(
            video_url, display_title=display_title, info_labels=info_labels,
            art=art, cast=cast, resolve_handle=resolve_handle, trakt_media=trakt_media,
        )
        return
    from xml.sax.saxutils import escape as xml_escape
    # Stream metadata from yt-dlp, with HD defaults when a key is missing.
    m = meta or {}
    vcodec = m.get("vc", "avc1.640028")
    acodec = m.get("ac", "mp4a.40.2")
    w = m.get("w", "1920")
    h = m.get("h", "1080")
    fps = m.get("fps", "25")
    vbr = m.get("vbr", "5000000")
    abr = m.get("abr", "128000")
    asr = m.get("asr", "44100")
    ach = m.get("ach", "2")
    dur = m.get("dur", "0")
    dur_attr = ""
    if dur and dur != "0":
        dur_attr = f' mediaPresentationDuration="PT{dur}S"'
    # Minimal static DASH manifest: one video and one audio representation,
    # each pointing at its direct stream URL via <BaseURL>.
    mpd_xml = (
        '<?xml version="1.0" encoding="UTF-8"?>'
        '<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" type="static"'
        ' minBufferTime="PT2S"'
        ' profiles="urn:mpeg:dash:profile:isoff-on-demand:2011"'
        + dur_attr + '>'
        '<Period>'
        '<AdaptationSet mimeType="video/mp4" contentType="video" subsegmentAlignment="true">'
        '<Representation id="video" bandwidth="' + vbr + '"'
        ' codecs="' + xml_escape(vcodec) + '"'
        ' width="' + w + '" height="' + h + '"'
        ' frameRate="' + fps + '">'
        '<BaseURL>' + xml_escape(video_url) + '</BaseURL>'
        '</Representation>'
        '</AdaptationSet>'
        '<AdaptationSet mimeType="audio/mp4" contentType="audio" subsegmentAlignment="true">'
        '<Representation id="audio" bandwidth="' + abr + '"'
        ' codecs="' + xml_escape(acodec) + '"'
        ' audioSamplingRate="' + asr + '">'
        '<AudioChannelConfiguration'
        ' schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011"'
        ' value="' + ach + '"/>'
        '<BaseURL>' + xml_escape(audio_url) + '</BaseURL>'
        '</Representation>'
        '</AdaptationSet>'
        '</Period>'
        '</MPD>'
    )
    mpd_url = _register_mpd_manifest(mpd_xml)
    _log(f"MPD-Manifest URL: {mpd_url}", xbmc.LOGDEBUG)
    list_item = xbmcgui.ListItem(label=display_title or "", path=mpd_url)
    list_item.setMimeType("application/dash+xml")
    list_item.setContentLookup(False)
    list_item.setProperty("inputstream", "inputstream.adaptive")
    list_item.setProperty("inputstream.adaptive.manifest_type", "mpd")
    merged_info: dict[str, object] = dict(info_labels or {})
    if display_title:
        merged_info["title"] = display_title
    _apply_video_info(list_item, merged_info, cast)
    if art:
        # setArt may be absent on older Kodi list items; probe before calling.
        setter = getattr(list_item, "setArt", None)
        if callable(setter):
            try:
                setter(art)
            except Exception:
                pass
    # Prefer setResolvedUrl when invoked with a plugin handle; otherwise
    # start playback directly via the Kodi player.
    resolved = False
    if resolve_handle is not None:
        resolver = getattr(xbmcplugin, "setResolvedUrl", None)
        if callable(resolver):
            try:
                resolver(resolve_handle, True, list_item)
                resolved = True
            except Exception:
                pass
    if not resolved:
        xbmc.Player().play(item=mpd_url, listitem=list_item)
    # Optional Trakt scrobbling when enabled in settings.
    if trakt_media and _get_setting_bool("trakt_enabled", default=False):
        _trakt_scrobble_start_async(trakt_media)
        _trakt_monitor_playback(trakt_media)
def _play_final_link( def _play_final_link(
link: str, link: str,
*, *,
@@ -3975,6 +4163,25 @@ def _play_final_link(
resolve_handle: int | None = None, resolve_handle: int | None = None,
trakt_media: dict[str, object] | None = None, trakt_media: dict[str, object] | None = None,
) -> None: ) -> None:
# Getrennte Video+Audio-Streams (yt-dlp): via inputstream.adaptive abspielen
audio_url = None
meta: dict[str, str] = {}
try:
from ytdlp_helper import split_video_audio
link, audio_url, meta = split_video_audio(link)
except Exception:
pass
if audio_url:
_play_dual_stream(
link, audio_url,
meta=meta,
display_title=display_title, info_labels=info_labels,
art=art, cast=cast, resolve_handle=resolve_handle,
trakt_media=trakt_media,
)
return
list_item = xbmcgui.ListItem(label=display_title or "", path=link) list_item = xbmcgui.ListItem(label=display_title or "", path=link)
try: try:
list_item.setProperty("IsPlayable", "true") list_item.setProperty("IsPlayable", "true")
@@ -4707,10 +4914,11 @@ def _show_trakt_watchlist(media_type: str = "") -> None:
_set_content(handle, "tvshows") _set_content(handle, "tvshows")
items = client.get_watchlist(token, media_type=media_type) items = client.get_watchlist(token, media_type=media_type)
tmdb_prefetched = _tmdb_labels_and_art_bulk([i.title for i in items]) if _tmdb_enabled() else {}
for item in items: for item in items:
label = f"{item.title} ({item.year})" if item.year else item.title label = f"{item.title} ({item.year})" if item.year else item.title
tmdb_info, art, _ = _tmdb_labels_and_art(item.title) tmdb_info, art, _ = tmdb_prefetched.get(item.title, ({}, {}, []))
info_labels: dict[str, object] = dict(tmdb_info) info_labels: dict[str, object] = dict(tmdb_info)
info_labels["title"] = label info_labels["title"] = label
info_labels["tvshowtitle"] = item.title info_labels["tvshowtitle"] = item.title
@@ -4718,16 +4926,7 @@ def _show_trakt_watchlist(media_type: str = "") -> None:
info_labels["year"] = item.year info_labels["year"] = item.year
info_labels["mediatype"] = "tvshow" info_labels["mediatype"] = "tvshow"
match = _trakt_find_in_plugins(item.title) _add_directory_item(handle, label, "search", {"query": item.title}, is_folder=True, info_labels=info_labels, art=art)
if match:
plugin_name, matched_title = match
action = "seasons"
params: dict[str, str] = {"plugin": plugin_name, "title": matched_title}
else:
action = "search"
params = {"query": item.title}
_add_directory_item(handle, label, action, params, is_folder=True, info_labels=info_labels, art=art)
if not items: if not items:
xbmcgui.Dialog().notification("Trakt", "Watchlist ist leer.", xbmcgui.NOTIFICATION_INFO, 3000) xbmcgui.Dialog().notification("Trakt", "Watchlist ist leer.", xbmcgui.NOTIFICATION_INFO, 3000)
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)
@@ -4746,6 +4945,7 @@ def _show_trakt_history(page: int = 1) -> None:
_set_content(handle, "episodes") _set_content(handle, "episodes")
items = client.get_history(token, page=page, limit=LIST_PAGE_SIZE) items = client.get_history(token, page=page, limit=LIST_PAGE_SIZE)
tmdb_prefetched = _tmdb_labels_and_art_bulk(list(dict.fromkeys(i.title for i in items))) if _tmdb_enabled() else {}
for item in items: for item in items:
is_episode = item.media_type == "episode" and item.season and item.episode is_episode = item.media_type == "episode" and item.season and item.episode
@@ -4766,7 +4966,7 @@ def _show_trakt_history(page: int = 1) -> None:
art["fanart"] = item.show_fanart art["fanart"] = item.show_fanart
if item.show_poster: if item.show_poster:
art["poster"] = item.show_poster art["poster"] = item.show_poster
_, tmdb_art, _ = _tmdb_labels_and_art(item.title) _, tmdb_art, _ = tmdb_prefetched.get(item.title, ({}, {}, []))
for _k, _v in tmdb_art.items(): for _k, _v in tmdb_art.items():
art.setdefault(_k, _v) art.setdefault(_k, _v)
@@ -4793,24 +4993,7 @@ def _show_trakt_history(page: int = 1) -> None:
f"RunPlugin({sys.argv[0]}?{wl_params})")) f"RunPlugin({sys.argv[0]}?{wl_params})"))
# Navigation: Episoden direkt abspielen, Serien zur Staffelauswahl # Navigation: Episoden direkt abspielen, Serien zur Staffelauswahl
match = _trakt_find_in_plugins(item.title) _add_directory_item(handle, label, "search", {"query": item.title}, is_folder=True, info_labels=info_labels, art=art, context_menu=ctx or None)
if match:
plugin_name, matched_title = match
if is_episode:
action = "play_episode"
params: dict[str, str] = {
"plugin": plugin_name,
"title": matched_title,
"season": f"Staffel {item.season}",
"episode": f"Episode {item.episode}",
}
_add_directory_item(handle, label, action, params, is_folder=False, info_labels=info_labels, art=art, context_menu=ctx or None)
else:
action = "seasons"
params = {"plugin": plugin_name, "title": matched_title}
_add_directory_item(handle, label, action, params, is_folder=True, info_labels=info_labels, art=art, context_menu=ctx or None)
else:
_add_directory_item(handle, label, "search", {"query": item.title}, is_folder=True, info_labels=info_labels, art=art, context_menu=ctx or None)
if len(items) >= LIST_PAGE_SIZE: if len(items) >= LIST_PAGE_SIZE:
_add_directory_item(handle, "Naechste Seite >>", "trakt_history", {"page": str(page + 1)}, is_folder=True) _add_directory_item(handle, "Naechste Seite >>", "trakt_history", {"page": str(page + 1)}, is_folder=True)
@@ -4863,6 +5046,10 @@ def _show_trakt_upcoming() -> None:
pass pass
dated_items.append((airdate, item)) dated_items.append((airdate, item))
# TMDB-Artwork fuer alle Serien parallel prefetchen (dedupliziert)
show_titles = list(dict.fromkeys(item.show_title for _, item in dated_items))
tmdb_prefetched = _tmdb_labels_and_art_bulk(show_titles) if _tmdb_enabled() else {}
last_date: _date | None = None last_date: _date | None = None
for airdate, item in dated_items: for airdate, item in dated_items:
# Datums-Ueberschrift einfuegen # Datums-Ueberschrift einfuegen
@@ -4914,22 +5101,12 @@ def _show_trakt_upcoming() -> None:
art["fanart"] = item.episode_thumb art["fanart"] = item.episode_thumb
elif item.show_fanart: elif item.show_fanart:
art["fanart"] = item.show_fanart art["fanart"] = item.show_fanart
_, tmdb_art, _ = _tmdb_labels_and_art(item.show_title) _, tmdb_art, _ = tmdb_prefetched.get(item.show_title, ({}, {}, []))
for _k, _v in tmdb_art.items(): for _k, _v in tmdb_art.items():
art.setdefault(_k, _v) art.setdefault(_k, _v)
match = _trakt_find_in_plugins(item.show_title) action = "search"
if match: params: dict[str, str] = {"query": item.show_title}
plugin_name, matched_title = match
action = "episodes"
params: dict[str, str] = {
"plugin": plugin_name,
"title": matched_title,
"season": f"Staffel {item.season}",
}
else:
action = "search"
params = {"query": item.show_title}
_add_directory_item(handle, label, action, params, is_folder=True, info_labels=info_labels, art=art) _add_directory_item(handle, label, action, params, is_folder=True, info_labels=info_labels, art=art)
@@ -4968,47 +5145,18 @@ def _show_trakt_continue_watching() -> None:
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)
return return
# TMDB-Artwork fuer alle Serien parallel prefetchen
tmdb_prefetched = _tmdb_labels_and_art_bulk(list(seen.keys())) if _tmdb_enabled() else {}
for last in seen.values(): for last in seen.values():
next_season = last.season next_season = last.season
next_ep = last.episode + 1 next_ep = last.episode + 1
match = _trakt_find_in_plugins(last.title)
# Wenn kein Plugin-Match: Suchaktion anbieten (kein Episode-Overflow-Problem)
if not match:
label = f"{last.title} \u2013 S{next_season:02d}E{next_ep:02d}"
sub = f"(zuletzt: S{last.season:02d}E{last.episode:02d})"
display_label = f"{label} {sub}"
info_labels: dict[str, object] = {
"title": display_label,
"tvshowtitle": last.title,
"mediatype": "episode",
}
if last.year:
info_labels["year"] = last.year
_, art, _ = _tmdb_labels_and_art(last.title)
_add_directory_item(handle, display_label, "search", {"query": last.title}, is_folder=True, info_labels=info_labels, art=art)
continue
plugin_name, matched_title = match
# Prüfe ob die nächste Episode im Plugin tatsächlich existiert
plugin = _discover_plugins().get(plugin_name)
episodes_getter = getattr(plugin, "episodes_for_season", None) if plugin else None
if callable(episodes_getter):
try:
ep_list = episodes_getter(matched_title, next_season) or []
if next_ep > len(ep_list):
# Letzte Folge der Staffel war die letzte nächste Staffel, Folge 1
next_season += 1
next_ep = 1
except Exception:
pass
label = f"{last.title} \u2013 S{next_season:02d}E{next_ep:02d}" label = f"{last.title} \u2013 S{next_season:02d}E{next_ep:02d}"
sub = f"(zuletzt: S{last.season:02d}E{last.episode:02d})" sub = f"(zuletzt: S{last.season:02d}E{last.episode:02d})"
display_label = f"{label} {sub}" display_label = f"{label} {sub}"
info_labels = { info_labels: dict[str, object] = {
"title": display_label, "title": display_label,
"tvshowtitle": last.title, "tvshowtitle": last.title,
"season": next_season, "season": next_season,
@@ -5018,14 +5166,8 @@ def _show_trakt_continue_watching() -> None:
if last.year: if last.year:
info_labels["year"] = last.year info_labels["year"] = last.year
_, art, _ = _tmdb_labels_and_art(last.title) _, art, _ = tmdb_prefetched.get(last.title, ({}, {}, []))
_add_directory_item(handle, display_label, "search", {"query": last.title}, is_folder=True, info_labels=info_labels, art=art)
params: dict[str, str] = {
"plugin": plugin_name,
"title": matched_title,
"season": f"Staffel {next_season}",
}
_add_directory_item(handle, display_label, "episodes", params, is_folder=True, info_labels=info_labels, art=art)
xbmcplugin.endOfDirectory(handle) xbmcplugin.endOfDirectory(handle)

View File

@@ -286,7 +286,7 @@ class DokuStreamsPlugin(BasisPlugin):
soup = _get_soup(search_url, session=session) soup = _get_soup(search_url, session=session)
except Exception: except Exception:
return [] return []
return _parse_listing_hits(soup, query=query) return _parse_listing_hits(soup)
def capabilities(self) -> set[str]: def capabilities(self) -> set[str]:
return {"genres", "popular_series", "tags", "random"} return {"genres", "popular_series", "tags", "random"}
@@ -455,15 +455,24 @@ class DokuStreamsPlugin(BasisPlugin):
art = {"thumb": poster, "poster": poster} art = {"thumb": poster, "poster": poster}
return info, art, None return info, art, None
def series_url_for_title(self, title: str) -> Optional[str]:
return self._title_to_url.get((title or "").strip())
def remember_series_url(self, title: str, url: str) -> None:
title = (title or "").strip()
url = (url or "").strip()
if title and url:
self._title_to_url[title] = url
def seasons_for(self, title: str) -> List[str]: def seasons_for(self, title: str) -> List[str]:
title = (title or "").strip() title = (title or "").strip()
if not title or title not in self._title_to_url: if not title:
return [] return []
return ["Stream"] return ["Stream"]
def episodes_for(self, title: str, season: str) -> List[str]: def episodes_for(self, title: str, season: str) -> List[str]:
title = (title or "").strip() title = (title or "").strip()
if not title or title not in self._title_to_url: if not title:
return [] return []
return [title] return [title]
@@ -537,6 +546,14 @@ class DokuStreamsPlugin(BasisPlugin):
"""Folgt Redirects und versucht ResolveURL fuer Hoster-Links.""" """Folgt Redirects und versucht ResolveURL fuer Hoster-Links."""
if not link: if not link:
return None return None
# YouTube-URLs via yt-dlp aufloesen
from ytdlp_helper import extract_youtube_id, resolve_youtube_url
yt_id = extract_youtube_id(link)
if yt_id:
resolved = resolve_youtube_url(yt_id)
if resolved:
return resolved
return None
from plugin_helpers import resolve_via_resolveurl from plugin_helpers import resolve_via_resolveurl
resolved = resolve_via_resolveurl(link, fallback_to_link=False) resolved = resolve_via_resolveurl(link, fallback_to_link=False)
if resolved: if resolved:

View File

@@ -388,7 +388,7 @@ class HdfilmePlugin(BasisPlugin):
info: dict[str, str] = {"title": title} info: dict[str, str] = {"title": title}
art: dict[str, str] = {} art: dict[str, str] = {}
# Cache-Hit # Cache-Hit nur zurückgeben wenn Plot vorhanden (sonst Detailseite laden)
cached = self._title_meta.get(title) cached = self._title_meta.get(title)
if cached: if cached:
plot, poster = cached plot, poster = cached
@@ -396,7 +396,7 @@ class HdfilmePlugin(BasisPlugin):
info["plot"] = plot info["plot"] = plot
if poster: if poster:
art["thumb"] = art["poster"] = poster art["thumb"] = art["poster"] = poster
if info or art: if plot:
return info, art, None return info, art, None
# Detailseite laden # Detailseite laden

View File

@@ -57,7 +57,6 @@ else: # pragma: no cover
SETTING_BASE_URL = "serienstream_base_url" SETTING_BASE_URL = "serienstream_base_url"
SETTING_CATALOG_SEARCH = "serienstream_catalog_search"
DEFAULT_BASE_URL = "https://s.to" DEFAULT_BASE_URL = "https://s.to"
DEFAULT_PREFERRED_HOSTERS = ["voe"] DEFAULT_PREFERRED_HOSTERS = ["voe"]
DEFAULT_TIMEOUT = 20 DEFAULT_TIMEOUT = 20
@@ -80,10 +79,7 @@ HEADERS = {
SESSION_CACHE_TTL_SECONDS = 300 SESSION_CACHE_TTL_SECONDS = 300
SESSION_CACHE_PREFIX = "viewit.serienstream" SESSION_CACHE_PREFIX = "viewit.serienstream"
SESSION_CACHE_MAX_TITLE_URLS = 800 SESSION_CACHE_MAX_TITLE_URLS = 800
CATALOG_SEARCH_TTL_SECONDS = 600
CATALOG_SEARCH_CACHE_KEY = "catalog_index"
GENRE_LIST_PAGE_SIZE = 20 GENRE_LIST_PAGE_SIZE = 20
_CATALOG_INDEX_MEMORY: tuple[float, list["SeriesResult"]] = (0.0, [])
ProgressCallback = Optional[Callable[[str, int | None], Any]] ProgressCallback = Optional[Callable[[str, int | None], Any]]
@@ -575,8 +571,8 @@ def _search_series_server(query: str) -> list[SeriesResult]:
if not query: if not query:
return [] return []
base = _get_base_url() base = _get_base_url()
search_url = f"{base}/search?q={quote(query)}" search_url = f"{base}/suche?term={quote(query)}"
alt_url = f"{base}/suche?q={quote(query)}" alt_url = f"{base}/search?term={quote(query)}"
for url in (search_url, alt_url): for url in (search_url, alt_url):
try: try:
body = _get_html_simple(url) body = _get_html_simple(url)
@@ -606,158 +602,30 @@ def _search_series_server(query: str) -> list[SeriesResult]:
continue continue
seen_urls.add(url_abs) seen_urls.add(url_abs)
results.append(SeriesResult(title=title, description="", url=url_abs)) results.append(SeriesResult(title=title, description="", url=url_abs))
filtered = [r for r in results if _matches_query(query, title=r.title)]
if filtered:
return filtered
if results: if results:
return results return results
api_results = _search_series_api(query)
if api_results:
return api_results
return [] return []
def _extract_catalog_index_from_html(body: str, *, progress_callback: ProgressCallback = None) -> list[SeriesResult]:
    """Regex-based parser for the series catalog page HTML.

    Extracts one SeriesResult per <li class="series-item"> entry, skipping
    season/episode sub-pages and duplicate URLs. Emits progress via
    *progress_callback* for the first item and then every 200 items.
    """
    items: list[SeriesResult] = []
    if not body:
        return items
    seen_urls: set[str] = set()
    item_re = re.compile(
        r"<li[^>]*class=[\"'][^\"']*series-item[^\"']*[\"'][^>]*>(.*?)</li>",
        re.IGNORECASE | re.DOTALL,
    )
    anchor_re = re.compile(r"<a[^>]+href=[\"']([^\"']+)[\"'][^>]*>(.*?)</a>", re.IGNORECASE | re.DOTALL)
    data_search_re = re.compile(r"data-search=[\"']([^\"']*)[\"']", re.IGNORECASE)
    for idx, match in enumerate(item_re.finditer(body), start=1):
        if idx == 1 or idx % 200 == 0:
            _emit_progress(progress_callback, f"Katalog parsen {idx}", 62)
        block = match.group(0)
        inner = match.group(1) or ""
        anchor_match = anchor_re.search(inner)
        if not anchor_match:
            continue
        href = (anchor_match.group(1) or "").strip()
        url = _absolute_url(href)
        # Only top-level series pages; skip season/episode sub-pages.
        if not url or "/serie/" not in url or "/staffel-" in url or "/episode-" in url:
            continue
        if url in seen_urls:
            continue
        seen_urls.add(url)
        title_raw = anchor_match.group(2) or ""
        # Strip tags, collapse whitespace and unescape HTML entities.
        title = unescape(re.sub(r"\s+", " ", _strip_tags(title_raw))).strip()
        if not title:
            continue
        # The data-search attribute carries the searchable description text.
        search_match = data_search_re.search(block)
        description = (search_match.group(1) or "").strip() if search_match else ""
        items.append(SeriesResult(title=title, description=description, url=url))
    return items
def _catalog_index_from_soup(soup: BeautifulSoupT) -> list[SeriesResult]:
    """Extract the series catalog index from a parsed BeautifulSoup page.

    Yields one SeriesResult per <li class="series-item">, skipping
    season/episode sub-pages and duplicate URLs.
    """
    items: list[SeriesResult] = []
    if not soup:
        return items
    seen_urls: set[str] = set()
    for item in soup.select("li.series-item"):
        anchor = item.find("a", href=True)
        if not anchor:
            continue
        href = (anchor.get("href") or "").strip()
        url = _absolute_url(href)
        # Only top-level series pages; skip season/episode sub-pages.
        if not url or "/serie/" not in url or "/staffel-" in url or "/episode-" in url:
            continue
        if url in seen_urls:
            continue
        seen_urls.add(url)
        title = (anchor.get_text(" ", strip=True) or "").strip()
        if not title:
            continue
        # The data-search attribute carries the searchable description text.
        description = (item.get("data-search") or "").strip()
        items.append(SeriesResult(title=title, description=description, url=url))
    return items
def _load_catalog_index_from_cache() -> Optional[list[SeriesResult]]:
    """Load the cached catalog index: in-memory copy first, session cache second.

    Returns None when nothing usable is cached.
    """
    global _CATALOG_INDEX_MEMORY
    expires_at, cached = _CATALOG_INDEX_MEMORY
    if cached and expires_at > time.time():
        return list(cached)
    raw = _session_cache_get(CATALOG_SEARCH_CACHE_KEY)
    if not isinstance(raw, list):
        return None
    items: list[SeriesResult] = []
    for entry in raw:
        # Cached entries are [title, url, description?, cover?] lists.
        if not isinstance(entry, list) or len(entry) < 2:
            continue
        title = str(entry[0] or "").strip()
        url = str(entry[1] or "").strip()
        description = str(entry[2] or "") if len(entry) > 2 else ""
        cover = str(entry[3] or "").strip() if len(entry) > 3 else ""
        if title and url:
            items.append(SeriesResult(title=title, description=description, url=url, cover=cover))
    if items:
        # Refresh the in-memory copy so later lookups skip the session cache.
        _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(items))
    return items or None
def _store_catalog_index_in_cache(items: list[SeriesResult]) -> None:
    """Persist the catalog index to the in-memory and session caches.

    No-op for an empty list; entries without title or URL are dropped.
    """
    global _CATALOG_INDEX_MEMORY
    if not items:
        return
    _CATALOG_INDEX_MEMORY = (time.time() + CATALOG_SEARCH_TTL_SECONDS, list(items))
    # Serialize as plain [title, url, description, cover] lists for the cache.
    payload: list[list[str]] = []
    for entry in items:
        if not entry.title or not entry.url:
            continue
        payload.append([entry.title, entry.url, entry.description, entry.cover])
    _session_cache_set(CATALOG_SEARCH_CACHE_KEY, payload, ttl_seconds=CATALOG_SEARCH_TTL_SECONDS)
def search_series(query: str, *, progress_callback: ProgressCallback = None) -> list[SeriesResult]: def search_series(query: str, *, progress_callback: ProgressCallback = None) -> list[SeriesResult]:
"""Sucht Serien. Katalog-Suche (vollstaendig) oder API-Suche (max 10) je nach Setting.""" """Sucht Serien. Server-Suche (/suche?term=) zuerst, API als Fallback."""
_ensure_requests() _ensure_requests()
if not _normalize_search_text(query): if not _normalize_search_text(query):
return [] return []
use_catalog = _get_setting_bool(SETTING_CATALOG_SEARCH, default=True) # 1. Server-Suche (schnell, vollstaendig, direkte HTML-Suche)
_emit_progress(progress_callback, "Suche", 20)
if use_catalog:
_emit_progress(progress_callback, "Pruefe Such-Cache", 15)
cached = _load_catalog_index_from_cache()
if cached is not None:
matched_from_cache = [entry for entry in cached if entry.title and _matches_query(query, title=entry.title)]
_emit_progress(progress_callback, f"Cache-Treffer: {len(cached)}", 35)
if matched_from_cache:
return matched_from_cache
_emit_progress(progress_callback, "Lade Katalogseite", 42)
catalog_url = f"{_get_base_url()}/serien?by=genre"
items: list[SeriesResult] = []
try:
soup = _get_soup_simple(catalog_url)
items = _catalog_index_from_soup(soup)
except Exception:
body = _get_html_simple(catalog_url)
items = _extract_catalog_index_from_html(body, progress_callback=progress_callback)
if not items:
_emit_progress(progress_callback, "Fallback-Parser", 58)
soup = BeautifulSoup(body, "html.parser")
items = _catalog_index_from_soup(soup)
if items:
_store_catalog_index_in_cache(items)
_emit_progress(progress_callback, f"Filtere Treffer ({len(items)})", 70)
return [entry for entry in items if entry.title and _matches_query(query, title=entry.title)]
# API-Suche (primaer wenn Katalog deaktiviert, Fallback wenn Katalog leer)
_emit_progress(progress_callback, "API-Suche", 60)
api_results = _search_series_api(query)
if api_results:
_emit_progress(progress_callback, f"API-Treffer: {len(api_results)}", 80)
return api_results
_emit_progress(progress_callback, "Server-Suche", 85)
server_results = _search_series_server(query) server_results = _search_series_server(query)
if server_results: if server_results:
_emit_progress(progress_callback, f"Server-Treffer: {len(server_results)}", 95) return server_results
return [entry for entry in server_results if entry.title and _matches_query(query, title=entry.title)]
return [] # 2. API-Suche (Fallback, max 10 Ergebnisse)
_emit_progress(progress_callback, "API-Suche", 60)
return _search_series_api(query)
def parse_series_catalog(soup: BeautifulSoupT) -> dict[str, list[SeriesResult]]: def parse_series_catalog(soup: BeautifulSoupT) -> dict[str, list[SeriesResult]]:
@@ -1252,7 +1120,7 @@ class SerienstreamPlugin(BasisPlugin):
except Exception: except Exception:
continue continue
url = str(item.get("url") or "").strip() url = str(item.get("url") or "").strip()
if number <= 0 or not url: if number < 0 or not url:
continue continue
seasons.append(SeasonInfo(number=number, url=url, episodes=[])) seasons.append(SeasonInfo(number=number, url=url, episodes=[]))
if not seasons: if not seasons:
@@ -1794,6 +1662,8 @@ class SerienstreamPlugin(BasisPlugin):
@staticmethod @staticmethod
def _season_label(number: int) -> str: def _season_label(number: int) -> str:
if number == 0:
return "Filme"
return f"Staffel {number}" return f"Staffel {number}"
@staticmethod @staticmethod
@@ -1808,6 +1678,8 @@ class SerienstreamPlugin(BasisPlugin):
@staticmethod @staticmethod
def _parse_season_number(label: str) -> int | None: def _parse_season_number(label: str) -> int | None:
if (label or "").strip().casefold() == "filme":
return 0
digits = "".join(ch for ch in label if ch.isdigit()) digits = "".join(ch for ch in label if ch.isdigit())
if not digits: if not digits:
return None return None

View File

@@ -135,7 +135,7 @@ def _videos_from_search_data(data: dict) -> List[str]:
def _search_with_ytdlp(query: str, count: int = 20) -> List[str]: def _search_with_ytdlp(query: str, count: int = 20) -> List[str]:
"""Sucht YouTube-Videos via yt-dlp ytsearch-Extraktor.""" """Sucht YouTube-Videos via yt-dlp ytsearch-Extraktor."""
if not _ensure_ytdlp_in_path(): if not ensure_ytdlp_in_path():
return [] return []
try: try:
from yt_dlp import YoutubeDL # type: ignore from yt_dlp import YoutubeDL # type: ignore
@@ -174,95 +174,7 @@ def _fetch_search_videos(url: str) -> List[str]:
return [] return []
def _fix_strptime() -> None: from ytdlp_helper import ensure_ytdlp_in_path, resolve_youtube_url
"""Kodi-Workaround: datetime.strptime ist manchmal None."""
import datetime as _dt
import time as _time
if not callable(getattr(_dt.datetime, "strptime", None)):
_dt.datetime.strptime = lambda s, f: _dt.datetime(*(_time.strptime(s, f)[0:6]))
def _ensure_ytdlp_in_path() -> bool:
"""Fuegt script.module.yt-dlp/lib zum sys.path hinzu falls noetig."""
_fix_strptime()
try:
import yt_dlp # type: ignore # noqa: F401
return True
except ImportError:
pass
try:
import sys, os
import xbmcvfs # type: ignore
lib_path = xbmcvfs.translatePath("special://home/addons/script.module.yt-dlp/lib")
if lib_path and os.path.isdir(lib_path) and lib_path not in sys.path:
sys.path.insert(0, lib_path)
import yt_dlp # type: ignore # noqa: F401
return True
except Exception:
pass
return False
def _get_quality_format() -> str:
"""Liest YouTube-Qualitaet aus den Addon-Einstellungen."""
_QUALITY_MAP = {
"0": "best[ext=mp4]/best",
"1": "bestvideo[height<=1080][ext=mp4]+bestaudio[ext=m4a]/best[height<=1080][ext=mp4]/best",
"2": "bestvideo[height<=720][ext=mp4]+bestaudio[ext=m4a]/best[height<=720][ext=mp4]/best",
"3": "bestvideo[height<=480][ext=mp4]+bestaudio[ext=m4a]/best[height<=480][ext=mp4]/best",
"4": "bestvideo[height<=360][ext=mp4]+bestaudio[ext=m4a]/best[height<=360][ext=mp4]/best",
}
try:
import xbmcaddon # type: ignore
val = xbmcaddon.Addon().getSetting("youtube_quality") or "0"
return _QUALITY_MAP.get(val, _QUALITY_MAP["0"])
except Exception:
return _QUALITY_MAP["0"]
def _resolve_with_ytdlp(video_id: str) -> Optional[str]:
"""Loest Video-ID via yt-dlp zu direkter Stream-URL auf."""
if not _ensure_ytdlp_in_path():
_log("[YouTube] yt-dlp nicht verfuegbar (script.module.yt-dlp fehlt)")
try:
import xbmcgui
xbmcgui.Dialog().notification(
"yt-dlp fehlt",
"Bitte yt-dlp in den ViewIT-Einstellungen installieren.",
xbmcgui.NOTIFICATION_ERROR,
5000,
)
except Exception:
pass
return None
try:
from yt_dlp import YoutubeDL # type: ignore
except ImportError:
return None
url = f"https://www.youtube.com/watch?v={video_id}"
fmt = _get_quality_format()
ydl_opts: Dict[str, Any] = {
"format": fmt,
"quiet": True,
"no_warnings": True,
"extract_flat": False,
}
try:
with YoutubeDL(ydl_opts) as ydl:
info = ydl.extract_info(url, download=False)
if not info:
return None
# Einzelnes Video
direct = info.get("url")
if direct:
return direct
# Formatauswahl
formats = info.get("formats", [])
if formats:
return formats[-1].get("url")
except Exception as exc:
_log(f"[YouTube] yt-dlp Fehler fuer {video_id}: {exc}")
return None
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
@@ -306,7 +218,7 @@ class YoutubePlugin(BasisPlugin):
video_id = _decode_id(episode) or _decode_id(title) video_id = _decode_id(episode) or _decode_id(title)
if not video_id: if not video_id:
return None return None
return _resolve_with_ytdlp(video_id) return resolve_youtube_url(video_id)
def resolve_stream_link(self, link: str) -> Optional[str]: def resolve_stream_link(self, link: str) -> Optional[str]:
return link # bereits direkte URL return link # bereits direkte URL

View File

@@ -2,8 +2,7 @@
<settings> <settings>
<category label="Quellen"> <category label="Quellen">
<setting id="serienstream_base_url" type="text" label="SerienStream Basis-URL" default="https://s.to" /> <setting id="serienstream_base_url" type="text" label="SerienStream Basis-URL" default="https://s.to" />
<setting id="serienstream_catalog_search" type="bool" label="SerienStream: Katalog-Suche (mehr Ergebnisse, langsamer)" default="true" /> <setting id="aniworld_base_url" type="text" label="AniWorld Basis-URL" default="https://aniworld.to" />
<setting id="aniworld_base_url" type="text" label="AniWorld Basis-URL" default="https://aniworld.to" />
<setting id="topstream_base_url" type="text" label="TopStream Basis-URL" default="https://topstreamfilm.live" /> <setting id="topstream_base_url" type="text" label="TopStream Basis-URL" default="https://topstreamfilm.live" />
<setting id="einschalten_base_url" type="text" label="Einschalten Basis-URL" default="https://einschalten.in" /> <setting id="einschalten_base_url" type="text" label="Einschalten Basis-URL" default="https://einschalten.in" />
<setting id="filmpalast_base_url" type="text" label="Filmpalast Basis-URL" default="https://filmpalast.to" /> <setting id="filmpalast_base_url" type="text" label="Filmpalast Basis-URL" default="https://filmpalast.to" />
@@ -87,40 +86,40 @@
</category> </category>
<category label="Debug Global"> <category label="Debug Global">
<setting id="debug_log_urls" type="bool" label="URLs mitschreiben (global)" default="false" /> <setting id="debug_log_urls" type="bool" label="URLs mitschreiben (global)" default="false" level="3" />
<setting id="debug_dump_html" type="bool" label="HTML speichern (global)" default="false" /> <setting id="debug_dump_html" type="bool" label="HTML speichern (global)" default="false" level="3" />
<setting id="debug_show_url_info" type="bool" label="Aktuelle URL anzeigen (global)" default="false" /> <setting id="debug_show_url_info" type="bool" label="Aktuelle URL anzeigen (global)" default="false" level="3" />
<setting id="debug_log_errors" type="bool" label="Fehler mitschreiben (global)" default="false" /> <setting id="debug_log_errors" type="bool" label="Fehler mitschreiben (global)" default="false" level="3" />
<setting id="log_max_mb" type="number" label="URL-Log: maximale Dateigroesse (MB)" default="5" /> <setting id="log_max_mb" type="number" label="URL-Log: maximale Dateigroesse (MB)" default="5" level="3" />
<setting id="log_max_files" type="number" label="URL-Log: Anzahl alter Dateien" default="3" /> <setting id="log_max_files" type="number" label="URL-Log: Anzahl alter Dateien" default="3" level="3" />
<setting id="dump_max_files" type="number" label="HTML: maximale Dateien pro Plugin" default="200" /> <setting id="dump_max_files" type="number" label="HTML: maximale Dateien pro Plugin" default="200" level="3" />
</category> </category>
<category label="Debug Quellen"> <category label="Debug Quellen">
<setting id="log_urls_serienstream" type="bool" label="SerienStream: URLs mitschreiben" default="false" /> <setting id="log_urls_serienstream" type="bool" label="SerienStream: URLs mitschreiben" default="false" level="3" />
<setting id="dump_html_serienstream" type="bool" label="SerienStream: HTML speichern" default="false" /> <setting id="dump_html_serienstream" type="bool" label="SerienStream: HTML speichern" default="false" level="3" />
<setting id="show_url_info_serienstream" type="bool" label="SerienStream: Aktuelle URL anzeigen" default="false" /> <setting id="show_url_info_serienstream" type="bool" label="SerienStream: Aktuelle URL anzeigen" default="false" level="3" />
<setting id="log_errors_serienstream" type="bool" label="SerienStream: Fehler mitschreiben" default="false" /> <setting id="log_errors_serienstream" type="bool" label="SerienStream: Fehler mitschreiben" default="false" level="3" />
<setting id="log_urls_aniworld" type="bool" label="AniWorld: URLs mitschreiben" default="false" /> <setting id="log_urls_aniworld" type="bool" label="AniWorld: URLs mitschreiben" default="false" level="3" />
<setting id="dump_html_aniworld" type="bool" label="AniWorld: HTML speichern" default="false" /> <setting id="dump_html_aniworld" type="bool" label="AniWorld: HTML speichern" default="false" level="3" />
<setting id="show_url_info_aniworld" type="bool" label="AniWorld: Aktuelle URL anzeigen" default="false" /> <setting id="show_url_info_aniworld" type="bool" label="AniWorld: Aktuelle URL anzeigen" default="false" level="3" />
<setting id="log_errors_aniworld" type="bool" label="AniWorld: Fehler mitschreiben" default="false" /> <setting id="log_errors_aniworld" type="bool" label="AniWorld: Fehler mitschreiben" default="false" level="3" />
<setting id="log_urls_topstreamfilm" type="bool" label="TopStream: URLs mitschreiben" default="false" /> <setting id="log_urls_topstreamfilm" type="bool" label="TopStream: URLs mitschreiben" default="false" level="3" />
<setting id="dump_html_topstreamfilm" type="bool" label="TopStream: HTML speichern" default="false" /> <setting id="dump_html_topstreamfilm" type="bool" label="TopStream: HTML speichern" default="false" level="3" />
<setting id="show_url_info_topstreamfilm" type="bool" label="TopStream: Aktuelle URL anzeigen" default="false" /> <setting id="show_url_info_topstreamfilm" type="bool" label="TopStream: Aktuelle URL anzeigen" default="false" level="3" />
<setting id="log_errors_topstreamfilm" type="bool" label="TopStream: Fehler mitschreiben" default="false" /> <setting id="log_errors_topstreamfilm" type="bool" label="TopStream: Fehler mitschreiben" default="false" level="3" />
<setting id="log_urls_einschalten" type="bool" label="Einschalten: URLs mitschreiben" default="false" /> <setting id="log_urls_einschalten" type="bool" label="Einschalten: URLs mitschreiben" default="false" level="3" />
<setting id="dump_html_einschalten" type="bool" label="Einschalten: HTML speichern" default="false" /> <setting id="dump_html_einschalten" type="bool" label="Einschalten: HTML speichern" default="false" level="3" />
<setting id="show_url_info_einschalten" type="bool" label="Einschalten: Aktuelle URL anzeigen" default="false" /> <setting id="show_url_info_einschalten" type="bool" label="Einschalten: Aktuelle URL anzeigen" default="false" level="3" />
<setting id="log_errors_einschalten" type="bool" label="Einschalten: Fehler mitschreiben" default="false" /> <setting id="log_errors_einschalten" type="bool" label="Einschalten: Fehler mitschreiben" default="false" level="3" />
<setting id="log_urls_filmpalast" type="bool" label="Filmpalast: URLs mitschreiben" default="false" /> <setting id="log_urls_filmpalast" type="bool" label="Filmpalast: URLs mitschreiben" default="false" level="3" />
<setting id="dump_html_filmpalast" type="bool" label="Filmpalast: HTML speichern" default="false" /> <setting id="dump_html_filmpalast" type="bool" label="Filmpalast: HTML speichern" default="false" level="3" />
<setting id="show_url_info_filmpalast" type="bool" label="Filmpalast: Aktuelle URL anzeigen" default="false" /> <setting id="show_url_info_filmpalast" type="bool" label="Filmpalast: Aktuelle URL anzeigen" default="false" level="3" />
<setting id="log_errors_filmpalast" type="bool" label="Filmpalast: Fehler mitschreiben" default="false" /> <setting id="log_errors_filmpalast" type="bool" label="Filmpalast: Fehler mitschreiben" default="false" level="3" />
</category> </category>
<category label="YouTube"> <category label="YouTube">
<setting id="youtube_quality" type="enum" label="YouTube Videoqualitaet" default="0" values="Beste|1080p|720p|480p|360p" /> <setting id="youtube_quality" type="enum" label="YouTube Videoqualitaet" default="0" values="Beste|1080p|720p|480p|360p" />

185
addon/ytdlp_helper.py Normal file
View File

@@ -0,0 +1,185 @@
"""Gemeinsame yt-dlp Hilfsfunktionen fuer YouTube-Wiedergabe.
Wird von youtube_plugin und dokustreams_plugin genutzt.
"""
from __future__ import annotations
import re
from typing import Any, Dict, Optional
# Logging: inside Kodi write to the Kodi log, otherwise fall back to a
# silent no-op so this module stays importable outside Kodi.
try:
    import xbmc  # type: ignore
    def _log(msg: str) -> None:
        # LOGWARNING keeps the messages visible at Kodi's default log level.
        xbmc.log(f"[ViewIt][yt-dlp] {msg}", xbmc.LOGWARNING)
except ImportError:
    def _log(msg: str) -> None:
        # xbmc not available (e.g. plain Python / tests): discard the message.
        pass
# Captures the 11-character video id from watch/embed/v/short-link URL forms
# (including the youtube-nocookie.com domain).
_YT_ID_RE = re.compile(
    r"(?:youtube(?:-nocookie)?\.com/(?:embed/|v/|watch\?.*?v=)|youtu\.be/)"
    r"([A-Za-z0-9_-]{11})"
)
def extract_youtube_id(url: str) -> Optional[str]:
    """Return the YouTube video id contained in *url*, or None.

    Accepts watch?v=, embed/, v/ and youtu.be/ style links.
    """
    if url:
        match = _YT_ID_RE.search(url)
        if match:
            return match.group(1)
    return None
def _fix_strptime() -> None:
"""Kodi-Workaround: datetime.strptime Race Condition vermeiden.
Kodi's eingebetteter Python kann in Multi-Thread-Umgebungen dazu fuehren
dass der lazy _strptime-Import fehlschlaegt. Wir importieren das Modul
direkt, damit es beim yt-dlp Aufruf bereits geladen ist.
"""
try:
import _strptime # noqa: F401 erzwingt den internen Import
except Exception:
pass
def ensure_ytdlp_in_path() -> bool:
    """Make the yt_dlp package importable; return True on success.

    First attempts a plain import.  If that fails, prepends the Kodi addon
    directory script.module.yt-dlp/lib to sys.path and retries.  Any failure
    along the way (missing addon, no Kodi environment) yields False.
    """
    _fix_strptime()
    try:
        import yt_dlp  # type: ignore  # noqa: F401
    except ImportError:
        pass
    else:
        return True
    try:
        import os
        import sys
        import xbmcvfs  # type: ignore
        candidate = xbmcvfs.translatePath("special://home/addons/script.module.yt-dlp/lib")
        if candidate and os.path.isdir(candidate) and candidate not in sys.path:
            sys.path.insert(0, candidate)
        import yt_dlp  # type: ignore  # noqa: F401
    except Exception:
        # Addon path missing or import still failing: report unavailable.
        return False
    return True
def get_quality_format() -> str:
    """Return the yt-dlp format selector for the configured YouTube quality.

    Reads the "youtube_quality" addon setting (enum index as a string,
    "0" = best).  Outside Kodi, or on any settings error, the best-quality
    selector is returned.
    """
    def _selector(limit: str) -> str:
        # Preference order: h264 mp4 (widest Kodi compatibility), then any
        # mp4 video+audio pair, then a combined mp4, then anything.
        return (
            f"bestvideo{limit}[ext=mp4][vcodec^=avc1]+bestaudio[ext=m4a]/"
            f"bestvideo{limit}[ext=mp4]+bestaudio[ext=m4a]/"
            f"best{limit}[ext=mp4]/best"
        )

    choices = {
        "0": _selector(""),
        "1": _selector("[height<=1080]"),
        "2": _selector("[height<=720]"),
        "3": _selector("[height<=480]"),
        "4": _selector("[height<=360]"),
    }
    try:
        import xbmcaddon  # type: ignore
        setting = xbmcaddon.Addon().getSetting("youtube_quality") or "0"
    except Exception:
        return choices["0"]
    return choices.get(setting, choices["0"])
# Separators used to encode split video/audio streams plus metadata into a
# single string return value (decoded again by split_video_audio()).
_AUDIO_SEP = "||AUDIO||"
_META_SEP = "||META||"
def resolve_youtube_url(video_id: str) -> Optional[str]:
    """Resolve a YouTube video id to a direct stream URL via yt-dlp.

    When yt-dlp selects separate video+audio streams, the return value is
    encoded as ``video_url||AUDIO||audio_url||META||key=val,key=val,...``;
    callers can take the parts apart with ``split_video_audio()``.

    Returns None when yt-dlp is unavailable or extraction fails.
    """
    if not ensure_ytdlp_in_path():
        _log("yt-dlp nicht verfuegbar (script.module.yt-dlp fehlt)")
        # Best effort: surface a Kodi notification so the user knows why
        # playback failed; ignored outside Kodi.
        try:
            import xbmcgui  # type: ignore
            xbmcgui.Dialog().notification(
                "yt-dlp fehlt",
                "Bitte yt-dlp in den ViewIT-Einstellungen installieren.",
                xbmcgui.NOTIFICATION_ERROR,
                5000,
            )
        except Exception:
            pass
        return None
    try:
        from yt_dlp import YoutubeDL  # type: ignore
    except ImportError:
        return None
    url = f"https://www.youtube.com/watch?v={video_id}"
    fmt = get_quality_format()
    ydl_opts: Dict[str, Any] = {
        "format": fmt,
        "quiet": True,
        "no_warnings": True,
        "extract_flat": False,  # full extraction: we need the stream URLs
    }
    try:
        with YoutubeDL(ydl_opts) as ydl:
            info = ydl.extract_info(url, download=False)
            if not info:
                return None
            duration = int(info.get("duration") or 0)
            # Single URL (combined audio+video stream)
            direct = info.get("url")
            if direct:
                return direct
            # Separate video+audio streams (higher quality). The selector in
            # get_quality_format() is "bestvideo...+bestaudio...", so the
            # video format is expected first in requested_formats.
            rf = info.get("requested_formats")
            if rf and len(rf) >= 2:
                vf, af = rf[0], rf[1]
                video_url = vf.get("url")
                audio_url = af.get("url")
                if video_url and audio_url:
                    # Defaults cover fields yt-dlp may omit; all values feed
                    # the ||META|| payload consumed by split_video_audio().
                    vcodec = vf.get("vcodec") or "avc1.640028"
                    acodec = af.get("acodec") or "mp4a.40.2"
                    w = int(vf.get("width") or 1920)
                    h = int(vf.get("height") or 1080)
                    fps = int(vf.get("fps") or 25)
                    vbr = int((vf.get("tbr") or 5000) * 1000)  # kbit/s -> bit/s
                    abr = int((af.get("tbr") or 128) * 1000)  # kbit/s -> bit/s
                    asr = int(af.get("asr") or 44100)
                    ach = int(af.get("audio_channels") or 2)
                    meta = (
                        f"vc={vcodec},ac={acodec},"
                        f"w={w},h={h},fps={fps},"
                        f"vbr={vbr},abr={abr},"
                        f"asr={asr},ach={ach},dur={duration}"
                    )
                    _log(f"Getrennte Streams: {h}p {vcodec} + {acodec}")
                    return f"{video_url}{_AUDIO_SEP}{audio_url}{_META_SEP}{meta}"
                if video_url:
                    return video_url
            # Fallback: last format in the list
            formats = info.get("formats", [])
            if formats:
                return formats[-1].get("url")
    except Exception as exc:
        _log(f"yt-dlp Fehler fuer {video_id}: {exc}")
    return None
def split_video_audio(url: str) -> tuple:
    """Split an encoded stream URL into (video_url, audio_url, meta_dict).

    Plain URLs come back unchanged as (url, None, {}).  Encoded URLs (see
    resolve_youtube_url()) use "||AUDIO||" and "||META||" as separators;
    meta_dict may carry the keys vc, ac, w, h, fps, vbr, abr, asr, ach, dur.
    """
    if "||AUDIO||" not in url:
        return url, None, {}
    video_url, _, remainder = url.partition("||AUDIO||")
    audio_url, found_meta, meta_str = remainder.partition("||META||")
    meta: Dict[str, str] = {}
    if found_meta:
        for entry in meta_str.split(","):
            key, eq, value = entry.partition("=")
            if eq:  # skip malformed entries without '='
                meta[key] = value
    return video_url, audio_url, meta