Nightly: refactor readability, progress callbacks, and resource handling
This commit is contained in:
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from dataclasses import dataclass
|
||||
import re
|
||||
from urllib.parse import quote
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional
|
||||
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
|
||||
|
||||
try: # pragma: no cover - optional dependency
|
||||
import requests
|
||||
@@ -44,6 +44,16 @@ SETTING_LOG_URLS = "log_urls_dokustreams"
|
||||
SETTING_DUMP_HTML = "dump_html_dokustreams"
|
||||
SETTING_SHOW_URL_INFO = "show_url_info_dokustreams"
|
||||
SETTING_LOG_ERRORS = "log_errors_dokustreams"
|
||||
ProgressCallback = Optional[Callable[[str, Optional[int]], Any]]
|
||||
|
||||
|
||||
def _emit_progress(callback: ProgressCallback, message: str, percent: Optional[int] = None) -> None:
|
||||
if not callable(callback):
|
||||
return
|
||||
try:
|
||||
callback(str(message or ""), None if percent is None else int(percent))
|
||||
except Exception:
|
||||
return
|
||||
HEADERS = {
|
||||
"User-Agent": "Mozilla/5.0 (Kodi; ViewIt) AppleWebKit/537.36 (KHTML, like Gecko)",
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
|
||||
@@ -213,16 +223,26 @@ def _get_soup(url: str, *, session: Optional[RequestsSession] = None) -> Beautif
|
||||
raise RuntimeError("requests/bs4 sind nicht verfuegbar.")
|
||||
_log_visit(url)
|
||||
sess = session or get_requests_session("dokustreams", headers=HEADERS)
|
||||
response = None
|
||||
try:
|
||||
response = sess.get(url, headers=HEADERS, timeout=DEFAULT_TIMEOUT)
|
||||
response.raise_for_status()
|
||||
except Exception as exc:
|
||||
_log_error_message(f"GET {url} failed: {exc}")
|
||||
raise
|
||||
if response.url and response.url != url:
|
||||
_log_url_event(response.url, kind="REDIRECT")
|
||||
_log_response_html(url, response.text)
|
||||
return BeautifulSoup(response.text, "html.parser")
|
||||
try:
|
||||
final_url = (response.url or url) if response is not None else url
|
||||
body = (response.text or "") if response is not None else ""
|
||||
if final_url != url:
|
||||
_log_url_event(final_url, kind="REDIRECT")
|
||||
_log_response_html(url, body)
|
||||
return BeautifulSoup(body, "html.parser")
|
||||
finally:
|
||||
if response is not None:
|
||||
try:
|
||||
response.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
class DokuStreamsPlugin(BasisPlugin):
|
||||
@@ -247,14 +267,17 @@ class DokuStreamsPlugin(BasisPlugin):
|
||||
if REQUESTS_IMPORT_ERROR:
|
||||
print(f"DokuStreamsPlugin Importfehler: {REQUESTS_IMPORT_ERROR}")
|
||||
|
||||
async def search_titles(self, query: str) -> List[str]:
|
||||
async def search_titles(self, query: str, progress_callback: ProgressCallback = None) -> List[str]:
    """Search Doku-Streams for *query* and return the matching titles.

    Side effects: refreshes ``self._title_to_url`` (title -> page URL for
    hits that have both) and records (plot, poster) per title in
    ``self._title_meta``.  Progress is reported best-effort through
    *progress_callback*.  Titles are returned sorted case-insensitively.
    """
    _emit_progress(progress_callback, "Doku-Streams Suche", 15)
    hits = self._search_hits(query)
    _emit_progress(progress_callback, f"Treffer verarbeiten ({len(hits)})", 70)

    url_map: Dict[str, str] = {}
    for hit in hits:
        if not hit.title:
            continue
        self._title_meta[hit.title] = (hit.plot, hit.poster)
        if hit.url:
            url_map[hit.title] = hit.url
    self._title_to_url = url_map

    titles = sorted((hit.title for hit in hits if hit.title), key=str.casefold)
    _emit_progress(progress_callback, f"Fertig: {len(titles)} Treffer", 95)
    return titles
|
||||
|
||||
def _search_hits(self, query: str) -> List[SearchHit]:
|
||||
|
||||
Reference in New Issue
Block a user