Added the ability to refresh metadata

This commit is contained in:
2025-09-07 17:41:58 +02:00
parent bd846a8d9f
commit 399af7a205
2 changed files with 153 additions and 2 deletions

View File

@@ -1280,6 +1280,112 @@ def handle_local_file(path_str: str):
log({"url": path_str, "status": "error", "error": str(e)})
raise
# --- Refresh sidecar metadata and subtitles for an already-downloaded media file ---
def _refresh_source_url(p):
    """Return the original URL from a companion .info.json next to *p*, or None.

    Looks for `<name>.info.json` then `<stem>.info.json`. Skips (with a log
    entry and console message) when no info.json or no URL is found.
    """
    info_json = None
    for cand in (p.parent / f"{p.name}.info.json", p.parent / f"{p.stem}.info.json"):
        if cand.exists():
            info_json = cand
            break
    if not info_json:
        log({"path": str(p), "status": "refresh-skip", "reason": "no_info_json"})
        print(f"[refresh] skip: no info.json next to {p}", flush=True)
        return None
    info = load_info_json(info_json) or {}
    # Prefer the canonical webpage URL; fall back to older/alternate keys.
    url = info.get("webpage_url") or info.get("original_url") or info.get("url")
    if not url:
        log({"path": str(p), "status": "refresh-skip", "reason": "no_url_in_info"})
        print(f"[refresh] skip: no URL in {info_json}", flush=True)
        return None
    return url
def _refresh_sidecars(p, url):
    """Run yt-dlp to re-fetch info.json, thumbnail and subtitles next to *p*.

    Uses --skip-download so the media itself is never re-downloaded; the
    output template pins sidecar files exactly beside the media file.

    Raises:
        subprocess.CalledProcessError: if yt-dlp exits non-zero.
    """
    outtmpl = str(p.with_suffix(".%(ext)s"))
    sub_langs = os.getenv("YTDLP_SUBS_LANGS", "en.*,en")
    cmd = [
        "yt-dlp",
        "--skip-download",
        "--write-info-json",
        "--write-thumbnail",
        "--convert-thumbnails", "jpg",
        "--write-subs", "--write-auto-subs",
        "--sub-langs", sub_langs,
        "--convert-subs", "srt",
        "-o", outtmpl,
        url,
    ]
    print(f"[refresh] refreshing sidecars for {p} via yt-dlp", flush=True)
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError as e:
        print(f"[refresh] yt-dlp failed: {e}", flush=True)
        raise
def _ensure_english_srt(p):
    """Best-effort: copy a plain `<stem>.srt` to `<stem>.en.srt` (Plex-friendly).

    Language-suffixed subtitle files fetched by yt-dlp are kept as-is; only a
    bare `<stem>.srt` is duplicated under an explicit `.en` language tag.
    """
    try:
        for s in p.parent.glob(f"{p.stem}*.srt"):
            if s.name == f"{p.stem}.srt":
                shutil.copy2(s, p.with_suffix(".en.srt"))
    except Exception as e:
        # Still best-effort, but surface the failure instead of hiding it.
        print(f"[refresh] subtitle copy failed: {e}", flush=True)
def _rebuild_nfo_and_artwork(p):
    """Rebuild the episode NFO (and local artwork) from fresh info.json / RSS.

    RSS enrichment and artwork saving are both non-fatal; any failure in this
    phase is reported on the console and swallowed so the refresh completes.
    """
    try:
        # Try an RSS match to enrich metadata (non-fatal if not present).
        try:
            ep = match_media_to_rss(p)
        except Exception:
            ep = None
        fallback = {
            "title": p.stem,
            "episode_title": p.stem,
            "show": p.parent.name,
            "description": "",
            "pubdate": _extract_date_from_stem(p.stem),
            "duration_sec": media_duration_seconds(p),
            "image": "",
            "guid": "",
        }
        meta = build_meta_from_sources(p, p.parent.name, fallback, ep)
        # Save local artwork too (best-effort).
        try:
            save_episode_artwork(meta.get("image"), p, meta.get("show"))
        except Exception:
            pass
        # If a transcript already exists, include it in the NFO plot preview.
        # NOTE: build the path by concatenation, not with_suffix() — a stem
        # containing a dot (e.g. "Show 1.2") would otherwise be truncated.
        ttxt_path = TRN / f"{p.stem}.txt"
        ttxt = ttxt_path.read_text(encoding="utf-8") if ttxt_path.exists() else None
        write_episode_nfo(p, meta, ttxt)
    except Exception as e:
        print(f"[refresh] NFO/artwork update failed: {e}", flush=True)
def refresh_media(path_str: str):
    """
    Refresh sidecar metadata (info.json, thumbnail) and subtitles for an already-downloaded media file.
    Requires a companion .info.json next to the media (to supply the original URL). No media re-download.

    Args:
        path_str: Filesystem path to the already-downloaded media file.

    Raises:
        subprocess.CalledProcessError: if the yt-dlp refresh step fails.
        Exception: any unexpected error is logged and re-raised.
    """
    try:
        p = Path(path_str)
        if not p.exists() or not p.is_file():
            log({"url": path_str, "status": "error", "error": "file_not_found"})
            return
        url = _refresh_source_url(p)
        if not url:
            return  # skip reason already logged by the helper
        _refresh_sidecars(p, url)
        _ensure_english_srt(p)
        _rebuild_nfo_and_artwork(p)
        log({"path": str(p), "status": "refresh-done"})
        print(f"[refresh] done for {p}", flush=True)
    except Exception as e:
        log({"path": path_str, "status": "error", "error": str(e)})
        raise
def handle_web(url: str):
info = {"url": url, "status":"web-downloading", "title":"", "uploader":"", "date":"", "path":""}
log(info)