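"""PodX web UI (Flask).

Accepts URLs from the browser and enqueues them on an RQ queue for a companion
worker (jobs reference `worker.handle_url`, which is expected to live in a
separate worker module/image). Serves a single-page UI with unified search
backed by a Meilisearch index named `library`, plus a recent-jobs/activity feed
read from the JSON-lines file `_feed.log` under TRANSCRIPT_ROOT.
"""
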
from flask import Flask, request, redirect
import os, json, time, requests
from pathlib import Path
from redis import Redis
from rq import Queue

MEILI_URL = os.getenv("MEILI_URL", "http://meili:7700")
MEILI_KEY = os.getenv("MEILI_KEY", "")  # from .env
REDIS_URL = os.getenv("REDIS_URL", "redis://redis:6379/0")
TRANSCRIPT_ROOT = Path(os.getenv("TRANSCRIPT_ROOT", "/transcripts"))

app = Flask(__name__)
FEED_LOG = TRANSCRIPT_ROOT / "_feed.log"
q = Queue(connection=Redis.from_url(REDIS_URL))

PAGE = """
<!doctype html><html><head><meta charset="utf-8">
<title>PodX - unified search</title>
<style>
body{font-family:system-ui, sans-serif;max-width:880px;margin:2rem auto;padding:0 1rem}
form{display:flex;gap:.5rem;margin-bottom:1rem}
input[type=url]{flex:1;padding:.7rem}
button{padding:.7rem 1rem}
.card{border:1px solid #ddd;padding:1rem;border-radius:8px;margin:.5rem 0}
small{color:#666}
input[type=search]{width:100%;padding:.6rem;margin:.5rem 0 1rem}
mark{background: #fff2a8}
.badge{display:inline-block;font-size:.75rem;border:1px solid #999;padding:.1rem .4rem;border-radius:999px;margin-right:.4rem}
</style></head><body>
<h1>PodX</h1>
<form action="/enqueue" method="post">
  <input type="url" name="url" placeholder="Paste podcast/video/article URL…" required>
  <button type="submit">Fetch</button>
</form>
<details><summary>Batch</summary>
  <form action="/enqueue_batch" method="post">
    <textarea name="urls" rows="4" style="width:100%" placeholder="One URL per line"></textarea>
    <button type="submit">Queue All</button>
  </form>
</details>

<h2>Unified search (podcasts + PDFs + EPUB + Kiwix + Web)</h2>
<form id="sform">
  <input type="search" name="q" placeholder='e.g., "vector database" OR retrieval augmented generation' autofocus />
</form>
<div id="results"></div>

<script>
const form = document.getElementById('sform');
async function doSearch(){
  const q = new URLSearchParams(new FormData(form)).toString();
  const r = await fetch('/search?'+q);
  document.getElementById('results').innerHTML = await r.text();
}
form.addEventListener('input', doSearch);
doSearch();
</script>

<h2>Recent jobs</h2>
<div id="feed"></div>
<script>
(async function poll(){
  try{
    const r = await fetch('/recent');
    document.getElementById('feed').innerHTML = await r.text();
  }catch(e){}
  setTimeout(poll, 4000);
})();
</script>

<div style="margin-top:1rem;padding:1rem;border:1px solid #ddd;border-radius:8px;">
  <h3 style="margin-top:0;">Activity</h3>
  <div id="status-summary" style="font-family:system-ui, sans-serif; font-size:14px; margin-bottom:0.5rem;">Loading…</div>
  <pre id="status-feed" style="max-height:300px; overflow:auto; background:#f8f9fa; padding:0.5rem; border-radius:6px; border:1px solid #eee;"></pre>
</div>
<script>
(async function(){
  const feed = document.getElementById('status-feed');
  const sum = document.getElementById('status-summary');
  async function tick(){
    try{
      const r = await fetch('/api/status');
      const j = await r.json();
      if(!j.ok) throw new Error('not ok');
      const ev = j.events || [];
      const last = j.summary || {};
      sum.textContent = last.last_status ? `${last.last_status} — ${last.last_title||''}` : 'Idle';
      feed.textContent = ev.map(e => {
        const s = e.status || '';
        const u = e.url || e.path || e.title || '';
        const up = e.uploader ? ` [${e.uploader}]` : '';
        return `${s.padEnd(14)} ${u}${up}`;
      }).join('\\n');
    }catch(e){
      sum.textContent = 'Status unavailable';
    }
  }
  tick();
  setInterval(tick, 2000);
})();
</script>
</body></html>
"""

def read_feed_tail(max_lines: int = 200):
    """Return up to `max_lines` of the most recent JSON events from FEED_LOG."""
    if not FEED_LOG.exists():
        return []
    try:
        with open(FEED_LOG, "rb") as f:
            try:
                f.seek(-65536, 2)  # read only the last ~64KB of the log
            except OSError:
                f.seek(0)  # log is shorter than 64KB
            data = f.read().decode("utf-8", errors="ignore")
    except Exception:
        return []
    lines = [x.strip() for x in data.splitlines() if x.strip()]
    events = []
    for ln in lines[-max_lines:]:
        try:
            events.append(json.loads(ln))
        except Exception:
            pass  # skip partially written or malformed lines
    return events

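# Illustrative only: judging by the fields consumed by /api/status, /recent,
# and the Activity panel, each feed line is assumed to be one JSON object
# roughly like
#   {"status": "...", "title": "...", "uploader": "...", "date": "...",
#    "path": "...", "url": "..."}
# with any subset of those keys present; this is not a guaranteed schema.
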
@app.get("/api/status")
def api_status():
    events = read_feed_tail(200)
    last = events[-1] if events else {}
    summary = {
        "last_status": last.get("status"),
        "last_title": last.get("title") or last.get("path") or last.get("url"),
        "last_time": int(time.time()),
        "count": len(events),
    }
    return {"ok": True, "summary": summary, "events": events}

def meili_search(qstr, limit=30):
    """Query the Meilisearch `library` index; return a list of hits, or [] on any error."""
    if not qstr.strip():
        return []
    try:
        r = requests.post(
            f"{MEILI_URL}/indexes/library/search",
            headers={"Authorization": f"Bearer {MEILI_KEY}", "Content-Type": "application/json"},
            data=json.dumps({"q": qstr, "limit": limit}),
            timeout=5,
        )
        if r.status_code != 200:
            return []
        return r.json().get("hits", [])
    except Exception:
        return []

@app.get("/health")
def health():
    return "ok"

@app.get("/")
def index():
    return PAGE

@app.post("/enqueue")
def enqueue():
    url = request.form["url"].strip()
    # Enqueue by dotted path so this container never needs to import the worker code.
    q.enqueue("worker.handle_url", url)
    return redirect("/")

@app.post("/enqueue_batch")
def enqueue_batch():
    urls = [u.strip() for u in request.form["urls"].splitlines() if u.strip()]
    for u in urls:
        q.enqueue("worker.handle_url", u)
    return redirect("/")

@app.get("/recent")
def recent():
    try:
        with open(FEED_LOG, "r", encoding="utf-8") as f:
            tail = f.readlines()[-40:]
    except FileNotFoundError:
        tail = []
    html = []
    for line in reversed(tail):
        try:
            item = json.loads(line)
        except Exception:
            continue
        html.append(
            f"<div class='card'><b>{item.get('title','')}</b><br>"
            f"<small>{item.get('uploader','')} — {item.get('date','')} — {item.get('status','')}</small><br>"
            f"<small>{item.get('path','')}</small></div>"
        )
    return "\n".join(html)

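# /search assumes each Meilisearch hit carries at least `title`, `source`,
# `type`, and `text`; podcast hits may also carry `segments` (a list of
# {"start": seconds, ...} dicts) used for the jump-to timestamp, and
# `_formatted.text` is preferred for the snippet when the server returns it.
# This reflects how the fields are read below, not a guaranteed schema.
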
@app.get("/search")
def search():
    qstr = request.args.get("q", "")
    hits = meili_search(qstr)
    out = []
    for h in hits:
        t = h.get("title", "")
        src = h.get("source", "")
        typ = h.get("type", "")
        ctx = h.get("_formatted", {}).get("text", h.get("text", "")[:300])
        segs = h.get("segments", [])
        ts = int(segs[0]["start"]) if segs else 0  # jump-to offset for podcast hits
        if typ == 'podcast':
            open_link = f"/open?file={requests.utils.quote(src)}&t={ts}"
        else:
            open_link = f"/open?file={requests.utils.quote(src)}"
        transcript_link = f" | <a href=\"/subtitle?file={requests.utils.quote(src)}\">Transcript</a>" if typ == 'podcast' else ""
        badge = f"<span class='badge'>{typ}</span>"
        out.append(
            f"<div class='card'><b>{badge}{t}</b><br><small>{src}</small>"
            f"<p>{ctx}</p>"
            f"<a href='{open_link}'>Open</a>"
            f"{transcript_link}"
            f"</div>"
        )
    return "\n".join(out) or "<small>No results yet.</small>"

@app.get("/open")
def open_local():
    file = request.args.get("file", "")
    t = int(request.args.get("t", "0"))
    return f"<pre>{file}\nStart at: {t} sec</pre>"

@app.get("/subtitle")
def subtitle():
    file = request.args.get("file", "")
    base = os.path.splitext(os.path.basename(file))[0]
    p = TRANSCRIPT_ROOT / f"{base}.vtt"
    if p.exists():
        with open(p, "r", encoding="utf-8") as f:
            return f"<pre>{f.read()}</pre>"
    return "<small>No VTT found.</small>"

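# Deployment note (assumed, not defined in this file): the module only defines
# `app`, so it is expected to be served by Flask's dev server or a WSGI server,
# e.g. `gunicorn -b 0.0.0.0:8000 app:app` if the file is named app.py; the
# actual entrypoint lives in the project's Dockerfile/compose configuration.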