Files
aoc/backend/main.py
Tomas Kracmar b0198012eb
Some checks failed
CI / lint-and-test (push) Has been cancelled
feat: implement Phase 3 scaling
- Replace skip-based pagination with cursor-based pagination (timestamp|_id cursors)
- Add Prometheus /metrics endpoint with request latency, fetch volume, and error counters
- Implement incremental fetch watermarking per source (watermarks collection in MongoDB)
- Add Graph change notification webhook endpoint (/api/webhooks/graph)
- Add correlation ID middleware for distributed tracing (x-request-id header)
- Update frontend to use cursor-based pagination with Prev/Next navigation
- Update tests for cursor pagination, metrics, webhooks, and watermark mocking
2026-04-14 14:58:50 +02:00

119 lines
3.6 KiB
Python

import asyncio
import logging
import time
from contextlib import suppress
from pathlib import Path
import structlog
from config import CORS_ORIGINS, ENABLE_PERIODIC_FETCH, FETCH_INTERVAL_MINUTES
from database import setup_indexes
from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import Response
from fastapi.staticfiles import StaticFiles
from metrics import observe_request, prometheus_metrics
from middleware import CorrelationIdMiddleware
from routes.config import router as config_router
from routes.events import router as events_router
from routes.fetch import router as fetch_router
from routes.fetch import run_fetch
from routes.webhooks import router as webhooks_router
def configure_logging():
    """Configure structlog to emit JSON log lines through the stdlib logging stack."""
    # Processor pipeline: stdlib filtering/metadata first, then ISO timestamps,
    # exception/stack rendering, and finally JSON serialization.
    pipeline = [
        structlog.stdlib.filter_by_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        structlog.processors.JSONRenderer(),
    ]
    structlog.configure(
        processors=pipeline,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
    # Route stdlib logging through a bare message format at INFO level;
    # structlog's JSONRenderer produces the final payload.
    logging.basicConfig(format="%(message)s", level=logging.INFO)
# Configure logging at import time so everything below logs structured JSON.
configure_logging()
logger = structlog.get_logger("aoc.fetcher")

app = FastAPI()
# Correlation-ID middleware added first so every request (including CORS
# preflights handled below) carries an x-request-id for tracing.
app.add_middleware(CorrelationIdMiddleware)
app.add_middleware(
    CORSMiddleware,
    allow_origins=CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
@app.middleware("http")
async def prometheus_middleware(request: Request, call_next):
    """Record latency, method, route, and status of every request for Prometheus.

    Uses time.perf_counter() rather than time.time(): perf_counter is
    monotonic, so measured durations cannot go negative or jump when the
    system clock is adjusted (e.g. by NTP).
    """
    start = time.perf_counter()
    response = await call_next(request)
    duration = time.perf_counter() - start
    # Prefer the matched route template (e.g. "/api/events/{id}") over the
    # raw URL path to keep metric label cardinality bounded; fall back to
    # the raw path when no route matched (404s, static files).
    path = getattr(request.scope.get("route"), "path", request.url.path)
    observe_request(request.method, path, response.status_code, duration)
    return response
# Mount all API routers under the /api prefix.
app.include_router(fetch_router, prefix="/api")
app.include_router(events_router, prefix="/api")
app.include_router(config_router, prefix="/api")
app.include_router(webhooks_router, prefix="/api")
@app.get("/health")
async def health_check():
    """Health probe: verify MongoDB connectivity via a ping command.

    Returns a small status payload on success; raises HTTP 503 when the
    database is unreachable.
    """
    # Local import to avoid pulling the DB handle in at module import time.
    from database import db

    try:
        # PyMongo's command() is blocking; run it in a worker thread so the
        # probe cannot stall the event loop (same pattern as run_fetch below).
        await asyncio.to_thread(db.command, "ping")
    except Exception as exc:
        logger.error("Health check failed", error=str(exc))
        raise HTTPException(status_code=503, detail="Database unavailable") from exc
    return {"status": "ok", "database": "connected"}
@app.get("/metrics")
async def metrics():
    """Expose Prometheus metrics in the text exposition format."""
    payload = prometheus_metrics()
    return Response(content=payload, media_type="text/plain")
# Serve the built frontend as static files. Mounted at "/" after all API
# routes so the more specific routes above win over the catch-all mount;
# html=True serves index.html for directory requests.
frontend_dir = Path(__file__).parent / "frontend"
app.mount("/", StaticFiles(directory=frontend_dir, html=True), name="frontend")
async def _periodic_fetch():
    """Run the fetch job forever, sleeping FETCH_INTERVAL_MINUTES between runs.

    run_fetch is blocking, so it is executed in a worker thread. Errors are
    logged and swallowed so one failed cycle does not kill the loop;
    asyncio.CancelledError propagates, which is how shutdown stops the task.
    """
    while True:
        try:
            # run_fetch does blocking I/O; keep it off the event loop.
            await asyncio.to_thread(run_fetch)
        except Exception as exc:
            logger.error("Periodic fetch failed", error=str(exc))
        else:
            # Success log in `else` so a logging hiccup is never reported
            # as a fetch failure.
            logger.info("Periodic fetch completed.")
        await asyncio.sleep(FETCH_INTERVAL_MINUTES * 60)
@app.on_event("startup")
async def start_periodic_fetch():
    """On startup: ensure DB indexes exist, then launch the background fetch loop."""
    setup_indexes()
    if ENABLE_PERIODIC_FETCH:
        # Keep the task handle on app.state so the shutdown hook can cancel it.
        app.state.fetch_task = asyncio.create_task(_periodic_fetch())
@app.on_event("shutdown")
async def stop_periodic_fetch():
    """On shutdown: cancel the background fetch task and wait for it to exit."""
    task = getattr(app.state, "fetch_task", None)
    if task:
        task.cancel()
        # Bug fix: since Python 3.8 asyncio.CancelledError subclasses
        # BaseException, so suppress(Exception) alone would NOT swallow the
        # CancelledError raised by awaiting the cancelled task, and shutdown
        # would error out. Suppress it explicitly (Exception kept for any
        # in-flight failure surfacing from the task body).
        with suppress(asyncio.CancelledError, Exception):
            await task