First version

2025-11-28 21:43:44 +01:00
commit 90f0e14f6e
22 changed files with 1674 additions and 0 deletions

backend/routes/events.py (new file, 104 lines)

@@ -0,0 +1,104 @@
from typing import Optional

from fastapi import APIRouter, HTTPException

from database import events_collection

router = APIRouter()


@router.get("/events")
def list_events(
    service: Optional[str] = None,
    actor: Optional[str] = None,
    operation: Optional[str] = None,
    result: Optional[str] = None,
    start: Optional[str] = None,
    end: Optional[str] = None,
    search: Optional[str] = None,
    page: int = 1,
    page_size: int = 50,
):
    # Collect per-field Mongo filters; they are ANDed together below.
    filters = []
    if service:
        filters.append({"service": service})
    if actor:
        filters.append(
            {
                "$or": [
                    {"actor_display": {"$regex": actor, "$options": "i"}},
                    {"actor_upn": {"$regex": actor, "$options": "i"}},
                    {"actor.user.userPrincipalName": {"$regex": actor, "$options": "i"}},
                    {"actor.user.id": actor},
                ]
            }
        )
    if operation:
        filters.append({"operation": {"$regex": operation, "$options": "i"}})
    if result:
        filters.append({"result": {"$regex": result, "$options": "i"}})
    if start or end:
        time_filter = {}
        if start:
            time_filter["$gte"] = start
        if end:
            time_filter["$lte"] = end
        filters.append({"timestamp": time_filter})
    if search:
        filters.append(
            {
                "$or": [
                    {"raw_text": {"$regex": search, "$options": "i"}},
                    {"display_summary": {"$regex": search, "$options": "i"}},
                    {"actor_display": {"$regex": search, "$options": "i"}},
                    {"target_displays": {"$elemMatch": {"$regex": search, "$options": "i"}}},
                    {"operation": {"$regex": search, "$options": "i"}},
                ]
            }
        )
    query = {"$and": filters} if filters else {}
    # Clamp pagination parameters before computing the skip offset.
    safe_page_size = max(1, min(page_size, 500))
    safe_page = max(1, page)
    skip = (safe_page - 1) * safe_page_size
    try:
        total = events_collection.count_documents(query)
        cursor = events_collection.find(query).sort("timestamp", -1).skip(skip).limit(safe_page_size)
        events = list(cursor)
    except Exception as exc:
        raise HTTPException(status_code=500, detail=f"Failed to query events: {exc}") from exc
    # ObjectId is not JSON-serializable, so return it as a string.
    for e in events:
        e["_id"] = str(e["_id"])
    return {
        "items": events,
        "total": total,
        "page": safe_page,
        "page_size": safe_page_size,
    }

@router.get("/filter-options")
def filter_options(limit: int = 200):
    """
    Provide distinct values for UI filters (best-effort, capped).
    """
    safe_limit = max(1, min(limit, 1000))
    try:
        services = sorted(events_collection.distinct("service"))[:safe_limit]
        operations = sorted(events_collection.distinct("operation"))[:safe_limit]
        results = sorted([r for r in events_collection.distinct("result") if r])[:safe_limit]
        actors = sorted([a for a in events_collection.distinct("actor_display") if a])[:safe_limit]
        actor_upns = sorted([a for a in events_collection.distinct("actor_upn") if a])[:safe_limit]
        devices = sorted([a for a in events_collection.distinct("target_displays") if isinstance(a, str)])[:safe_limit]
    except Exception as exc:
        raise HTTPException(status_code=500, detail=f"Failed to load filter options: {exc}") from exc
    return {
        "services": services,
        "operations": operations,
        "results": results,
        "actors": actors,
        "actor_upns": actor_upns,
        "devices": devices,
    }
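
For reference, a minimal client-side sketch of paging through GET /events. Everything in it is an assumption rather than part of this commit: the base URL, the use of the requests package, and the example filter values (service, result, and the ISO-8601 start/end strings, which the route compares lexicographically).

import requests

BASE_URL = "http://localhost:8000"  # assumption: where the FastAPI app is served

def iter_events(**filters):
    """Yield events page by page from GET /events."""
    page = 1
    while True:
        params = {**filters, "page": page, "page_size": 100}
        resp = requests.get(f"{BASE_URL}/events", params=params, timeout=30)
        resp.raise_for_status()
        data = resp.json()
        yield from data["items"]
        if page * data["page_size"] >= data["total"]:
            break
        page += 1

# Example: failed operations mentioning "wipe" during one day (filter values are illustrative).
for event in iter_events(service="Intune", result="failure", search="wipe",
                         start="2025-11-27T00:00:00Z", end="2025-11-28T00:00:00Z"):
    print(event["timestamp"], event.get("operation"))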

backend/routes/fetch.py (new file, 40 lines)

@@ -0,0 +1,40 @@
from fastapi import APIRouter, HTTPException
from pymongo import UpdateOne

from database import events_collection
from graph.audit_logs import fetch_audit_logs
from sources.unified_audit import fetch_unified_audit
from sources.intune_audit import fetch_intune_audit
from models.event_model import normalize_event

router = APIRouter()

@router.get("/fetch-audit-logs")
def fetch_logs(hours: int = 168):
    window = max(1, min(hours, 720))  # cap to 30 days for sanity
    logs = []
    errors = []

    def fetch_source(fn, label):
        # Best effort: a failing source is reported in `errors` instead of aborting the run.
        try:
            return fn(hours=window)
        except Exception as exc:
            errors.append(f"{label}: {exc}")
            return []

    logs.extend(fetch_source(fetch_audit_logs, "Directory audit"))
    logs.extend(fetch_source(fetch_unified_audit, "Unified audit (Exchange/SharePoint/Teams)"))
    logs.extend(fetch_source(fetch_intune_audit, "Intune audit"))
    normalized = [normalize_event(e) for e in logs]
    if normalized:
        ops = []
        for doc in normalized:
            key = doc.get("dedupe_key")
            if key:
                ops.append(UpdateOne({"dedupe_key": key}, {"$set": doc}, upsert=True))
            else:
                # No dedupe key: fall back to upserting on the source id plus timestamp.
                ops.append(UpdateOne({"id": doc.get("id"), "timestamp": doc.get("timestamp")}, {"$set": doc}, upsert=True))
        events_collection.bulk_write(ops, ordered=False)
    return {"stored_events": len(normalized), "errors": errors}