feat: copy raw event and AI explain in modal
All checks were successful
CI / lint-and-test (push) Successful in 32s
- Add POST /api/events/{id}/explain endpoint that fetches event + related events
and asks the LLM for a plain-language explanation with security context
- Add 'Copy' button to raw event modal (uses navigator.clipboard)
- Add 'Explain' button to raw event modal (only when AI_FEATURES_ENABLED)
- Show explanation in modal with markdown rendering
- Add CSS for modal actions and explanation panel
- Add tests for explain endpoint (404, no LLM key, mocked LLM success)
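
For reference, a minimal sketch of calling the new endpoint from a script. The base URL, event id, and token below are placeholders; the bearer scheme is an assumption that should match whatever the existing require_auth dependency expects:

    import httpx

    # Hedged sketch: base URL, event id, and token are placeholders.
    resp = httpx.post(
        "http://localhost:8000/api/events/evt-123/explain",
        headers={"Authorization": "Bearer <token>"},
        timeout=60.0,
    )
    resp.raise_for_status()
    body = resp.json()
    print(body["explanation"])                    # markdown-ish text, rendered by the modal
    print(body["llm_used"], body.get("llm_error"))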
@@ -214,8 +214,16 @@
      <div class="modal__content">
        <div class="modal__header">
          <h3 id="modalTitle">Raw Event</h3>
          <div class="modal__actions">
            <button type="button" class="ghost" @click="copyRawEvent()">Copy</button>
            <button type="button" class="ghost" x-show="aiFeaturesEnabled" :disabled="modalExplainLoading" @click="explainEvent()" x-text="modalExplainLoading ? 'Explaining…' : 'Explain'">Explain</button>
            <button type="button" id="closeModal" class="ghost" @click="modalOpen = false">Close</button>
          </div>
        </div>
        <div x-show="modalExplanation || modalExplainError" class="modal__explanation">
          <div x-show="modalExplainError" class="ask-error" x-text="modalExplainError"></div>
          <div x-show="modalExplanation" class="ask-answer" x-html="_mdToHtml(modalExplanation)"></div>
        </div>
        <pre id="modalBody" x-text="modalBody"></pre>
      </div>
    </div>
@@ -233,6 +241,10 @@
    currentCursor: null,
    modalOpen: false,
    modalBody: '',
    modalEventId: '',
    modalExplanation: '',
    modalExplainLoading: false,
    modalExplainError: '',
    authBtnText: 'Login',
    authConfig: null,
    msalInstance: null,
@@ -672,9 +684,44 @@
      } catch (err) {
        this.modalBody = `Error serializing event:\n${err.message}\n\nEvent ID: ${e.id || 'N/A'}`;
      }
      this.modalEventId = e.id || '';
      this.modalExplanation = '';
      this.modalExplainError = '';
      this.modalOpen = true;
    },

    async copyRawEvent() {
      if (!this.modalBody) return;
      try {
        // navigator.clipboard is only available in secure contexts (HTTPS or localhost).
        await navigator.clipboard.writeText(this.modalBody);
        this.statusText = 'Raw event copied to clipboard.';
        setTimeout(() => { if (this.statusText === 'Raw event copied to clipboard.') this.statusText = ''; }, 2000);
      } catch (err) {
        this.statusText = 'Failed to copy to clipboard.';
      }
    },

    async explainEvent() {
      if (!this.modalEventId) return;
      this.modalExplainLoading = true;
      this.modalExplanation = '';
      this.modalExplainError = '';
      try {
        const res = await fetch(`/api/events/${this.modalEventId}/explain`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json', ...this.authHeader() },
        });
        if (!res.ok) throw new Error(await res.text());
        const body = await res.json();
        this.modalExplanation = body.explanation;
        this.modalExplainError = body.llm_error || '';
      } catch (err) {
        this.modalExplainError = err.message || 'Failed to explain event.';
      } finally {
        this.modalExplainLoading = false;
      }
    },

    async addTag(e, tag) {
      if (!tag.trim()) return;
      const tags = [...(e.tags || []), tag.trim()];
@@ -364,6 +364,22 @@ input {
  margin-bottom: 10px;
}

.modal__actions {
  display: flex;
  gap: 8px;
  align-items: center;
}

.modal__explanation {
  background: rgba(255, 255, 255, 0.03);
  border: 1px solid var(--border);
  border-radius: 10px;
  padding: 12px;
  margin-bottom: 10px;
  font-size: 14px;
  line-height: 1.6;
}

.modal pre {
  background: rgba(255, 255, 255, 0.02);
  color: var(--text);
@@ -456,6 +456,131 @@ def _to_event_ref(e: dict) -> dict:
    }


_EXPLAIN_SYSTEM_PROMPT = """You are a Microsoft 365 security and compliance expert.
An administrator needs help understanding an audit event.

Your task:
1. Explain what happened in plain language (1-2 sentences).
2. Identify who performed the action and what the target was.
3. Assess whether this is typical admin activity or something to investigate.
4. Highlight any security implications (privilege escalation, unusual actor, after-hours activity, etc.).
5. Suggest what the admin should do next, if anything.

Keep the answer under 200 words. Use bullet points for readability.
Do not invent facts that are not in the data.
"""


async def _explain_event(event: dict, related: list[dict]) -> str:
    if not LLM_API_KEY:
        raise RuntimeError("LLM_API_KEY not configured")

    event_text = json.dumps(event, indent=2, default=str)

    related_text = ""
    if related:
        related_text = "\n\nRelated events in the last 24 hours:\n"
        for i, e in enumerate(related[:10], 1):
            ts = e.get("timestamp", "?")[:16].replace("T", " ")
            op = e.get("operation", "unknown")
            actor = e.get("actor_display", "unknown")
            targets = ", ".join(e.get("target_displays") or []) or "—"
            result = e.get("result", "—")
            related_text += f"{i}. {ts} — {op} by {actor} on {targets} ({result})\n"

    messages = [
        {"role": "system", "content": _EXPLAIN_SYSTEM_PROMPT},
        {
            "role": "user",
            "content": f"Audit event:\n{event_text}{related_text}\n\nPlease explain this event.",
        },
    ]

    url = _build_chat_url(LLM_BASE_URL, LLM_API_VERSION)
    headers = {"Content-Type": "application/json"}
    # Azure OpenAI endpoints authenticate with an "api-key" header;
    # OpenAI-compatible endpoints use a bearer token.
    if "azure" in LLM_BASE_URL.lower() or "cognitiveservices" in LLM_BASE_URL.lower():
        headers["api-key"] = LLM_API_KEY
    else:
        headers["Authorization"] = f"Bearer {LLM_API_KEY}"

    payload = {
        "model": LLM_MODEL,
        "messages": messages,
        "max_completion_tokens": 600,
    }

    async with httpx.AsyncClient(timeout=LLM_TIMEOUT_SECONDS) as client:
        resp = await client.post(url, headers=headers, json=payload)
        if resp.status_code >= 400:
            body = resp.text
            logger.error("LLM API error", status_code=resp.status_code, url=url, response_body=body)
            raise RuntimeError(f"LLM API error {resp.status_code}: {body[:500]}")
        data = resp.json()
        return data["choices"][0]["message"]["content"].strip()


@router.post("/events/{event_id}/explain")
async def explain_event(event_id: str, user: dict = Depends(require_auth)):
    event = events_collection.find_one({"id": event_id})
    if not event:
        raise HTTPException(status_code=404, detail="Event not found")

    event.pop("_id", None)

    # Fetch related events for context: same actor OR same target in the last 24h.
    related = []
    since = (datetime.now(UTC) - timedelta(hours=24)).isoformat().replace("+00:00", "Z")
    actor = event.get("actor_upn") or event.get("actor_display")
    target = event.get("target_displays", [None])[0] if event.get("target_displays") else None

    match_any = []
    if actor:
        match_any.append({"actor_upn": actor})
        match_any.append({"actor_display": actor})
    if target:
        match_any.append({"target_displays": target})

    if match_any:
        try:
            rel_cursor = (
                events_collection.find(
                    {
                        "$and": [
                            {"timestamp": {"$gte": since}},
                            {"id": {"$ne": event_id}},
                            {"$or": match_any},
                        ]
                    }
                )
                .sort("timestamp", -1)
                .limit(10)
            )
            related = list(rel_cursor)
            for r in related:
                r.pop("_id", None)
                r.pop("raw", None)
        except Exception as exc:
            logger.warning("Failed to fetch related events", error=str(exc))

    if not LLM_API_KEY:
        return {
            "explanation": "LLM is not configured. Set LLM_API_KEY in your environment to enable event explanations.",
            "llm_used": False,
            "llm_error": "LLM_API_KEY not configured",
        }

    try:
        explanation = await _explain_event(event, related)
        return {
            "explanation": explanation,
            "llm_used": True,
            "llm_error": None,
            "related_count": len(related),
        }
    except Exception as exc:
        logger.warning("Event explanation failed", error=str(exc))
        return {
            "explanation": "Unable to generate an explanation at this time. Please check the raw event details.",
            "llm_used": False,
            "llm_error": str(exc),
            "related_count": len(related),
        }


@router.post("/ask", response_model=AskResponse)
async def ask_question(body: AskRequest, user: dict = Depends(require_auth)):
    question = body.question.strip()
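
For clarity, the related-events filter built above resolves to a query of this shape (the actor, target, and timestamp values are placeholders):

    # Sketch of the resulting MongoDB filter for actor "alice@contoso.com"
    # and first target "Bob":
    query = {
        "$and": [
            {"timestamp": {"$gte": "2024-01-01T00:00:00Z"}},  # last 24 hours
            {"id": {"$ne": "evt-123"}},                       # exclude the event itself
            {"$or": [                                         # same actor OR same target
                {"actor_upn": "alice@contoso.com"},
                {"actor_display": "alice@contoso.com"},
                {"target_displays": "Bob"},
            ]},
        ]
    }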
@@ -55,6 +55,58 @@ def test_mcp_sse_auth_required_when_enabled(client, monkeypatch):
    assert response.status_code == 401


def test_explain_event_not_found(client):
    response = client.post("/api/events/nonexistent/explain")
    assert response.status_code == 404


def test_explain_event_no_llm_key(client, mock_events_collection, monkeypatch):
    monkeypatch.setattr("routes.ask.LLM_API_KEY", "")
    mock_events_collection.insert_one(
        {
            "id": "evt-explain",
            "timestamp": datetime.now(UTC).isoformat(),
            "service": "Directory",
            "operation": "Add user",
            "result": "success",
            "actor_display": "Alice",
            "raw_text": "",
        }
    )
    response = client.post("/api/events/evt-explain/explain")
    assert response.status_code == 200
    data = response.json()
    assert "explanation" in data
    assert data["llm_used"] is False
    assert "LLM_API_KEY" in (data.get("llm_error") or "")


def test_explain_event_with_llm_mock(client, mock_events_collection, monkeypatch):
    monkeypatch.setattr("routes.ask.LLM_API_KEY", "test-key")

    async def fake_explain(event, related):
        return "This is a test explanation."

    monkeypatch.setattr("routes.ask._explain_event", fake_explain)

    mock_events_collection.insert_one(
        {
            "id": "evt-explain2",
            "timestamp": datetime.now(UTC).isoformat(),
            "service": "Directory",
            "operation": "Add user",
            "result": "success",
            "actor_display": "Alice",
            "raw_text": "",
        }
    )
    response = client.post("/api/events/evt-explain2/explain")
    assert response.status_code == 200
    data = response.json()
    assert data["explanation"] == "This is a test explanation."
    assert data["llm_used"] is True


def test_health(client):
    response = client.get("/health")
    assert response.status_code == 200
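
One path the tests above leave uncovered is an LLM call that raises after a key is configured. A sketch of such a test, reusing the same fixtures; the event id and error message below are illustrative:

    def test_explain_event_llm_failure(client, mock_events_collection, monkeypatch):
        # Sketch: the endpoint should degrade gracefully when the LLM call raises,
        # returning 200 with llm_used False and the error string surfaced.
        monkeypatch.setattr("routes.ask.LLM_API_KEY", "test-key")

        async def failing_explain(event, related):
            raise RuntimeError("LLM API error 500: upstream unavailable")

        monkeypatch.setattr("routes.ask._explain_event", failing_explain)

        mock_events_collection.insert_one(
            {
                "id": "evt-explain3",
                "timestamp": datetime.now(UTC).isoformat(),
                "service": "Directory",
                "operation": "Add user",
                "result": "success",
                "actor_display": "Alice",
                "raw_text": "",
            }
        )
        response = client.post("/api/events/evt-explain3/explain")
        assert response.status_code == 200
        data = response.json()
        assert data["llm_used"] is False
        assert "LLM API error" in (data["llm_error"] or "")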