@@ -245,6 +246,7 @@
askAnswerHtml: '',
askEvents: [],
askLlmUsed: false,
+ askLlmError: '',
async initApp() {
await this.initAuth();
@@ -501,6 +503,7 @@
this.askAnswerHtml = this._mdToHtml(body.answer);
this.askEvents = body.events || [];
this.askLlmUsed = body.llm_used;
+ this.askLlmError = body.llm_error || '';
} catch (err) {
this.askAnswer = 'Sorry, something went wrong: ' + (err.message || 'Unknown error');
this.askAnswerHtml = this.askAnswer;
@@ -515,6 +518,7 @@
this.askAnswerHtml = '';
this.askEvents = [];
this.askLlmUsed = false;
+ this.askLlmError = '';
},
_mdToHtml(text) {
diff --git a/backend/frontend/style.css b/backend/frontend/style.css
index 4e78fdd..1b18cd0 100644
--- a/backend/frontend/style.css
+++ b/backend/frontend/style.css
@@ -418,6 +418,16 @@ input {
font-size: 13px;
}
+.ask-error {
+ background: rgba(249, 115, 22, 0.1);
+ border: 1px solid rgba(249, 115, 22, 0.3);
+ border-radius: 8px;
+ padding: 10px 14px;
+ color: #fdba74;
+ font-size: 14px;
+ margin-bottom: 10px;
+}
+
.ask-events {
margin-bottom: 14px;
}
diff --git a/backend/models/api.py b/backend/models/api.py
index b22e679..e7ff83a 100644
--- a/backend/models/api.py
+++ b/backend/models/api.py
@@ -92,3 +92,4 @@ class AskResponse(BaseModel):
events: list[AskEventRef]
query_info: dict
llm_used: bool
+ llm_error: str | None = None
diff --git a/backend/routes/ask.py b/backend/routes/ask.py
index 59e78ff..a459e03 100644
--- a/backend/routes/ask.py
+++ b/backend/routes/ask.py
@@ -272,16 +272,21 @@ async def ask_question(body: AskRequest, user: dict = Depends(require_auth)):
events=[],
query_info={"entity": entity, "start": start, "end": end, "event_count": 0},
llm_used=False,
+ llm_error="LLM_API_KEY is not configured. Set it in your .env to enable AI narrative summarisation." if not LLM_API_KEY else None,
)
# Try LLM summarisation
answer = ""
llm_used = False
- if LLM_API_KEY:
+ llm_error = None
+ if not LLM_API_KEY:
+ llm_error = "LLM_API_KEY is not configured. Set it in your .env to enable AI narrative summarisation."
+ else:
try:
answer = await _call_llm(question, events)
llm_used = True
except Exception as exc:
+ llm_error = f"LLM call failed: {exc}"
logger.warning("LLM call failed, falling back to structured summary", error=str(exc))
# Fallback: structured summary if LLM unavailable or failed
@@ -315,4 +320,5 @@ async def ask_question(body: AskRequest, user: dict = Depends(require_auth)):
"mongo_query": json.dumps(query, default=str),
},
llm_used=llm_used,
+ llm_error=llm_error,
)