feat(ui): surface LLM stream errors in chat

Add a stream error event type to the shared schema and wire runtime handling to
convert provider payloads into a concise string format. When a stream error is
seen, emit a Run error event, preserve partial output, and stop the turn to
avoid additional tool execution.

In the renderer, display errors inline as assistant messages with destructive
styling and trigger a toast for immediate visibility. Include error events when
loading run history so prior failures are visible.
This commit is contained in:
Ramnique Singh 2026-02-16 08:34:51 +05:30
parent 11245660fb
commit e1d50c62da
4 changed files with 111 additions and 4 deletions

View file

@ -265,6 +265,9 @@ export class StreamStepMessageBuilder {
case "finish-step":
this.providerOptions = event.providerOptions;
break;
case "error":
this.flushBuffers();
break;
}
}
@ -278,6 +281,30 @@ export class StreamStepMessageBuilder {
}
}
/**
 * Convert an unknown LLM stream error payload into a concise, human-readable
 * string for display in the chat UI.
 *
 * Providers surface errors in several shapes: a bare string, an Error
 * instance, an object with `name`/`message`/`responseBody`, or a wrapper
 * object whose `error` property carries those fields. Each recognized field
 * is emitted on its own `key: value` line.
 *
 * @param rawError - The raw error payload from the stream (shape unknown).
 * @returns One line per recognized field, joined with newlines, or the
 *          generic fallback "Model stream error" when nothing usable is found.
 */
function formatLlmStreamError(rawError: unknown): string {
    let name: string | undefined;
    let message: string | undefined;
    let responseBody: string | undefined;

    // Render a field value without collapsing objects to "[object Object]":
    // strings pass through; everything else is JSON-stringified, falling back
    // to String() when stringification yields undefined (e.g. functions) or
    // throws (e.g. circular references).
    const toDisplayString = (value: unknown): string => {
        if (typeof value === "string") {
            return value;
        }
        try {
            return JSON.stringify(value) ?? String(value);
        } catch {
            return String(value);
        }
    };

    if (rawError && typeof rawError === "object") {
        const err = rawError as Record<string, unknown>;
        // Some providers nest the real error one level down under `error`.
        const nested = (err.error && typeof err.error === "object") ? err.error as Record<string, unknown> : null;
        const nameValue = err.name ?? nested?.name;
        const messageValue = err.message ?? nested?.message;
        const responseBodyValue = err.responseBody ?? nested?.responseBody;
        if (nameValue !== undefined) {
            name = toDisplayString(nameValue);
        }
        if (messageValue !== undefined) {
            message = toDisplayString(messageValue);
        }
        if (responseBodyValue !== undefined) {
            responseBody = toDisplayString(responseBodyValue);
        }
    } else if (typeof rawError === "string") {
        responseBody = rawError;
    }

    const lines: string[] = [];
    if (name) lines.push(`name: ${name}`);
    if (message) lines.push(`message: ${message}`);
    if (responseBody) lines.push(`responseBody: ${responseBody}`);
    return lines.length ? lines.join("\n") : "Model stream error";
}
export async function loadAgent(id: string): Promise<z.infer<typeof Agent>> {
if (id === "copilot" || id === "rowboatx") {
return CopilotAgent;
@ -792,6 +819,7 @@ export async function* streamAgent({
timeZoneName: 'short'
});
const instructionsWithDateTime = `Current date and time: ${currentDateTime}\n\n${agent.instructions}`;
let streamError: string | null = null;
for await (const event of streamLlm(
model,
state.messages,
@ -810,6 +838,16 @@ export async function* streamAgent({
event: event,
subflow: [],
});
if (event.type === "error") {
streamError = event.error;
yield* processEvent({
runId,
type: "error",
error: streamError,
subflow: [],
});
break;
}
}
// build and emit final message from agent response
@ -822,6 +860,10 @@ export async function* streamAgent({
subflow: [],
});
if (streamError) {
return;
}
// if there were any ask-human calls, emit those events
if (message.content instanceof Array) {
for (const part of message.content) {
@ -895,6 +937,12 @@ async function* streamLlm(
signal?.throwIfAborted();
// console.log("\n\n\t>>>>\t\tstream event", JSON.stringify(event));
switch (event.type) {
case "error":
yield {
type: "error",
error: formatLlmStreamError((event as { error?: unknown }).error ?? event),
};
return;
case "reasoning-start":
yield {
type: "reasoning-start",
@ -945,7 +993,7 @@ async function* streamLlm(
};
break;
default:
// console.warn("Unknown event type", event);
console.log('unknown stream event:', JSON.stringify(event));
continue;
}
}