feat(ui): surface LLM stream errors in chat

Add a stream error event type to the shared schema and wire up runtime handling
that converts provider error payloads into a concise string. When a stream error
occurs, emit a run-level error event, preserve any partial output, and stop the
turn to avoid additional tool execution.
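
For illustration, a minimal sketch of the string this produces (the payload values
are invented; the formatting mirrors formatLlmStreamError in the diff below):

    // Hypothetical provider error payload seen on the stream (sample values):
    const rawError = { name: "AI_APICallError", responseBody: '{"error":"rate limit exceeded"}' };

    // formatLlmStreamError reduces it to one line per recognized field:
    //   name: AI_APICallError
    //   responseBody: {"error":"rate limit exceeded"}
    // and falls back to "Model stream error" when neither field is present.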

In the renderer, display errors inline as assistant messages with destructive
styling and trigger a toast for immediate visibility. Include error events when
loading run history so prior failures are visible.
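
A sketch of the history path (the event fields are the ones read by the run-history
handler in the diff below; the sample values are invented):

    // Persisted run event loaded from history (sample values):
    const event = { type: 'error', error: 'name: AI_APICallError\nresponseBody: {"error":"rate limit exceeded"}', ts: '2026-02-16T03:04:51Z' };

    // It is mapped into an ErrorMessage conversation item and rendered with
    // destructive styling; for live runs the first line of `error` also feeds the toast.
    const item = { id: 'error-1', kind: 'error' as const, message: event.error, timestamp: new Date(event.ts).getTime() };
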
Ramnique Singh 2026-02-16 08:34:51 +05:30
parent 11245660fb
commit e1d50c62da
4 changed files with 111 additions and 4 deletions

@@ -55,6 +55,7 @@ import { FileCardProvider } from '@/contexts/file-card-context'
import { MarkdownPreOverride } from '@/components/ai-elements/markdown-code-override'
import { AgentScheduleConfig } from '@x/shared/dist/agent-schedule.js'
import { AgentScheduleState } from '@x/shared/dist/agent-schedule-state.js'
import { toast } from "sonner"
type DirEntry = z.infer<typeof workspace.DirEntry>
type RunEventType = z.infer<typeof RunEvent>
@@ -81,12 +82,20 @@ interface ToolCall {
timestamp: number;
}
type ConversationItem = ChatMessage | ToolCall;
interface ErrorMessage {
id: string;
kind: 'error';
message: string;
timestamp: number;
}
type ConversationItem = ChatMessage | ToolCall | ErrorMessage;
type ToolState = 'input-streaming' | 'input-available' | 'output-available' | 'output-error';
const isChatMessage = (item: ConversationItem): item is ChatMessage => 'role' in item
const isToolCall = (item: ConversationItem): item is ToolCall => 'name' in item
const isErrorMessage = (item: ConversationItem): item is ErrorMessage => 'kind' in item && item.kind === 'error'
const toToolState = (status: ToolCall['status']): ToolState => {
switch (status) {
@@ -1102,6 +1111,15 @@ function App() {
}
break
}
case 'error': {
items.push({
id: `error-${Date.now()}-${Math.random()}`,
kind: 'error',
message: event.error,
timestamp: event.ts ? new Date(event.ts).getTime() : Date.now(),
})
break
}
case 'llm-stream-event': {
// We don't need to reconstruct streaming events for history
// Reasoning is captured in the final message
@@ -1439,6 +1457,13 @@ function App() {
setIsProcessing(false)
setIsStopping(false)
setStopClickedAt(null)
setConversation(prev => [...prev, {
id: `error-${Date.now()}`,
kind: 'error',
message: event.error,
timestamp: Date.now(),
}])
toast.error(event.error.split('\n')[0] || 'Model error')
console.error('Run error:', event.error)
break
}
@@ -2226,6 +2251,16 @@ function App() {
)
}
if (isErrorMessage(item)) {
return (
<Message key={item.id} from="assistant">
<MessageContent className="rounded-lg border border-destructive/30 bg-destructive/10 px-4 py-3 text-destructive">
<pre className="whitespace-pre-wrap font-mono text-xs">{item.message}</pre>
</MessageContent>
</Message>
)
}
return null
}

@@ -52,12 +52,20 @@ interface ToolCall {
timestamp: number
}
type ConversationItem = ChatMessage | ToolCall
interface ErrorMessage {
id: string
kind: 'error'
message: string
timestamp: number
}
type ConversationItem = ChatMessage | ToolCall | ErrorMessage
type ToolState = 'input-streaming' | 'input-available' | 'output-available' | 'output-error'
const isChatMessage = (item: ConversationItem): item is ChatMessage => 'role' in item
const isToolCall = (item: ConversationItem): item is ToolCall => 'name' in item
const isErrorMessage = (item: ConversationItem): item is ErrorMessage => 'kind' in item && item.kind === 'error'
const toToolState = (status: ToolCall['status']): ToolState => {
switch (status) {
@@ -417,6 +425,16 @@ export function ChatSidebar({
)
}
if (isErrorMessage(item)) {
return (
<Message key={item.id} from="assistant">
<MessageContent className="rounded-lg border border-destructive/30 bg-destructive/10 px-4 py-3 text-destructive">
<pre className="whitespace-pre-wrap font-mono text-xs">{item.message}</pre>
</MessageContent>
</Message>
)
}
return null
}

@@ -265,6 +265,9 @@ export class StreamStepMessageBuilder {
case "finish-step":
this.providerOptions = event.providerOptions;
break;
case "error":
this.flushBuffers();
break;
}
}
@@ -278,6 +281,30 @@ export class StreamStepMessageBuilder {
}
}
function formatLlmStreamError(rawError: unknown): string {
let name: string | undefined;
let responseBody: string | undefined;
if (rawError && typeof rawError === "object") {
const err = rawError as Record<string, unknown>;
const nested = (err.error && typeof err.error === "object") ? err.error as Record<string, unknown> : null;
const nameValue = err.name ?? nested?.name;
const responseBodyValue = err.responseBody ?? nested?.responseBody;
if (nameValue !== undefined) {
name = String(nameValue);
}
if (responseBodyValue !== undefined) {
responseBody = String(responseBodyValue);
}
} else if (typeof rawError === "string") {
responseBody = rawError;
}
const lines: string[] = [];
if (name) lines.push(`name: ${name}`);
if (responseBody) lines.push(`responseBody: ${responseBody}`);
return lines.length ? lines.join("\n") : "Model stream error";
}
export async function loadAgent(id: string): Promise<z.infer<typeof Agent>> {
if (id === "copilot" || id === "rowboatx") {
return CopilotAgent;
@@ -792,6 +819,7 @@ export async function* streamAgent({
timeZoneName: 'short'
});
const instructionsWithDateTime = `Current date and time: ${currentDateTime}\n\n${agent.instructions}`;
let streamError: string | null = null;
for await (const event of streamLlm(
model,
state.messages,
@@ -810,6 +838,16 @@ export async function* streamAgent({
event: event,
subflow: [],
});
if (event.type === "error") {
streamError = event.error;
yield* processEvent({
runId,
type: "error",
error: streamError,
subflow: [],
});
break;
}
}
// build and emit final message from agent response
@@ -822,6 +860,10 @@ export async function* streamAgent({
subflow: [],
});
if (streamError) {
return;
}
// if there were any ask-human calls, emit those events
if (message.content instanceof Array) {
for (const part of message.content) {
@@ -895,6 +937,12 @@ async function* streamLlm(
signal?.throwIfAborted();
// console.log("\n\n\t>>>>\t\tstream event", JSON.stringify(event));
switch (event.type) {
case "error":
yield {
type: "error",
error: formatLlmStreamError((event as { error?: unknown }).error ?? event),
};
return;
case "reasoning-start":
yield {
type: "reasoning-start",
@@ -945,7 +993,7 @@ async function* streamLlm(
};
break;
default:
// console.warn("Unknown event type", event);
console.log('unknown stream event:', JSON.stringify(event));
continue;
}
}

@@ -51,6 +51,11 @@ export const LlmStepStreamFinishStepEvent = z.object({
providerOptions: ProviderOptions.optional(),
});
export const LlmStepStreamErrorEvent = BaseEvent.extend({
type: z.literal("error"),
error: z.string(),
});
export const LlmStepStreamEvent = z.union([
LlmStepStreamReasoningStartEvent,
LlmStepStreamReasoningDeltaEvent,
@@ -60,4 +65,5 @@ export const LlmStepStreamEvent = z.union([
LlmStepStreamTextEndEvent,
LlmStepStreamToolCallEvent,
LlmStepStreamFinishStepEvent,
]);
LlmStepStreamErrorEvent,
]);
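
Downstream consumers can narrow on the type discriminant after parsing (a minimal
sketch, assuming every union member carries a literal `type` field as the error
variant does; the payload and parse call are illustrative, not part of this diff):

    declare const payload: unknown; // raw event from the stream transport
    const parsed = LlmStepStreamEvent.parse(payload);
    if (parsed.type === "error") {
      // parsed.error holds the concise string built by formatLlmStreamError
      console.error(parsed.error);
    }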