feat(ui): surface LLM stream errors in chat

Add a stream error event type to the shared schema and wire up runtime
handling that converts provider error payloads into a single concise string.
When a stream error arrives, emit a run error event, preserve any partial
output, and stop the turn so no further tools execute.

In the renderer, display errors inline as assistant messages with destructive
styling and fire a toast so failures surface immediately. Include error events
when loading run history so prior failures stay visible.
Ramnique Singh 2026-02-16 08:34:51 +05:30
parent 11245660fb
commit e1d50c62da
4 changed files with 111 additions and 4 deletions
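
Only the two renderer files appear in the diffs below; the shared-schema and
runtime halves of the change (the other 2 of the 4 changed files) are not
shown. The following is a rough sketch only of what that side could look
like, assuming the shared schema uses zod (as the z.infer calls below
suggest); RunErrorEvent, formatStreamError, and the provider payload shape
are illustrative stand-ins, not the actual names:

// Hypothetical sketch, not the actual shared-schema/runtime code.
import { z } from 'zod'

// New 'error' variant for the shared RunEvent schema. Field names mirror
// what the renderer reads below (event.error, event.ts); everything else
// is an assumption.
export const RunErrorEvent = z.object({
  type: z.literal('error'),
  error: z.string(),                                // concise, human-readable message
  ts: z.union([z.string(), z.number()]).optional(), // renderer falls back to Date.now()
})

// Runtime helper collapsing a provider error payload into one string.
// The payload shape is assumed; providers differ.
export function formatStreamError(payload: unknown): string {
  if (payload instanceof Error) return payload.message
  if (typeof payload === 'object' && payload !== null) {
    const { code, message } = payload as { code?: string; message?: string }
    const parts = [code, message].filter((p): p is string => Boolean(p))
    if (parts.length > 0) return parts.join(': ')
  }
  return String(payload)
}

Note the renderer below only depends on event.error being a single
preformatted string (its first line feeds the toast) and event.ts being
optional, so the exact formatting lives entirely on the runtime side.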


@@ -55,6 +55,7 @@ import { FileCardProvider } from '@/contexts/file-card-context'
 import { MarkdownPreOverride } from '@/components/ai-elements/markdown-code-override'
 import { AgentScheduleConfig } from '@x/shared/dist/agent-schedule.js'
 import { AgentScheduleState } from '@x/shared/dist/agent-schedule-state.js'
+import { toast } from "sonner"
 
 type DirEntry = z.infer<typeof workspace.DirEntry>
 type RunEventType = z.infer<typeof RunEvent>
@@ -81,12 +82,20 @@ interface ToolCall {
   timestamp: number;
 }
 
-type ConversationItem = ChatMessage | ToolCall;
+interface ErrorMessage {
+  id: string;
+  kind: 'error';
+  message: string;
+  timestamp: number;
+}
+
+type ConversationItem = ChatMessage | ToolCall | ErrorMessage;
 
 type ToolState = 'input-streaming' | 'input-available' | 'output-available' | 'output-error';
 
 const isChatMessage = (item: ConversationItem): item is ChatMessage => 'role' in item
 const isToolCall = (item: ConversationItem): item is ToolCall => 'name' in item
+const isErrorMessage = (item: ConversationItem): item is ErrorMessage => 'kind' in item && item.kind === 'error'
 
 const toToolState = (status: ToolCall['status']): ToolState => {
   switch (status) {
@@ -1102,6 +1111,15 @@ function App() {
           }
           break
         }
+        case 'error': {
+          items.push({
+            id: `error-${Date.now()}-${Math.random()}`,
+            kind: 'error',
+            message: event.error,
+            timestamp: event.ts ? new Date(event.ts).getTime() : Date.now(),
+          })
+          break
+        }
         case 'llm-stream-event': {
          // We don't need to reconstruct streaming events for history
          // Reasoning is captured in the final message
@@ -1439,6 +1457,13 @@ function App() {
         setIsProcessing(false)
         setIsStopping(false)
         setStopClickedAt(null)
+        setConversation(prev => [...prev, {
+          id: `error-${Date.now()}`,
+          kind: 'error',
+          message: event.error,
+          timestamp: Date.now(),
+        }])
+        toast.error(event.error.split('\n')[0] || 'Model error')
         console.error('Run error:', event.error)
         break
       }
@@ -2226,6 +2251,16 @@ function App() {
             )
           }
+
+          if (isErrorMessage(item)) {
+            return (
+              <Message key={item.id} from="assistant">
+                <MessageContent className="rounded-lg border border-destructive/30 bg-destructive/10 px-4 py-3 text-destructive">
+                  <pre className="whitespace-pre-wrap font-mono text-xs">{item.message}</pre>
+                </MessageContent>
+              </Message>
+            )
+          }
 
           return null
         }


@@ -52,12 +52,20 @@ interface ToolCall {
   timestamp: number
 }
 
-type ConversationItem = ChatMessage | ToolCall
+interface ErrorMessage {
+  id: string
+  kind: 'error'
+  message: string
+  timestamp: number
+}
+
+type ConversationItem = ChatMessage | ToolCall | ErrorMessage
 
 type ToolState = 'input-streaming' | 'input-available' | 'output-available' | 'output-error'
 
 const isChatMessage = (item: ConversationItem): item is ChatMessage => 'role' in item
 const isToolCall = (item: ConversationItem): item is ToolCall => 'name' in item
+const isErrorMessage = (item: ConversationItem): item is ErrorMessage => 'kind' in item && item.kind === 'error'
 
 const toToolState = (status: ToolCall['status']): ToolState => {
   switch (status) {
@@ -417,6 +425,16 @@ export function ChatSidebar({
             )
           }
+
+          if (isErrorMessage(item)) {
+            return (
+              <Message key={item.id} from="assistant">
+                <MessageContent className="rounded-lg border border-destructive/30 bg-destructive/10 px-4 py-3 text-destructive">
+                  <pre className="whitespace-pre-wrap font-mono text-xs">{item.message}</pre>
+                </MessageContent>
+              </Message>
+            )
+          }
 
           return null
         }