feat: moved chat persistence to Server Side

This commit is contained in:
DESKTOP-RTLN3BA\$punk 2026-05-04 03:06:15 -07:00
parent 2e1b9b5582
commit 19b6e0a025
19 changed files with 4515 additions and 390 deletions

View file

@ -114,6 +114,29 @@ export function readStreamedChatTurnId(data: unknown): string | null {
return typeof value === "string" && value.length > 0 ? value : null;
}
/**
 * Decode the payload of the `data-user-message-id` /
 * `data-assistant-message-id` SSE events that `stream_new_chat` and
 * `stream_resume_chat` emit once `persist_user_turn` /
 * `persist_assistant_shell` have resolved a canonical
 * `new_chat_messages.id`. Mirrors {@link readStreamedChatTurnId}.
 *
 * @param data - Raw SSE event payload of unknown shape.
 * @returns The canonical `messageId` plus the `turnId` (or `null` when
 * absent/empty), or `null` when the payload is malformed (missing or
 * non-numeric `message_id`). Callers should treat `null` as "ignore the
 * event" so a malformed BE payload never overwrites the optimistic id
 * with a bogus value.
 */
export function readStreamedMessageId(
  data: unknown
): { messageId: number; turnId: string | null } | null {
  // Reject anything that is not a plain object before touching fields.
  if (data === null || typeof data !== "object") return null;
  const { message_id, turn_id } = data as {
    message_id?: unknown;
    turn_id?: unknown;
  };
  // A canonical DB id must arrive as a finite number; NaN/Infinity,
  // strings, and missing fields all count as malformed.
  if (typeof message_id !== "number" || !Number.isFinite(message_id)) {
    return null;
  }
  // turn_id is optional: keep it only when it is a non-empty string.
  const turnId =
    typeof turn_id === "string" && turn_id.length > 0 ? turn_id : null;
  return { messageId: message_id, turnId };
}
export function applyTurnIdToAssistantMessageList(
messages: ThreadMessageLike[],
assistantMsgId: string,

View file

@ -487,6 +487,37 @@ export type SSEEvent =
type: "data-turn-info";
data: { chat_turn_id: string };
}
| {
/**
* Emitted by ``stream_new_chat`` AFTER ``data-turn-info`` /
* ``data-turn-status`` and BEFORE any LLM streaming events,
* once ``persist_user_turn`` has resolved the canonical
* ``new_chat_messages.id`` for the user-side row of the
* current turn. The frontend renames its optimistic
* ``msg-user-XXX`` placeholder id to ``msg-{message_id}``
* so DB-id-gated UI (comments, edit-from-this-message)
* unlocks immediately. Not emitted by ``stream_resume_chat``
* (resume reuses the original turn's user message).
*/
type: "data-user-message-id";
data: { message_id: number; turn_id: string };
}
| {
/**
* Emitted by ``stream_new_chat`` AND ``stream_resume_chat``
* AFTER ``data-turn-info`` / ``data-turn-status`` and BEFORE
* any LLM streaming events, once ``persist_assistant_shell``
* has resolved the canonical ``new_chat_messages.id`` for
* the assistant-side row of the current turn. The frontend
* renames its optimistic ``msg-assistant-XXX`` placeholder
* id, migrates the local ``tokenUsageStore`` and
* ``pendingInterrupt`` references, and binds the running
* mutable ``assistantMsgId`` closure variable to the
* canonical id for the rest of the stream.
*/
type: "data-assistant-message-id";
data: { message_id: number; turn_id: string };
}
| {
/**
* Best-effort revert pass that ran BEFORE this regeneration.

View file

@ -144,6 +144,17 @@ export async function getThreadMessages(threadId: number): Promise<ThreadHistory
* via ``data-turn-info``. Persisting it lets later edits locate the
* matching LangGraph checkpoint without HumanMessage scanning. Older
* callers can still omit it for back-compat.
*
* @deprecated Replaced by the SSE-based message ID handshake. The
* streaming generator (`stream_new_chat` / `stream_resume_chat`) now
* persists both the user and assistant rows server-side via
* `persist_user_turn` / `persist_assistant_shell` and emits
* `data-user-message-id` / `data-assistant-message-id` SSE events so
* the UI renames its optimistic IDs in real time. The only remaining
* caller is `persistAssistantErrorMessage` (pre-stream error fallback
* for requests the server never accepted — the server has nothing to
* persist in that case). After the legacy route is removed in a
* follow-up PR this function will be deleted entirely.
*/
export async function appendMessage(
threadId: number,