diff --git a/surfsense_backend/app/schemas/new_chat.py b/surfsense_backend/app/schemas/new_chat.py
index aa95e49e6..a7200903e 100644
--- a/surfsense_backend/app/schemas/new_chat.py
+++ b/surfsense_backend/app/schemas/new_chat.py
@@ -7,7 +7,7 @@ These schemas follow the assistant-ui ThreadHistoryAdapter pattern:
 """
 
 from datetime import datetime
-from typing import Any
+from typing import Any, Literal
 from uuid import UUID
 
 from pydantic import BaseModel, ConfigDict, Field
@@ -193,6 +193,16 @@ class RegenerateRequest(BaseModel):
     mentioned_surfsense_doc_ids: list[int] | None = None
 
 
+class ResumeDecision(BaseModel):
+    type: Literal["approve", "edit", "reject"]
+    edited_action: dict[str, Any] | None = None
+
+
+class ResumeRequest(BaseModel):
+    search_space_id: int
+    decisions: list[ResumeDecision]
+
+
 # =============================================================================
 # Public Chat Snapshot Schemas
 # =============================================================================
diff --git a/surfsense_backend/app/services/new_streaming_service.py b/surfsense_backend/app/services/new_streaming_service.py
index 57fbc9663..aa14dec7d 100644
--- a/surfsense_backend/app/services/new_streaming_service.py
+++ b/surfsense_backend/app/services/new_streaming_service.py
@@ -504,6 +504,18 @@ class VercelStreamingService:
             },
         )
 
+    def format_interrupt_request(self, interrupt_value: dict[str, Any]) -> str:
+        """Format an interrupt request for human-in-the-loop approval.
+
+        Args:
+            interrupt_value: The interrupt payload from HumanInTheLoopMiddleware
+                containing action_requests and review_configs.
+
+        Returns:
+            str: SSE formatted interrupt request data part
+        """
+        return self.format_data("interrupt-request", interrupt_value)
+
     # =========================================================================
     # Error Part
     # =========================================================================
diff --git a/surfsense_backend/app/tasks/chat/stream_new_chat.py b/surfsense_backend/app/tasks/chat/stream_new_chat.py
index 31e67c7ff..801d140be 100644
--- a/surfsense_backend/app/tasks/chat/stream_new_chat.py
+++ b/surfsense_backend/app/tasks/chat/stream_new_chat.py
@@ -1175,6 +1175,19 @@ async def stream_new_chat(
     if completion_event:
         yield completion_event
 
+    # Check if the graph was interrupted (human-in-the-loop)
+    state = await agent.aget_state(config)
+    is_interrupted = any(task.interrupts for task in state.tasks)
+    if is_interrupted:
+        # The interrupted task is not necessarily tasks[0]; pick the first
+        # task that actually carries an interrupt to avoid an IndexError.
+        interrupted_task = next(task for task in state.tasks if task.interrupts)
+        interrupt_value = interrupted_task.interrupts[0].value
+        yield streaming_service.format_interrupt_request(interrupt_value)
+        yield streaming_service.format_finish_step()
+        yield streaming_service.format_finish()
+        yield streaming_service.format_done()
+        return
+
     # Generate LLM title for new chats after first response
     # Check if this is the first assistant response by counting existing assistant messages
     from sqlalchemy import func