Reorganize streaming orchestration modules into relay and orchestration folders.

This commit is contained in:
CREDO23 2026-05-07 16:00:15 +02:00
parent f8754a9dab
commit 52593d88db
10 changed files with 170 additions and 12 deletions

View file

@@ -1,5 +1,11 @@
"""Composable orchestration pieces for chat streaming."""
from app.tasks.chat.streaming.orchestration.event_stream import stream_agent_events
from app.tasks.chat.streaming.orchestration.input import StreamExecutionInput
from app.tasks.chat.streaming.orchestration.output import StreamOutput
# Public API of the orchestration package. The earlier single-entry
# assignment was dead code (immediately overwritten) and out of sync with
# the imports above, so only the complete, sorted list is kept.
__all__ = [
    "StreamExecutionInput",
    "StreamOutput",
    "stream_agent_events",
]

View file

@@ -6,9 +6,9 @@ from collections.abc import AsyncIterator
from typing import Any
from app.agents.new_chat.feature_flags import get_flags
from app.tasks.chat.streaming.event_relay import EventRelay
from app.tasks.chat.streaming.orchestration.output import StreamOutput
from app.tasks.chat.streaming.relay.event_relay import EventRelay
from app.tasks.chat.streaming.relay.state import AgentEventRelayState
from app.tasks.chat.streaming.stream_result import StreamResult
async def stream_agent_events(
@@ -17,7 +17,7 @@ async def stream_agent_events(
config: dict[str, Any],
input_data: Any,
streaming_service: Any,
result: StreamResult,
result: StreamOutput,
step_prefix: str = "thinking",
initial_step_id: str | None = None,
initial_step_title: str = "",

View file

@@ -0,0 +1,23 @@
"""Inputs for orchestrator-owned streaming execution."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Any
@dataclass(frozen=True)
class StreamExecutionInput:
    """Container for dependencies required by ``stream_agent_events``.

    Bundles the agent, its invocation config, and presentation options into a
    single value that orchestrator entrypoints (e.g. ``stream_chat``) unpack
    and forward keyword-for-keyword. Frozen so a bundle can be passed around
    without accidental mutation.
    """

    # Agent object whose events are streamed (project type; ``Any`` by design).
    agent: Any
    # Invocation config dict forwarded to ``stream_agent_events``.
    config: dict[str, Any]
    # Input payload for the agent run; shape is defined by the agent, opaque here.
    input_data: Any
    # Streaming service dependency (project type) — presumably emits frames
    # to the client; confirm against ``stream_agent_events``.
    streaming_service: Any
    # Prefix applied when labelling streamed steps.
    step_prefix: str = "thinking"
    # Optional seed values for the first visible step, if any.
    initial_step_id: str | None = None
    initial_step_title: str = ""
    initial_step_items: list[str] | None = None
    # Optional content-assembly helper; ``None`` means no builder is supplied.
    content_builder: Any | None = None
    # Opaque runtime context threaded through to the agent run, if provided.
    runtime_context: Any = None

View file

@@ -0,0 +1,160 @@
"""Top-level chat streaming entrypoints.
For now these orchestrator functions are thin compatibility wrappers around the
current ``stream_new_chat`` / ``stream_resume_chat`` implementations. Routing
calls through this module lets us cut over to the fully modular event relay in
one place later without touching API routes again.
"""
from __future__ import annotations
from collections.abc import AsyncGenerator
from typing import Any, Literal
from app.agents.new_chat.filesystem_selection import FilesystemSelection
from app.db import ChatVisibility
from app.tasks.chat.stream_new_chat import stream_new_chat, stream_resume_chat
from app.tasks.chat.streaming.orchestration.event_stream import stream_agent_events
from app.tasks.chat.streaming.orchestration.input import StreamExecutionInput
from app.tasks.chat.streaming.orchestration.output import StreamOutput
async def stream_chat(
    *,
    user_query: str,
    search_space_id: int,
    chat_id: int,
    user_id: str | None = None,
    llm_config_id: int = -1,
    mentioned_document_ids: list[int] | None = None,
    mentioned_surfsense_doc_ids: list[int] | None = None,
    mentioned_documents: list[dict[str, Any]] | None = None,
    checkpoint_id: str | None = None,
    needs_history_bootstrap: bool = False,
    thread_visibility: ChatVisibility | None = None,
    current_user_display_name: str | None = None,
    disabled_tools: list[str] | None = None,
    filesystem_selection: FilesystemSelection | None = None,
    request_id: str | None = None,
    user_image_data_urls: list[str] | None = None,
    orchestration_input: StreamExecutionInput | None = None,
) -> AsyncGenerator[str, None]:
    """Stream a new chat turn through the current production pipeline.

    When *orchestration_input* is supplied, the turn is driven through the
    modular event relay (``stream_agent_events``) and a fresh ``StreamOutput``
    record; otherwise every argument is forwarded to the legacy
    ``stream_new_chat`` implementation.
    """
    if orchestration_input is None:
        # Legacy path: delegate the whole turn to the existing pipeline.
        legacy_stream = stream_new_chat(
            user_query=user_query,
            search_space_id=search_space_id,
            chat_id=chat_id,
            user_id=user_id,
            llm_config_id=llm_config_id,
            mentioned_document_ids=mentioned_document_ids,
            mentioned_surfsense_doc_ids=mentioned_surfsense_doc_ids,
            mentioned_documents=mentioned_documents,
            checkpoint_id=checkpoint_id,
            needs_history_bootstrap=needs_history_bootstrap,
            thread_visibility=thread_visibility,
            current_user_display_name=current_user_display_name,
            disabled_tools=disabled_tools,
            filesystem_selection=filesystem_selection,
            request_id=request_id,
            user_image_data_urls=user_image_data_urls,
        )
        async for chunk in legacy_stream:
            yield chunk
        return

    # Orchestrated path: collect per-turn facts into a StreamOutput record.
    selection = filesystem_selection
    fs_mode = selection.mode.value if selection else "cloud"
    platform = selection.client_platform.value if selection else "web"
    output = StreamOutput(
        request_id=request_id,
        turn_id=f"{chat_id}:orchestrator",
        filesystem_mode=fs_mode,
        client_platform=platform,
    )
    relay = stream_agent_events(
        agent=orchestration_input.agent,
        config=orchestration_input.config,
        input_data=orchestration_input.input_data,
        streaming_service=orchestration_input.streaming_service,
        result=output,
        step_prefix=orchestration_input.step_prefix,
        initial_step_id=orchestration_input.initial_step_id,
        initial_step_title=orchestration_input.initial_step_title,
        initial_step_items=orchestration_input.initial_step_items,
        content_builder=orchestration_input.content_builder,
        runtime_context=orchestration_input.runtime_context,
    )
    async for frame in relay:
        yield frame
async def stream_resume(
    *,
    chat_id: int,
    search_space_id: int,
    decisions: list[dict],
    user_id: str | None = None,
    llm_config_id: int = -1,
    thread_visibility: ChatVisibility | None = None,
    filesystem_selection: FilesystemSelection | None = None,
    request_id: str | None = None,
    disabled_tools: list[str] | None = None,
) -> AsyncGenerator[str, None]:
    """Resume an interrupted chat turn through the current production pipeline.

    Thin pass-through wrapper around ``stream_resume_chat`` so API routes can
    depend on this module instead of the legacy implementation directly.
    """
    resumed = stream_resume_chat(
        chat_id=chat_id,
        search_space_id=search_space_id,
        decisions=decisions,
        user_id=user_id,
        llm_config_id=llm_config_id,
        thread_visibility=thread_visibility,
        filesystem_selection=filesystem_selection,
        request_id=request_id,
        disabled_tools=disabled_tools,
    )
    async for frame in resumed:
        yield frame
async def stream_regenerate(
    *,
    user_query: str,
    search_space_id: int,
    chat_id: int,
    user_id: str | None = None,
    llm_config_id: int = -1,
    mentioned_document_ids: list[int] | None = None,
    mentioned_surfsense_doc_ids: list[int] | None = None,
    mentioned_documents: list[dict[str, Any]] | None = None,
    checkpoint_id: str | None = None,
    needs_history_bootstrap: bool = False,
    thread_visibility: ChatVisibility | None = None,
    current_user_display_name: str | None = None,
    disabled_tools: list[str] | None = None,
    filesystem_selection: FilesystemSelection | None = None,
    request_id: str | None = None,
    user_image_data_urls: list[str] | None = None,
    flow: Literal["new", "regenerate"] = "regenerate",
) -> AsyncGenerator[str, None]:
    """Regenerate an assistant turn through the current production pipeline.

    Delegates to ``stream_new_chat``, additionally forwarding *flow* so the
    underlying pipeline can tell a regeneration apart from a fresh turn.
    """
    regenerated = stream_new_chat(
        user_query=user_query,
        search_space_id=search_space_id,
        chat_id=chat_id,
        user_id=user_id,
        llm_config_id=llm_config_id,
        mentioned_document_ids=mentioned_document_ids,
        mentioned_surfsense_doc_ids=mentioned_surfsense_doc_ids,
        mentioned_documents=mentioned_documents,
        checkpoint_id=checkpoint_id,
        needs_history_bootstrap=needs_history_bootstrap,
        thread_visibility=thread_visibility,
        current_user_display_name=current_user_display_name,
        disabled_tools=disabled_tools,
        filesystem_selection=filesystem_selection,
        request_id=request_id,
        user_image_data_urls=user_image_data_urls,
        flow=flow,
    )
    async for frame in regenerated:
        yield frame

View file

@@ -0,0 +1,32 @@
"""Output facts collected while streaming one orchestrated agent turn."""
from __future__ import annotations
from dataclasses import dataclass, field
from typing import Any
@dataclass
class StreamOutput:
    """Output facts collected while streaming one orchestrated agent turn.

    Deliberately mutable: the streaming pipeline receives one instance per
    turn (see ``stream_agent_events``'s ``result`` parameter) and fills the
    fields in; callers read the final values afterwards.
    """

    # Assistant text accumulated so far for this turn.
    accumulated_text: str = ""
    # Interrupt status and, when interrupted, the raw interrupt payload.
    is_interrupted: bool = False
    interrupt_value: dict[str, Any] | None = None
    # Sandbox file identifiers collected during the turn — presumably paths;
    # confirm against the relay code that appends to this list.
    sandbox_files: list[str] = field(default_factory=list)
    # Whether the agent invoked its update-memory tool this turn.
    agent_called_update_memory: bool = False
    # Request/turn identity carried for logging and correlation.
    request_id: str | None = None
    turn_id: str = ""
    # Client environment; defaults match the non-selection fallback used by
    # ``stream_chat`` ("cloud" filesystem, "web" platform).
    filesystem_mode: str = "cloud"
    client_platform: str = "web"
    # Intent-detection outcome; "chat_only" / 0.0 until detection runs.
    intent_detected: str = "chat_only"
    intent_confidence: float = 0.0
    # Write-path bookkeeping: attempt/success/verification flags, plus the
    # commit gate decision and its reason (gate passes by default).
    write_attempted: bool = False
    write_succeeded: bool = False
    verification_succeeded: bool = False
    commit_gate_passed: bool = True
    commit_gate_reason: str = ""
    # DB id of the persisted assistant message, once known.
    assistant_message_id: int | None = None
    # Optional content-assembly helper; excluded from repr to keep logs small.
    content_builder: Any | None = field(default=None, repr=False)
# Backwards-compatible alias: older modules still import ``StreamResult``;
# remove once all call sites migrate to ``StreamOutput``.
StreamResult = StreamOutput