# Mirror of https://github.com/MODSetter/SurfSense.git (synced 2026-05-08 23:32:40 +02:00).
#
# - Added streaming service support to the Q&A agent for real-time token streaming.
# - Updated `answer_question` method to stream responses token-by-token to the frontend.
# - Modified `handle_qna_workflow` to handle both custom and values streaming modes.
# - Enhanced state management to include streaming service for improved user experience.
"""Define the state structures for the agent."""
|
|
|
|
from __future__ import annotations
|
|
|
|
from dataclasses import dataclass, field
|
|
from typing import Any
|
|
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
|
|
from app.services.streaming_service import StreamingService
|
|
|
|
|
|
@dataclass
class State:
    """Defines the dynamic state for the Q&A agent during execution.

    This state tracks the database session, chat history, and the outputs
    generated by the agent's nodes during question answering.

    See: https://langchain-ai.github.io/langgraph/concepts/low_level/#state
    for more information.
    """

    # Runtime context: async database session the agent nodes use for queries.
    db_session: AsyncSession

    # Streaming service for real-time token streaming to the frontend;
    # None when the caller does not request streaming.
    streaming_service: StreamingService | None = None

    # Prior conversation turns. default_factory (not a literal []) avoids the
    # shared-mutable-default pitfall, so each State gets its own fresh list.
    chat_history: list[Any] | None = field(default_factory=list)

    # OUTPUT: Populated by agent nodes
    reranked_documents: list[Any] | None = None
    final_answer: str | None = None