feat: add attachment support

This commit is contained in:
DESKTOP-RTLN3BA\$punk 2025-12-21 22:26:33 -08:00
parent bb971460fc
commit c2dcb2045d
62 changed files with 1166 additions and 9012 deletions

View file

@ -13,7 +13,6 @@ from app.agents.podcaster.state import State as PodcasterState
from app.celery_app import celery_app
from app.config import config
from app.db import Podcast
from app.tasks.podcast_tasks import generate_chat_podcast
logger = logging.getLogger(__name__)
@ -40,58 +39,6 @@ def get_celery_session_maker():
return async_sessionmaker(engine, expire_on_commit=False)
@celery_app.task(name="generate_chat_podcast", bind=True)
def generate_chat_podcast_task(
    self,
    chat_id: int,
    search_space_id: int,
    user_id: int,
    podcast_title: str | None = None,
    user_prompt: str | None = None,
):
    """
    Celery task to generate a podcast from a chat.

    Celery workers are synchronous, so the async helper is executed on a
    dedicated event loop.

    Args:
        chat_id: ID of the chat to generate podcast from
        search_space_id: ID of the search space
        user_id: ID of the user
        podcast_title: Title for the podcast
        user_prompt: Optional prompt from the user to guide the podcast generation
    """
    # asyncio.run() creates a fresh loop, runs the coroutine, and always
    # shuts down async generators and closes the loop — even when the
    # coroutine raises. The previous manual loop management skipped
    # shutdown_asyncgens() on error because it was not in the finally block.
    asyncio.run(
        _generate_chat_podcast(
            chat_id, search_space_id, user_id, podcast_title, user_prompt
        )
    )
async def _generate_chat_podcast(
    chat_id: int,
    search_space_id: int,
    user_id: int,
    podcast_title: str | None = None,
    user_prompt: str | None = None,
):
    """Open a fresh async DB session and delegate to generate_chat_podcast."""
    session_maker = get_celery_session_maker()
    async with session_maker() as session:
        try:
            await generate_chat_podcast(
                session,
                chat_id,
                search_space_id,
                user_id,
                podcast_title,
                user_prompt,
            )
        except Exception as e:
            # Log before re-raising so Celery still marks the task as failed.
            logger.error(f"Error generating podcast from chat: {e!s}")
            raise
# =============================================================================
# Content-based podcast generation (for new-chat)
# =============================================================================

View file

@ -18,11 +18,28 @@ from app.agents.new_chat.llm_config import (
create_chat_litellm_from_config,
load_llm_config_from_yaml,
)
from app.schemas.new_chat import ChatMessage
from app.schemas.new_chat import ChatAttachment, ChatMessage
from app.services.connector_service import ConnectorService
from app.services.new_streaming_service import VercelStreamingService
def format_attachments_as_context(attachments: list[ChatAttachment]) -> str:
    """Render user attachments as an XML-like context string for the agent.

    Each attachment becomes an ``<attachment>`` element (1-based index, name,
    type) whose content is wrapped in a CDATA section. Returns an empty
    string when there are no attachments.
    """
    if not attachments:
        return ""

    lines = ["<user_attachments>"]
    for idx, att in enumerate(attachments, start=1):
        lines.extend(
            (
                f"<attachment index='{idx}' name='{att.name}' type='{att.type}'>",
                f"<![CDATA[{att.content}]]>",
                "</attachment>",
            )
        )
    lines.append("</user_attachments>")
    return "\n".join(lines)
async def stream_new_chat(
user_query: str,
user_id: str | UUID,
@ -31,6 +48,7 @@ async def stream_new_chat(
session: AsyncSession,
llm_config_id: int = -1,
messages: list[ChatMessage] | None = None,
attachments: list[ChatAttachment] | None = None,
) -> AsyncGenerator[str, None]:
"""
Stream chat responses from the new SurfSense deep agent.
@ -96,6 +114,14 @@ async def stream_new_chat(
# Build input with message history from frontend
langchain_messages = []
# Format the user query with attachment context if any
final_query = user_query
if attachments:
attachment_context = format_attachments_as_context(attachments)
final_query = (
f"{attachment_context}\n\n<user_query>{user_query}</user_query>"
)
# if messages:
# # Convert frontend messages to LangChain format
# for msg in messages:
@ -104,8 +130,8 @@ async def stream_new_chat(
# elif msg.role == "assistant":
# langchain_messages.append(AIMessage(content=msg.content))
# else:
# Fallback: just use the current user query
langchain_messages.append(HumanMessage(content=user_query))
# Fallback: just use the current user query with attachment context
langchain_messages.append(HumanMessage(content=final_query))
input_state = {
# Lets not pass this message atm because we are using the checkpointer to manage the conversation history

View file

@ -1,11 +1,15 @@
from sqlalchemy import select
from sqlalchemy.exc import SQLAlchemyError
"""
Legacy podcast task for old chat system.
NOTE: The old Chat model has been removed. This module is kept for backwards
compatibility but the generate_chat_podcast function will raise an error
if called. Use generate_content_podcast_task in celery_tasks/podcast_tasks.py
for new-chat podcast generation instead.
"""
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.podcaster.graph import graph as podcaster_graph
from app.agents.podcaster.state import State
from app.db import Chat, Podcast
from app.services.task_logging_service import TaskLoggingService
from app.db import Podcast # noqa: F401 - imported for backwards compatibility
async def generate_chat_podcast(
@ -16,196 +20,13 @@ async def generate_chat_podcast(
podcast_title: str | None = None,
user_prompt: str | None = None,
):
task_logger = TaskLoggingService(session, search_space_id)
"""
Legacy function for generating podcasts from old chat system.
# Log task start
log_entry = await task_logger.log_task_start(
task_name="generate_chat_podcast",
source="podcast_task",
message=f"Starting podcast generation for chat {chat_id}",
metadata={
"chat_id": chat_id,
"search_space_id": search_space_id,
"podcast_title": podcast_title,
"user_id": str(user_id),
"user_prompt": user_prompt,
},
This function is deprecated as the old Chat model has been removed.
Use generate_content_podcast_task for new-chat podcast generation.
"""
raise NotImplementedError(
"generate_chat_podcast is deprecated. The old Chat model has been removed. "
"Use generate_content_podcast_task for podcast generation from new-chat."
)
try:
# Fetch the chat with the specified ID
await task_logger.log_task_progress(
log_entry, f"Fetching chat {chat_id} from database", {"stage": "fetch_chat"}
)
query = select(Chat).filter(
Chat.id == chat_id, Chat.search_space_id == search_space_id
)
result = await session.execute(query)
chat = result.scalars().first()
if not chat:
await task_logger.log_task_failure(
log_entry,
f"Chat with id {chat_id} not found in search space {search_space_id}",
"Chat not found",
{"error_type": "ChatNotFound"},
)
raise ValueError(
f"Chat with id {chat_id} not found in search space {search_space_id}"
)
# Create chat history structure
await task_logger.log_task_progress(
log_entry,
f"Processing chat history for chat {chat_id}",
{"stage": "process_chat_history", "message_count": len(chat.messages)},
)
chat_history_str = "<chat_history>"
processed_messages = 0
for message in chat.messages:
if message["role"] == "user":
chat_history_str += f"<user_message>{message['content']}</user_message>"
processed_messages += 1
elif message["role"] == "assistant":
chat_history_str += (
f"<assistant_message>{message['content']}</assistant_message>"
)
processed_messages += 1
chat_history_str += "</chat_history>"
# Pass it to the SurfSense Podcaster
await task_logger.log_task_progress(
log_entry,
f"Initializing podcast generation for chat {chat_id}",
{
"stage": "initialize_podcast_generation",
"processed_messages": processed_messages,
"content_length": len(chat_history_str),
},
)
config = {
"configurable": {
"podcast_title": podcast_title or "SurfSense Podcast",
"user_id": str(user_id),
"search_space_id": search_space_id,
"user_prompt": user_prompt,
}
}
# Initialize state with database session and streaming service
initial_state = State(source_content=chat_history_str, db_session=session)
# Run the graph directly
await task_logger.log_task_progress(
log_entry,
f"Running podcast generation graph for chat {chat_id}",
{"stage": "run_podcast_graph"},
)
result = await podcaster_graph.ainvoke(initial_state, config=config)
# Convert podcast transcript entries to serializable format
await task_logger.log_task_progress(
log_entry,
f"Processing podcast transcript for chat {chat_id}",
{
"stage": "process_transcript",
"transcript_entries": len(result["podcast_transcript"]),
},
)
serializable_transcript = []
for entry in result["podcast_transcript"]:
serializable_transcript.append(
{"speaker_id": entry.speaker_id, "dialog": entry.dialog}
)
# Create a new podcast entry
await task_logger.log_task_progress(
log_entry,
f"Creating podcast database entry for chat {chat_id}",
{
"stage": "create_podcast_entry",
"file_location": result.get("final_podcast_file_path"),
},
)
# check if podcast already exists for this chat (re-generation)
existing_podcast = await session.execute(
select(Podcast).filter(Podcast.chat_id == chat_id)
)
existing_podcast = existing_podcast.scalars().first()
if existing_podcast:
existing_podcast.podcast_transcript = serializable_transcript
existing_podcast.file_location = result["final_podcast_file_path"]
existing_podcast.chat_state_version = chat.state_version
await session.commit()
await session.refresh(existing_podcast)
return existing_podcast
else:
podcast = Podcast(
title=f"{podcast_title}",
podcast_transcript=serializable_transcript,
file_location=result["final_podcast_file_path"],
search_space_id=search_space_id,
chat_state_version=chat.state_version,
chat_id=chat.id,
)
# Add to session and commit
session.add(podcast)
await session.commit()
await session.refresh(podcast)
# Log success
await task_logger.log_task_success(
log_entry,
f"Successfully generated podcast for chat {chat_id}",
{
"podcast_id": podcast.id,
"podcast_title": podcast_title,
"transcript_entries": len(serializable_transcript),
"file_location": result.get("final_podcast_file_path"),
"processed_messages": processed_messages,
"content_length": len(chat_history_str),
},
)
return podcast
except ValueError as ve:
# ValueError is already logged above for chat not found
if "not found" not in str(ve):
await task_logger.log_task_failure(
log_entry,
f"Value error during podcast generation for chat {chat_id}",
str(ve),
{"error_type": "ValueError"},
)
raise ve
except SQLAlchemyError as db_error:
await session.rollback()
await task_logger.log_task_failure(
log_entry,
f"Database error during podcast generation for chat {chat_id}",
str(db_error),
{"error_type": "SQLAlchemyError"},
)
raise db_error
except Exception as e:
await session.rollback()
await task_logger.log_task_failure(
log_entry,
f"Unexpected error during podcast generation for chat {chat_id}",
str(e),
{"error_type": type(e).__name__},
)
raise RuntimeError(
f"Failed to generate podcast for chat {chat_id}: {e!s}"
) from e