feat: move LLMConfigs from User to SearchSpaces

- TODO: add role-based access control (RBAC) in a follow-up change.
- Updated various services and routes to handle search space-specific LLM preferences.
- Modified frontend components to pass search space ID for LLM configuration management.
- Removed onboarding page and settings page as part of the refactor.
This commit is contained in:
DESKTOP-RTLN3BA\$punk 2025-10-10 00:50:29 -07:00
parent a1b1db3895
commit 633ea3ac0f
44 changed files with 1075 additions and 518 deletions

View file

@@ -260,7 +260,9 @@ async def index_airtable_records(
continue
# Generate document summary
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
if user_llm:
document_metadata = {

View file

@@ -222,7 +222,9 @@ async def index_clickup_tasks(
continue
# Generate summary with metadata
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
if user_llm:
document_metadata = {

View file

@@ -233,7 +233,9 @@ async def index_confluence_pages(
continue
# Generate summary with metadata
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
comment_count = len(comments)
if user_llm:

View file

@@ -325,7 +325,9 @@ async def index_discord_messages(
continue
# Get user's long context LLM
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
if not user_llm:
logger.error(
f"No long context LLM configured for user {user_id}"

View file

@@ -213,7 +213,9 @@ async def index_github_repos(
continue
# Generate summary with metadata
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
if user_llm:
# Extract file extension from file path
file_extension = (

View file

@@ -266,7 +266,9 @@ async def index_google_calendar_events(
continue
# Generate summary with metadata
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
if user_llm:
document_metadata = {

View file

@@ -210,7 +210,9 @@ async def index_google_gmail_messages(
continue
# Generate summary with metadata
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
if user_llm:
document_metadata = {

View file

@@ -216,7 +216,9 @@ async def index_jira_issues(
continue
# Generate summary with metadata
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
comment_count = len(formatted_issue.get("comments", []))
if user_llm:

View file

@@ -228,7 +228,9 @@ async def index_linear_issues(
continue
# Generate summary with metadata
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
state = formatted_issue.get("state", "Unknown")
description = formatted_issue.get("description", "")
comment_count = len(formatted_issue.get("comments", []))

View file

@@ -270,7 +270,9 @@ async def index_luma_events(
continue
# Generate summary with metadata
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
if user_llm:
document_metadata = {

View file

@@ -299,7 +299,9 @@ async def index_notion_pages(
continue
# Get user's long context LLM
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
if not user_llm:
logger.error(f"No long context LLM configured for user {user_id}")
skipped_pages.append(f"{page_title} (no LLM configured)")

View file

@@ -104,9 +104,11 @@ async def add_extension_received_document(
return existing_document
# Get user's long context LLM
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(session, user_id, search_space_id)
if not user_llm:
raise RuntimeError(f"No long context LLM configured for user {user_id}")
raise RuntimeError(
f"No long context LLM configured for user {user_id} in search space {search_space_id}"
)
# Generate summary with metadata
document_metadata = {

View file

@@ -60,9 +60,11 @@ async def add_received_file_document_using_unstructured(
# TODO: Check if file_markdown exceeds token limit of embedding model
# Get user's long context LLM
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(session, user_id, search_space_id)
if not user_llm:
raise RuntimeError(f"No long context LLM configured for user {user_id}")
raise RuntimeError(
f"No long context LLM configured for user {user_id} in search space {search_space_id}"
)
# Generate summary with metadata
document_metadata = {
@@ -140,9 +142,11 @@ async def add_received_file_document_using_llamacloud(
return existing_document
# Get user's long context LLM
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(session, user_id, search_space_id)
if not user_llm:
raise RuntimeError(f"No long context LLM configured for user {user_id}")
raise RuntimeError(
f"No long context LLM configured for user {user_id} in search space {search_space_id}"
)
# Generate summary with metadata
document_metadata = {
@@ -221,9 +225,11 @@ async def add_received_file_document_using_docling(
return existing_document
# Get user's long context LLM
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(session, user_id, search_space_id)
if not user_llm:
raise RuntimeError(f"No long context LLM configured for user {user_id}")
raise RuntimeError(
f"No long context LLM configured for user {user_id} in search space {search_space_id}"
)
# Generate summary using chunked processing for large documents
from app.services.docling_service import create_docling_service

View file

@@ -75,9 +75,11 @@ async def add_received_markdown_file_document(
return existing_document
# Get user's long context LLM
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(session, user_id, search_space_id)
if not user_llm:
raise RuntimeError(f"No long context LLM configured for user {user_id}")
raise RuntimeError(
f"No long context LLM configured for user {user_id} in search space {search_space_id}"
)
# Generate summary with metadata
document_metadata = {

View file

@@ -161,9 +161,11 @@ async def add_crawled_url_document(
)
# Get user's long context LLM
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(session, user_id, search_space_id)
if not user_llm:
raise RuntimeError(f"No long context LLM configured for user {user_id}")
raise RuntimeError(
f"No long context LLM configured for user {user_id} in search space {search_space_id}"
)
# Generate summary
await task_logger.log_task_progress(

View file

@@ -234,9 +234,11 @@ async def add_youtube_video_document(
)
# Get user's long context LLM
user_llm = await get_user_long_context_llm(session, user_id)
user_llm = await get_user_long_context_llm(session, user_id, search_space_id)
if not user_llm:
raise RuntimeError(f"No long context LLM configured for user {user_id}")
raise RuntimeError(
f"No long context LLM configured for user {user_id} in search space {search_space_id}"
)
# Generate summary
await task_logger.log_task_progress(

View file

@@ -98,6 +98,7 @@ async def generate_chat_podcast(
"configurable": {
"podcast_title": "SurfSense",
"user_id": str(user_id),
"search_space_id": search_space_id,
}
}
# Initialize state with database session and streaming service