Merge remote-tracking branch 'upstream/main' into feat/bookstack-connector

This commit is contained in:
Differ 2025-12-06 09:15:02 +08:00
commit e238fab638
110 changed files with 10076 additions and 1671 deletions

View file

@ -38,19 +38,24 @@ https://github.com/user-attachments/assets/a0a16566-6967-4374-ac51-9b3e07fbecd7
## Key Features
### 💡 **Idea**:
Have your own highly customizable private NotebookLM and Perplexity integrated with external sources.
- Have your own highly customizable private NotebookLM and Perplexity integrated with external sources.
### 📁 **Multiple File Format Uploading Support**
Save content from your own personal files *(Documents, images, videos and supports **50+ file extensions**)* to your own personal knowledge base .
- Save content from your own personal files *(Documents, images, videos and supports **50+ file extensions**)* to your own personal knowledge base.
### 🔍 **Powerful Search**
Quickly research or find anything in your saved content .
- Quickly research or find anything in your saved content.
### 💬 **Chat with your Saved Content**
Interact in Natural Language and get cited answers.
- Interact in Natural Language and get cited answers.
### 📄 **Cited Answers**
Get Cited answers just like Perplexity.
- Get Cited answers just like Perplexity.
### 🔔 **Privacy & Local LLM Support**
Works Flawlessly with Ollama local LLMs.
- Works Flawlessly with Ollama local LLMs.
### 🏠 **Self Hostable**
Open source and easy to deploy locally.
- Open source and easy to deploy locally.
### 👥 **Team Collaboration with RBAC**
- Role-Based Access Control for Search Spaces
- Invite team members with customizable roles (Owner, Admin, Editor, Viewer)
- Granular permissions for documents, chats, connectors, and settings
- Share knowledge bases securely within your organization
### 🎙️ Podcasts
- Blazingly fast podcast generation agent. (Creates a 3-minute podcast in under 20 seconds.)
- Convert your chat conversations into engaging audio content

View file

@ -39,25 +39,31 @@ https://github.com/user-attachments/assets/a0a16566-6967-4374-ac51-9b3e07fbecd7
## 核心功能
### 💡 **理念**:
拥有您自己的高度可定制的私有 NotebookLM 和 Perplexity并与外部数据源集成。
- 拥有您自己的高度可定制的私有 NotebookLM 和 Perplexity并与外部数据源集成。
### 📁 **支持多种文件格式上传**
将您个人文件中的内容(文档、图像、视频,支持 **50+ 种文件扩展名**)保存到您自己的个人知识库。
- 将您个人文件中的内容(文档、图像、视频,支持 **50+ 种文件扩展名**)保存到您自己的个人知识库。
### 🔍 **强大的搜索功能**
快速研究或查找已保存内容中的任何信息。
- 快速研究或查找已保存内容中的任何信息。
### 💬 **与已保存内容对话**
使用自然语言交互并获得引用答案。
- 使用自然语言交互并获得引用答案。
### 📄 **引用答案**
像 Perplexity 一样获得带引用的答案。
- 像 Perplexity 一样获得带引用的答案。
### 🔔 **隐私保护与本地 LLM 支持**
完美支持 Ollama 本地大语言模型。
- 完美支持 Ollama 本地大语言模型。
### 🏠 **可自托管**
开源且易于本地部署。
- 开源且易于本地部署。
### 👥 **团队协作与 RBAC**
- 搜索空间的基于角色的访问控制
- 使用可自定义的角色(所有者、管理员、编辑者、查看者)邀请团队成员
- 对文档、聊天、连接器和设置的细粒度权限控制
- 在组织内安全共享知识库
### 🎙️ **播客功能**
- 超快速播客生成代理(在 20 秒内创建 3 分钟播客)

View file

@ -0,0 +1,179 @@
"""Add RBAC tables for search space access control
Revision ID: 39
Revises: 38
Create Date: 2025-11-27 00:00:00.000000
This migration adds:
- Permission enum for granular access control
- search_space_roles table for custom roles per search space
- search_space_memberships table for user-searchspace-role relationships
- search_space_invites table for invite links
"""
from collections.abc import Sequence
from sqlalchemy import inspect
from alembic import op
revision: str = "39"
down_revision: str | None = "38"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Upgrade schema - add RBAC tables for search space access control."""
    # Tables are created with raw SQL so IF NOT EXISTS keeps the migration
    # re-runnable against a partially migrated database.
    op.execute(
        """
        CREATE TABLE IF NOT EXISTS search_space_roles (
            id SERIAL PRIMARY KEY,
            created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
            name VARCHAR(100) NOT NULL,
            description VARCHAR(500),
            permissions TEXT[] NOT NULL DEFAULT '{}',
            is_default BOOLEAN NOT NULL DEFAULT FALSE,
            is_system_role BOOLEAN NOT NULL DEFAULT FALSE,
            search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE,
            CONSTRAINT uq_searchspace_role_name UNIQUE (search_space_id, name)
        );
        """
    )
    # search_space_invites must exist before search_space_memberships,
    # which references it via invited_by_invite_id.
    op.execute(
        """
        CREATE TABLE IF NOT EXISTS search_space_invites (
            id SERIAL PRIMARY KEY,
            created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
            invite_code VARCHAR(64) NOT NULL UNIQUE,
            search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE,
            role_id INTEGER REFERENCES search_space_roles(id) ON DELETE SET NULL,
            created_by_id UUID REFERENCES "user"(id) ON DELETE SET NULL,
            expires_at TIMESTAMPTZ,
            max_uses INTEGER,
            uses_count INTEGER NOT NULL DEFAULT 0,
            is_active BOOLEAN NOT NULL DEFAULT TRUE,
            name VARCHAR(100)
        );
        """
    )
    op.execute(
        """
        CREATE TABLE IF NOT EXISTS search_space_memberships (
            id SERIAL PRIMARY KEY,
            created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
            user_id UUID NOT NULL REFERENCES "user"(id) ON DELETE CASCADE,
            search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE,
            role_id INTEGER REFERENCES search_space_roles(id) ON DELETE SET NULL,
            is_owner BOOLEAN NOT NULL DEFAULT FALSE,
            joined_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
            invited_by_invite_id INTEGER REFERENCES search_space_invites(id) ON DELETE SET NULL,
            CONSTRAINT uq_user_searchspace_membership UNIQUE (user_id, search_space_id)
        );
        """
    )
    # Index creation is data-driven: each table maps to the
    # (index_name, columns) pairs it should carry. Pre-existing indexes
    # are skipped so reruns stay idempotent.
    index_plan = {
        "search_space_roles": [
            ("ix_search_space_roles_id", ["id"]),
            ("ix_search_space_roles_created_at", ["created_at"]),
            ("ix_search_space_roles_name", ["name"]),
        ],
        "search_space_memberships": [
            ("ix_search_space_memberships_id", ["id"]),
            ("ix_search_space_memberships_created_at", ["created_at"]),
            ("ix_search_space_memberships_user_id", ["user_id"]),
            ("ix_search_space_memberships_search_space_id", ["search_space_id"]),
        ],
        "search_space_invites": [
            ("ix_search_space_invites_id", ["id"]),
            ("ix_search_space_invites_created_at", ["created_at"]),
            ("ix_search_space_invites_invite_code", ["invite_code"]),
        ],
    }
    # Inspect the live connection so the checks see the tables just created.
    inspector = inspect(op.get_bind())
    for table_name, indexes in index_plan.items():
        present = {idx["name"] for idx in inspector.get_indexes(table_name)}
        for index_name, columns in indexes:
            if index_name not in present:
                op.create_index(index_name, table_name, columns)
def downgrade() -> None:
    """Downgrade schema - remove RBAC tables.

    Mirrors upgrade()'s defensive style: upgrade() uses CREATE ... IF NOT
    EXISTS and index-existence checks, so a partially applied schema is
    possible. Every DROP here therefore uses IF EXISTS so the downgrade
    cannot abort halfway through and leave the schema unrevertable.
    """
    # Drop indexes first (grouped per table, reverse of creation order).
    for index_name in (
        "ix_search_space_memberships_search_space_id",
        "ix_search_space_memberships_user_id",
        "ix_search_space_memberships_created_at",
        "ix_search_space_memberships_id",
        "ix_search_space_invites_invite_code",
        "ix_search_space_invites_created_at",
        "ix_search_space_invites_id",
        "ix_search_space_roles_name",
        "ix_search_space_roles_created_at",
        "ix_search_space_roles_id",
    ):
        op.execute(f'DROP INDEX IF EXISTS "{index_name}"')
    # Drop tables in reverse dependency order: memberships references both
    # invites and roles; invites references roles.
    for table_name in (
        "search_space_memberships",
        "search_space_invites",
        "search_space_roles",
    ):
        op.execute(f'DROP TABLE IF EXISTS "{table_name}"')

View file

@ -0,0 +1,63 @@
"""Move LLM preferences from user-level to search space level
Revision ID: 40
Revises: 39
Create Date: 2024-11-27
This migration moves LLM preferences (long_context_llm_id, fast_llm_id, strategic_llm_id)
from the user_search_space_preferences table to the searchspaces table itself.
This change supports the RBAC model where LLM preferences are shared by all members
of a search space, rather than being per-user.
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "40"
down_revision = "39"
branch_labels = None
depends_on = None
def upgrade():
    """Add shared LLM preference columns to searchspaces and backfill them."""
    # The three per-user preference columns move onto the search space itself.
    for column_name in ("long_context_llm_id", "fast_llm_id", "strategic_llm_id"):
        op.add_column(
            "searchspaces",
            sa.Column(column_name, sa.Integer(), nullable=True),
        )
    # Seed the new columns from the existing per-user preferences of the
    # search space's owner (the user whose user_id is stored on the row).
    connection = op.get_bind()
    connection.execute(
        sa.text("""
        UPDATE searchspaces ss
        SET
            long_context_llm_id = usp.long_context_llm_id,
            fast_llm_id = usp.fast_llm_id,
            strategic_llm_id = usp.strategic_llm_id
        FROM user_search_space_preferences usp
        WHERE ss.id = usp.search_space_id
        AND ss.user_id = usp.user_id
        """)
    )
def downgrade():
    """Drop the LLM preference columns again, in reverse of the add order."""
    for column_name in ("strategic_llm_id", "fast_llm_id", "long_context_llm_id"):
        op.drop_column("searchspaces", column_name)

View file

@ -0,0 +1,212 @@
"""Backfill RBAC data for existing search spaces
Revision ID: 41
Revises: 40
Create Date: 2025-11-28
This migration creates default roles and owner memberships for all existing
search spaces that were created before the RBAC system was implemented.
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "41"
down_revision = "40"
branch_labels = None
depends_on = None
# Default role permissions (must match DEFAULT_ROLE_PERMISSIONS in db.py)
# Each entry becomes one row in search_space_roles per backfilled search
# space. Permission strings follow a "<resource>:<action>" scheme; "*" is
# a wildcard granting everything.
DEFAULT_ROLES = [
    {
        # Owner: the wildcard permission grants full access.
        "name": "Owner",
        "description": "Full access to all resources",
        "permissions": ["*"],
        "is_system_role": True,
        "is_default": False,
    },
    {
        # Admin: everything except ownership; enumerated explicitly.
        "name": "Admin",
        "description": "Can manage members, roles, and all content",
        "permissions": [
            "documents:create",
            "documents:read",
            "documents:update",
            "documents:delete",
            "chats:create",
            "chats:read",
            "chats:update",
            "chats:delete",
            "llm_configs:create",
            "llm_configs:read",
            "llm_configs:update",
            "llm_configs:delete",
            "logs:read",
            "logs:delete",
            "podcasts:create",
            "podcasts:read",
            "podcasts:update",
            "podcasts:delete",
            "connectors:create",
            "connectors:read",
            "connectors:update",
            "connectors:delete",
            "members:read",
            "members:update",
            "members:delete",
            "roles:create",
            "roles:read",
            "roles:update",
            "roles:delete",
            "invites:create",
            "invites:read",
            "invites:delete",
            "settings:read",
            "settings:update",
        ],
        "is_system_role": True,
        "is_default": False,
    },
    {
        # Editor: create/read/update on content, no deletes, no admin
        # surfaces. This is the default role for new members
        # (is_default=True).
        "name": "Editor",
        "description": "Can create and edit content",
        "permissions": [
            "documents:create",
            "documents:read",
            "documents:update",
            "chats:create",
            "chats:read",
            "chats:update",
            "llm_configs:read",
            "logs:read",
            "podcasts:create",
            "podcasts:read",
            "podcasts:update",
            "connectors:create",
            "connectors:read",
            "connectors:update",
            "members:read",
            "roles:read",
        ],
        "is_system_role": True,
        "is_default": True,
    },
    {
        # Viewer: read-only across all resources.
        "name": "Viewer",
        "description": "Read-only access to content",
        "permissions": [
            "documents:read",
            "chats:read",
            "llm_configs:read",
            "logs:read",
            "podcasts:read",
            "connectors:read",
            "members:read",
            "roles:read",
        ],
        "is_system_role": True,
        "is_default": False,
    },
]
def upgrade():
    """Create default roles and owner memberships for pre-RBAC search spaces.

    Data-only backfill: for every search space that has no roles yet, insert
    the four system roles from DEFAULT_ROLES and give the space's creator an
    owner membership bound to the 'Owner' role.
    """
    connection = op.get_bind()
    # Only touch search spaces with no roles yet, so reruns and spaces
    # created after the RBAC release are left alone.
    search_spaces = connection.execute(
        sa.text("""
        SELECT ss.id, ss.user_id
        FROM searchspaces ss
        WHERE NOT EXISTS (
            SELECT 1 FROM search_space_roles ssr
            WHERE ssr.search_space_id = ss.id
        )
        """)
    ).fetchall()
    for ss_id, owner_user_id in search_spaces:
        owner_role_id = None
        # Create default roles for each search space
        for role in DEFAULT_ROLES:
            # Render the permission list as a PostgreSQL array literal for
            # raw SQL. Single quotes are doubled ('') so a permission string
            # containing a quote can neither break the statement nor inject
            # SQL (the previous version interpolated the values unescaped).
            quoted_perms = ",".join(
                "'" + p.replace("'", "''") + "'" for p in role["permissions"]
            )
            perms_literal = f"ARRAY[{quoted_perms}]::TEXT[]"
            result = connection.execute(
                sa.text(f"""
                INSERT INTO search_space_roles
                (name, description, permissions, is_default, is_system_role, search_space_id)
                VALUES (:name, :description, {perms_literal}, :is_default, :is_system_role, :search_space_id)
                RETURNING id
                """),
                {
                    "name": role["name"],
                    "description": role["description"],
                    "is_default": role["is_default"],
                    "is_system_role": role["is_system_role"],
                    "search_space_id": ss_id,
                },
            )
            role_id = result.fetchone()[0]
            # Remember the Owner role so the creator's membership can use it.
            if role["name"] == "Owner":
                owner_role_id = role_id
        # Create owner membership for the search space creator, unless one
        # already exists (e.g. created manually between releases).
        if owner_user_id and owner_role_id:
            existing = connection.execute(
                sa.text("""
                SELECT 1 FROM search_space_memberships
                WHERE user_id = :user_id AND search_space_id = :search_space_id
                """),
                {"user_id": owner_user_id, "search_space_id": ss_id},
            ).fetchone()
            if not existing:
                connection.execute(
                    sa.text("""
                    INSERT INTO search_space_memberships
                    (user_id, search_space_id, role_id, is_owner)
                    VALUES (:user_id, :search_space_id, :role_id, TRUE)
                    """),
                    {
                        "user_id": owner_user_id,
                        "search_space_id": ss_id,
                        "role_id": owner_role_id,
                    },
                )
def downgrade():
    """Best-effort rollback of the backfill.

    This revision only inserted data, so rolling back means deleting it.
    To avoid destroying manually created data, removal is limited to the
    auto-created owner memberships and the system roles themselves.
    """
    conn = op.get_bind()
    # Owner memberships that point at a system 'Owner' role were created
    # by this migration's upgrade step.
    conn.execute(
        sa.text("""
        DELETE FROM search_space_memberships ssm
        USING search_space_roles ssr
        WHERE ssm.role_id = ssr.id
        AND ssm.is_owner = TRUE
        AND ssr.is_system_role = TRUE
        AND ssr.name = 'Owner'
        """)
    )
    # Then drop the system roles.
    conn.execute(
        sa.text("""
        DELETE FROM search_space_roles
        WHERE is_system_role = TRUE
        """)
    )

View file

@ -0,0 +1,52 @@
"""Drop user_search_space_preferences table
Revision ID: 42
Revises: 41
Create Date: 2025-11-28
This table is no longer needed after RBAC implementation:
- LLM preferences are now stored on SearchSpace directly
- User-SearchSpace relationships are handled by SearchSpaceMembership
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "42"
down_revision = "41"
branch_labels = None
depends_on = None
def upgrade():
    """Remove the obsolete user_search_space_preferences table."""
    op.drop_table("user_search_space_preferences")
def downgrade():
    """Recreate user_search_space_preferences as it existed before this revision."""
    # Column order matches the original table definition exactly.
    table_columns = [
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column(
            "created_at", sa.DateTime(timezone=True), server_default=sa.func.now()
        ),
        sa.Column(
            "user_id",
            sa.UUID(),
            sa.ForeignKey("user.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column(
            "search_space_id",
            sa.Integer(),
            sa.ForeignKey("searchspaces.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("long_context_llm_id", sa.Integer(), nullable=True),
        sa.Column("fast_llm_id", sa.Integer(), nullable=True),
        sa.Column("strategic_llm_id", sa.Integer(), nullable=True),
    ]
    op.create_table(
        "user_search_space_preferences",
        *table_columns,
        # One preference row per (user, search space) pair.
        sa.UniqueConstraint("user_id", "search_space_id", name="uq_user_searchspace"),
    )

View file

@ -0,0 +1,75 @@
"""43_add_blocknote_fields_to_documents
Revision ID: 43
Revises: 42
Create Date: 2025-11-30
Adds fields for live document editing:
- blocknote_document: JSONB editor state
- content_needs_reindexing: Flag for regenerating chunks/summary
- last_edited_at: Last edit timestamp
"""
from collections.abc import Sequence
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "43"
down_revision: str | None = "42"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Upgrade schema - Add BlockNote fields and trigger population task."""
    # New live-editing columns on documents:
    # - blocknote_document: JSONB editor state
    # - content_needs_reindexing: flag for regenerating chunks/summary
    # - last_edited_at: last edit timestamp
    new_columns = (
        sa.Column(
            "blocknote_document", postgresql.JSONB(astext_type=sa.Text()), nullable=True
        ),
        sa.Column(
            "content_needs_reindexing",
            sa.Boolean(),
            nullable=False,
            server_default=sa.false(),
        ),
        sa.Column("last_edited_at", sa.TIMESTAMP(timezone=True), nullable=True),
    )
    for column in new_columns:
        op.add_column("documents", column)
    # Kick off an async backfill of blocknote_document for existing rows.
    # Dispatch is best-effort: if Celery (or its broker) is unavailable the
    # schema migration must still succeed, so failures are only reported.
    try:
        from app.tasks.celery_tasks.blocknote_migration_tasks import (
            populate_blocknote_for_documents_task,
        )

        populate_blocknote_for_documents_task.apply_async()
        print(
            "✓ Queued Celery task to populate blocknote_document for existing documents"
        )
    except Exception as e:
        print(f"⚠ Warning: Could not queue blocknote population task: {e}")
        print(" You can manually trigger it later with:")
        print(
            " celery -A app.celery_app call app.tasks.celery_tasks.blocknote_migration_tasks.populate_blocknote_for_documents_task"
        )
def downgrade() -> None:
    """Downgrade schema - Remove BlockNote fields."""
    # Reverse of the add order used in upgrade().
    for column_name in ("last_edited_at", "content_needs_reindexing", "blocknote_document"):
        op.drop_column("documents", column_name)

View file

@ -11,7 +11,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
# Additional imports for document fetching
from sqlalchemy.future import select
from app.db import Document, SearchSpace
from app.db import Document
from app.services.connector_service import ConnectorService
from app.services.query_service import QueryService
@ -92,19 +92,18 @@ def extract_sources_from_documents(
async def fetch_documents_by_ids(
document_ids: list[int], user_id: str, db_session: AsyncSession
document_ids: list[int], search_space_id: int, db_session: AsyncSession
) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
"""
Fetch documents by their IDs with ownership check using DOCUMENTS mode approach.
Fetch documents by their IDs within a search space.
This function ensures that only documents belonging to the user are fetched,
providing security by checking ownership through SearchSpace association.
This function ensures that only documents belonging to the search space are fetched.
Similar to SearchMode.DOCUMENTS, it fetches full documents and concatenates their chunks.
Also creates source objects for UI display, grouped by document type.
Args:
document_ids: List of document IDs to fetch
user_id: The user ID to check ownership
search_space_id: The search space ID to filter by
db_session: The database session
Returns:
@ -114,11 +113,12 @@ async def fetch_documents_by_ids(
return [], []
try:
# Query documents with ownership check
# Query documents filtered by search space
result = await db_session.execute(
select(Document)
.join(SearchSpace)
.filter(Document.id.in_(document_ids), SearchSpace.user_id == user_id)
select(Document).filter(
Document.id.in_(document_ids),
Document.search_space_id == search_space_id,
)
)
documents = result.scalars().all()
@ -515,7 +515,6 @@ async def fetch_documents_by_ids(
async def fetch_relevant_documents(
research_questions: list[str],
user_id: str,
search_space_id: int,
db_session: AsyncSession,
connectors_to_search: list[str],
@ -536,7 +535,6 @@ async def fetch_relevant_documents(
Args:
research_questions: List of research questions to find documents for
user_id: The user ID
search_space_id: The search space ID
db_session: The database session
connectors_to_search: List of connectors to search
@ -619,7 +617,6 @@ async def fetch_relevant_documents(
youtube_chunks,
) = await connector_service.search_youtube(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -646,7 +643,6 @@ async def fetch_relevant_documents(
extension_chunks,
) = await connector_service.search_extension(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -673,7 +669,6 @@ async def fetch_relevant_documents(
crawled_urls_chunks,
) = await connector_service.search_crawled_urls(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -697,7 +692,6 @@ async def fetch_relevant_documents(
elif connector == "FILE":
source_object, files_chunks = await connector_service.search_files(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -721,7 +715,6 @@ async def fetch_relevant_documents(
elif connector == "SLACK_CONNECTOR":
source_object, slack_chunks = await connector_service.search_slack(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -748,7 +741,6 @@ async def fetch_relevant_documents(
notion_chunks,
) = await connector_service.search_notion(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -775,7 +767,6 @@ async def fetch_relevant_documents(
github_chunks,
) = await connector_service.search_github(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -802,7 +793,6 @@ async def fetch_relevant_documents(
linear_chunks,
) = await connector_service.search_linear(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -829,7 +819,6 @@ async def fetch_relevant_documents(
tavily_chunks,
) = await connector_service.search_tavily(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
)
@ -855,7 +844,6 @@ async def fetch_relevant_documents(
searx_chunks,
) = await connector_service.search_searxng(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
)
@ -881,7 +869,6 @@ async def fetch_relevant_documents(
linkup_chunks,
) = await connector_service.search_linkup(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
mode=linkup_mode,
)
@ -907,7 +894,6 @@ async def fetch_relevant_documents(
baidu_chunks,
) = await connector_service.search_baidu(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
)
@ -933,7 +919,6 @@ async def fetch_relevant_documents(
discord_chunks,
) = await connector_service.search_discord(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -955,7 +940,6 @@ async def fetch_relevant_documents(
elif connector == "JIRA_CONNECTOR":
source_object, jira_chunks = await connector_service.search_jira(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -981,7 +965,6 @@ async def fetch_relevant_documents(
calendar_chunks,
) = await connector_service.search_google_calendar(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -1007,7 +990,6 @@ async def fetch_relevant_documents(
airtable_chunks,
) = await connector_service.search_airtable(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -1033,7 +1015,6 @@ async def fetch_relevant_documents(
gmail_chunks,
) = await connector_service.search_google_gmail(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -1059,7 +1040,6 @@ async def fetch_relevant_documents(
confluence_chunks,
) = await connector_service.search_confluence(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -1085,7 +1065,6 @@ async def fetch_relevant_documents(
clickup_chunks,
) = await connector_service.search_clickup(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -1112,7 +1091,6 @@ async def fetch_relevant_documents(
luma_chunks,
) = await connector_service.search_luma(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -1139,7 +1117,6 @@ async def fetch_relevant_documents(
elasticsearch_chunks,
) = await connector_service.search_elasticsearch(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@ -1315,7 +1292,6 @@ async def reformulate_user_query(
reformulated_query = await QueryService.reformulate_query_with_chat_history(
user_query=user_query,
session=state.db_session,
user_id=configuration.user_id,
search_space_id=configuration.search_space_id,
chat_history_str=chat_history_str,
)
@ -1389,7 +1365,7 @@ async def handle_qna_workflow(
user_selected_documents,
) = await fetch_documents_by_ids(
document_ids=configuration.document_ids_to_add_in_context,
user_id=configuration.user_id,
search_space_id=configuration.search_space_id,
db_session=state.db_session,
)
@ -1404,7 +1380,7 @@ async def handle_qna_workflow(
# Create connector service using state db_session
connector_service = ConnectorService(
state.db_session, user_id=configuration.user_id
state.db_session, search_space_id=configuration.search_space_id
)
await connector_service.initialize_counter()
@ -1413,7 +1389,6 @@ async def handle_qna_workflow(
relevant_documents = await fetch_relevant_documents(
research_questions=research_questions,
user_id=configuration.user_id,
search_space_id=configuration.search_space_id,
db_session=state.db_session,
connectors_to_search=configuration.connectors_to_search,
@ -1459,14 +1434,18 @@ async def handle_qna_workflow(
"user_query": user_query, # Use the reformulated query
"reformulated_query": reformulated_query,
"relevant_documents": all_documents, # Use combined documents
"user_id": configuration.user_id,
"search_space_id": configuration.search_space_id,
"language": configuration.language,
}
}
# Create the state for the QNA agent (it has a different state structure)
qna_state = {"db_session": state.db_session, "chat_history": state.chat_history}
# Pass streaming_service so the QNA agent can stream tokens directly
qna_state = {
"db_session": state.db_session,
"chat_history": state.chat_history,
"streaming_service": streaming_service,
}
try:
writer(
@ -1481,36 +1460,26 @@ async def handle_qna_workflow(
complete_content = ""
captured_reranked_documents = []
# Call the QNA agent with streaming
async for _chunk_type, chunk in qna_agent_graph.astream(
qna_state, qna_config, stream_mode=["values"]
# Call the QNA agent with both custom and values streaming modes
# - "custom" captures token-by-token streams from answer_question via writer()
# - "values" captures state updates including final_answer and reranked_documents
async for stream_mode, chunk in qna_agent_graph.astream(
qna_state, qna_config, stream_mode=["custom", "values"]
):
if "final_answer" in chunk:
new_content = chunk["final_answer"]
if new_content and new_content != complete_content:
# Extract only the new content (delta)
delta = new_content[len(complete_content) :]
complete_content = new_content
if stream_mode == "custom":
# Handle custom stream events (token chunks from answer_question)
if isinstance(chunk, dict) and "yield_value" in chunk:
# Forward the streamed token to the parent writer
writer(chunk)
elif stream_mode == "values" and isinstance(chunk, dict):
# Handle state value updates
# Capture the final answer from state
if chunk.get("final_answer"):
complete_content = chunk["final_answer"]
# Stream the real-time answer if there's new content
if delta:
# Update terminal with progress
word_count = len(complete_content.split())
writer(
{
"yield_value": streaming_service.format_terminal_info_delta(
f"✍️ Writing answer... ({word_count} words)"
)
}
)
writer(
{"yield_value": streaming_service.format_text_chunk(delta)}
)
# Capture reranked documents from QNA agent for further question generation
if "reranked_documents" in chunk:
captured_reranked_documents = chunk["reranked_documents"]
# Capture reranked documents from QNA agent for further question generation
if chunk.get("reranked_documents"):
captured_reranked_documents = chunk["reranked_documents"]
# Set default if no content was received
if not complete_content:
@ -1551,12 +1520,11 @@ async def generate_further_questions(
Returns:
Dict containing the further questions in the "further_questions" key for state update.
"""
from app.services.llm_service import get_user_fast_llm
from app.services.llm_service import get_fast_llm
# Get configuration and state data
configuration = Configuration.from_runnable_config(config)
chat_history = state.chat_history
user_id = configuration.user_id
search_space_id = configuration.search_space_id
streaming_service = state.streaming_service
@ -1571,10 +1539,10 @@ async def generate_further_questions(
}
)
# Get user's fast LLM
llm = await get_user_fast_llm(state.db_session, user_id, search_space_id)
# Get search space's fast LLM
llm = await get_fast_llm(state.db_session, search_space_id)
if not llm:
error_message = f"No fast LLM configured for user {user_id} in search space {search_space_id}"
error_message = f"No fast LLM configured for search space {search_space_id}"
print(error_message)
writer({"yield_value": streaming_service.format_error(error_message)})

View file

@ -18,7 +18,6 @@ class Configuration:
relevant_documents: list[
Any
] # Documents provided directly to the agent for answering
user_id: str # User identifier
search_space_id: int # Search space identifier
language: str | None = None # Language for responses

View file

@ -3,6 +3,7 @@ from typing import Any
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.runnables import RunnableConfig
from langgraph.types import StreamWriter
from sqlalchemy import select
from app.db import SearchSpace
@ -129,9 +130,11 @@ async def rerank_documents(state: State, config: RunnableConfig) -> dict[str, An
return {"reranked_documents": documents}
async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any]:
async def answer_question(
state: State, config: RunnableConfig, writer: StreamWriter
) -> dict[str, Any]:
"""
Answer the user's question using the provided documents.
Answer the user's question using the provided documents with real-time streaming.
This node takes the relevant documents provided in the configuration and uses
an LLM to generate a comprehensive answer to the user's question with
@ -139,19 +142,23 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
documents. If no documents are provided, it will use chat history to generate
an answer.
The response is streamed token-by-token for real-time updates to the frontend.
Returns:
Dict containing the final answer in the "final_answer" key.
"""
from app.services.llm_service import get_user_fast_llm
from app.services.llm_service import get_fast_llm
# Get configuration and relevant documents from configuration
configuration = Configuration.from_runnable_config(config)
documents = state.reranked_documents
user_query = configuration.user_query
user_id = configuration.user_id
search_space_id = configuration.search_space_id
language = configuration.language
# Get streaming service from state
streaming_service = state.streaming_service
# Fetch search space to get QnA configuration
result = await state.db_session.execute(
select(SearchSpace).where(SearchSpace.id == search_space_id)
@ -178,10 +185,10 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
else ""
)
# Get user's fast LLM
llm = await get_user_fast_llm(state.db_session, user_id, search_space_id)
# Get search space's fast LLM
llm = await get_fast_llm(state.db_session, search_space_id)
if not llm:
error_message = f"No fast LLM configured for user {user_id} in search space {search_space_id}"
error_message = f"No fast LLM configured for search space {search_space_id}"
print(error_message)
raise RuntimeError(error_message)
@ -280,8 +287,17 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
total_tokens = calculate_token_count(messages_with_chat_history, llm.model)
print(f"Final token count: {total_tokens}")
# Call the LLM and get the response
response = await llm.ainvoke(messages_with_chat_history)
final_answer = response.content
# Stream the LLM response token by token
final_answer = ""
async for chunk in llm.astream(messages_with_chat_history):
# Extract the content from the chunk
if hasattr(chunk, "content") and chunk.content:
token = chunk.content
final_answer += token
# Stream the token to the frontend via custom stream
if streaming_service:
writer({"yield_value": streaming_service.format_text_chunk(token)})
return {"final_answer": final_answer}

View file

@ -7,6 +7,8 @@ from typing import Any
from sqlalchemy.ext.asyncio import AsyncSession
from app.services.streaming_service import StreamingService
@dataclass
class State:
@ -21,6 +23,9 @@ class State:
# Runtime context
db_session: AsyncSession
# Streaming service for real-time token streaming
streaming_service: StreamingService | None = None
chat_history: list[Any] | None = field(default_factory=list)
# OUTPUT: Populated by agent nodes
reranked_documents: list[Any] | None = None

View file

@ -63,6 +63,8 @@ celery_app = Celery(
"app.tasks.celery_tasks.podcast_tasks",
"app.tasks.celery_tasks.connector_tasks",
"app.tasks.celery_tasks.schedule_checker_task",
"app.tasks.celery_tasks.blocknote_migration_tasks",
"app.tasks.celery_tasks.document_reindex_tasks",
],
)

View file

@ -92,7 +92,11 @@ class JiraConnector:
}
def make_api_request(
self, endpoint: str, params: dict[str, Any] | None = None
self,
endpoint: str,
params: dict[str, Any] | None = None,
method: str = "GET",
json_payload: dict[str, Any] | None = None,
) -> dict[str, Any]:
"""
Make a request to the Jira API.
@ -116,7 +120,12 @@ class JiraConnector:
url = f"{self.base_url}/rest/api/{self.api_version}/{endpoint}"
headers = self.get_headers()
response = requests.get(url, headers=headers, params=params, timeout=500)
if method.upper() == "POST":
response = requests.post(
url, headers=headers, json=json_payload, timeout=500
)
else:
response = requests.get(url, headers=headers, params=params, timeout=500)
if response.status_code == 200:
return response.json()
@ -169,19 +178,23 @@ class JiraConnector:
"project",
]
params = {
"jql": jql,
"fields": ",".join(fields),
"maxResults": 100,
"startAt": 0,
}
all_issues = []
start_at = 0
max_results = 100
all_issues = []
start_at = 0
while True:
params["startAt"] = start_at
result = self.make_api_request("search", params)
json_payload = {
"jql": jql,
"fields": fields, # API accepts list
"maxResults": max_results,
"startAt": start_at,
}
result = self.make_api_request(
"search/jql", json_payload=json_payload, method="POST"
)
if not isinstance(result, dict) or "issues" not in result:
raise Exception("Invalid response from Jira API")
@ -226,9 +239,9 @@ class JiraConnector:
)
# TODO : This JQL needs some improvement to work as expected
_jql = f"{date_filter}"
jql = f"{date_filter}"
if project_key:
_jql = (
jql = (
f'project = "{project_key}" AND {date_filter} ORDER BY created DESC'
)
@ -250,7 +263,7 @@ class JiraConnector:
fields.append("comment")
params = {
# "jql": "", TODO : Add a JQL query to filter from a date range
"jql": jql,
"fields": ",".join(fields),
"maxResults": 100,
"startAt": 0,
@ -262,7 +275,7 @@ class JiraConnector:
while True:
params["startAt"] = start_at
result = self.make_api_request("search", params)
result = self.make_api_request("search/jql", params)
if not isinstance(result, dict) or "issues" not in result:
return [], "Invalid response from Jira API"

View file

@ -20,7 +20,7 @@ from sqlalchemy import (
UniqueConstraint,
text,
)
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, declared_attr, relationship
@ -133,6 +133,169 @@ class LogStatus(str, Enum):
FAILED = "FAILED"
class Permission(str, Enum):
"""
Granular permissions for search space resources.
Use '*' (FULL_ACCESS) to grant all permissions.
"""
# Documents
DOCUMENTS_CREATE = "documents:create"
DOCUMENTS_READ = "documents:read"
DOCUMENTS_UPDATE = "documents:update"
DOCUMENTS_DELETE = "documents:delete"
# Chats
CHATS_CREATE = "chats:create"
CHATS_READ = "chats:read"
CHATS_UPDATE = "chats:update"
CHATS_DELETE = "chats:delete"
# LLM Configs
LLM_CONFIGS_CREATE = "llm_configs:create"
LLM_CONFIGS_READ = "llm_configs:read"
LLM_CONFIGS_UPDATE = "llm_configs:update"
LLM_CONFIGS_DELETE = "llm_configs:delete"
# Podcasts
PODCASTS_CREATE = "podcasts:create"
PODCASTS_READ = "podcasts:read"
PODCASTS_UPDATE = "podcasts:update"
PODCASTS_DELETE = "podcasts:delete"
# Connectors
CONNECTORS_CREATE = "connectors:create"
CONNECTORS_READ = "connectors:read"
CONNECTORS_UPDATE = "connectors:update"
CONNECTORS_DELETE = "connectors:delete"
# Logs
LOGS_READ = "logs:read"
LOGS_DELETE = "logs:delete"
# Members
MEMBERS_INVITE = "members:invite"
MEMBERS_VIEW = "members:view"
MEMBERS_REMOVE = "members:remove"
MEMBERS_MANAGE_ROLES = "members:manage_roles"
# Roles
ROLES_CREATE = "roles:create"
ROLES_READ = "roles:read"
ROLES_UPDATE = "roles:update"
ROLES_DELETE = "roles:delete"
# Search Space Settings
SETTINGS_VIEW = "settings:view"
SETTINGS_UPDATE = "settings:update"
SETTINGS_DELETE = "settings:delete" # Delete the entire search space
# Full access wildcard
FULL_ACCESS = "*"
# Predefined role permission sets for convenience
DEFAULT_ROLE_PERMISSIONS = {
"Owner": [Permission.FULL_ACCESS.value],
"Admin": [
# Documents
Permission.DOCUMENTS_CREATE.value,
Permission.DOCUMENTS_READ.value,
Permission.DOCUMENTS_UPDATE.value,
Permission.DOCUMENTS_DELETE.value,
# Chats
Permission.CHATS_CREATE.value,
Permission.CHATS_READ.value,
Permission.CHATS_UPDATE.value,
Permission.CHATS_DELETE.value,
# LLM Configs
Permission.LLM_CONFIGS_CREATE.value,
Permission.LLM_CONFIGS_READ.value,
Permission.LLM_CONFIGS_UPDATE.value,
Permission.LLM_CONFIGS_DELETE.value,
# Podcasts
Permission.PODCASTS_CREATE.value,
Permission.PODCASTS_READ.value,
Permission.PODCASTS_UPDATE.value,
Permission.PODCASTS_DELETE.value,
# Connectors
Permission.CONNECTORS_CREATE.value,
Permission.CONNECTORS_READ.value,
Permission.CONNECTORS_UPDATE.value,
Permission.CONNECTORS_DELETE.value,
# Logs
Permission.LOGS_READ.value,
Permission.LOGS_DELETE.value,
# Members
Permission.MEMBERS_INVITE.value,
Permission.MEMBERS_VIEW.value,
Permission.MEMBERS_REMOVE.value,
Permission.MEMBERS_MANAGE_ROLES.value,
# Roles
Permission.ROLES_CREATE.value,
Permission.ROLES_READ.value,
Permission.ROLES_UPDATE.value,
Permission.ROLES_DELETE.value,
# Settings (no delete)
Permission.SETTINGS_VIEW.value,
Permission.SETTINGS_UPDATE.value,
],
"Editor": [
# Documents
Permission.DOCUMENTS_CREATE.value,
Permission.DOCUMENTS_READ.value,
Permission.DOCUMENTS_UPDATE.value,
Permission.DOCUMENTS_DELETE.value,
# Chats
Permission.CHATS_CREATE.value,
Permission.CHATS_READ.value,
Permission.CHATS_UPDATE.value,
Permission.CHATS_DELETE.value,
# LLM Configs (read only)
Permission.LLM_CONFIGS_READ.value,
Permission.LLM_CONFIGS_CREATE.value,
Permission.LLM_CONFIGS_UPDATE.value,
# Podcasts
Permission.PODCASTS_CREATE.value,
Permission.PODCASTS_READ.value,
Permission.PODCASTS_UPDATE.value,
Permission.PODCASTS_DELETE.value,
# Connectors (full access for editors)
Permission.CONNECTORS_CREATE.value,
Permission.CONNECTORS_READ.value,
Permission.CONNECTORS_UPDATE.value,
# Logs
Permission.LOGS_READ.value,
# Members (view only)
Permission.MEMBERS_VIEW.value,
# Roles (read only)
Permission.ROLES_READ.value,
# Settings (view only)
Permission.SETTINGS_VIEW.value,
],
"Viewer": [
# Documents (read only)
Permission.DOCUMENTS_READ.value,
# Chats (read only)
Permission.CHATS_READ.value,
# LLM Configs (read only)
Permission.LLM_CONFIGS_READ.value,
# Podcasts (read only)
Permission.PODCASTS_READ.value,
# Connectors (read only)
Permission.CONNECTORS_READ.value,
# Logs (read only)
Permission.LOGS_READ.value,
# Members (view only)
Permission.MEMBERS_VIEW.value,
# Roles (read only)
Permission.ROLES_READ.value,
# Settings (view only)
Permission.SETTINGS_VIEW.value,
],
}
class Base(DeclarativeBase):
pass
@ -182,6 +345,17 @@ class Document(BaseModel, TimestampMixin):
unique_identifier_hash = Column(String, nullable=True, index=True, unique=True)
embedding = Column(Vector(config.embedding_model_instance.dimension))
# BlockNote live editing state (NULL when never edited)
blocknote_document = Column(JSONB, nullable=True)
# blocknote background reindex flag
content_needs_reindexing = Column(
Boolean, nullable=False, default=False, server_default=text("false")
)
# Track when blocknote document was last edited
last_edited_at = Column(TIMESTAMP(timezone=True), nullable=True)
search_space_id = Column(
Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
)
@ -232,6 +406,13 @@ class SearchSpace(BaseModel, TimestampMixin):
qna_custom_instructions = Column(
Text, nullable=True, default=""
) # User's custom instructions
# Search space-level LLM preferences (shared by all members)
# Note: These can be negative IDs for global configs (from YAML) or positive IDs for custom configs (from DB)
long_context_llm_id = Column(Integer, nullable=True)
fast_llm_id = Column(Integer, nullable=True)
strategic_llm_id = Column(Integer, nullable=True)
user_id = Column(
UUID(as_uuid=True), ForeignKey("user.id", ondelete="CASCADE"), nullable=False
)
@ -273,9 +454,24 @@ class SearchSpace(BaseModel, TimestampMixin):
order_by="LLMConfig.id",
cascade="all, delete-orphan",
)
user_preferences = relationship(
"UserSearchSpacePreference",
# RBAC relationships
roles = relationship(
"SearchSpaceRole",
back_populates="search_space",
order_by="SearchSpaceRole.id",
cascade="all, delete-orphan",
)
memberships = relationship(
"SearchSpaceMembership",
back_populates="search_space",
order_by="SearchSpaceMembership.id",
cascade="all, delete-orphan",
)
invites = relationship(
"SearchSpaceInvite",
back_populates="search_space",
order_by="SearchSpaceInvite.id",
cascade="all, delete-orphan",
)
@ -339,45 +535,6 @@ class LLMConfig(BaseModel, TimestampMixin):
search_space = relationship("SearchSpace", back_populates="llm_configs")
class UserSearchSpacePreference(BaseModel, TimestampMixin):
__tablename__ = "user_search_space_preferences"
__table_args__ = (
UniqueConstraint(
"user_id",
"search_space_id",
name="uq_user_searchspace",
),
)
user_id = Column(
UUID(as_uuid=True), ForeignKey("user.id", ondelete="CASCADE"), nullable=False
)
search_space_id = Column(
Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
)
# User-specific LLM preferences for this search space
# Note: These can be negative IDs for global configs (from YAML) or positive IDs for custom configs (from DB)
# Foreign keys removed to support global configs with negative IDs
long_context_llm_id = Column(Integer, nullable=True)
fast_llm_id = Column(Integer, nullable=True)
strategic_llm_id = Column(Integer, nullable=True)
# Future RBAC fields can be added here
# role = Column(String(50), nullable=True) # e.g., 'owner', 'editor', 'viewer'
# permissions = Column(JSON, nullable=True)
user = relationship("User", back_populates="search_space_preferences")
search_space = relationship("SearchSpace", back_populates="user_preferences")
# Note: Relationships removed because foreign keys no longer exist
# Global configs (negative IDs) don't exist in llm_configs table
# Application code manually fetches configs when needed
# long_context_llm = relationship("LLMConfig", foreign_keys=[long_context_llm_id], post_update=True)
# fast_llm = relationship("LLMConfig", foreign_keys=[fast_llm_id], post_update=True)
# strategic_llm = relationship("LLMConfig", foreign_keys=[strategic_llm_id], post_update=True)
class Log(BaseModel, TimestampMixin):
__tablename__ = "logs"
@ -395,6 +552,140 @@ class Log(BaseModel, TimestampMixin):
search_space = relationship("SearchSpace", back_populates="logs")
class SearchSpaceRole(BaseModel, TimestampMixin):
"""
Custom roles that can be defined per search space.
Each search space can have multiple roles with different permission sets.
"""
__tablename__ = "search_space_roles"
__table_args__ = (
UniqueConstraint(
"search_space_id",
"name",
name="uq_searchspace_role_name",
),
)
name = Column(String(100), nullable=False, index=True)
description = Column(String(500), nullable=True)
# List of Permission enum values (e.g., ["documents:read", "chats:create"])
permissions = Column(ARRAY(String), nullable=False, default=[])
# Whether this role is assigned to new members by default when they join via invite
is_default = Column(Boolean, nullable=False, default=False)
# System roles (Owner, Admin, Editor, Viewer) cannot be deleted
is_system_role = Column(Boolean, nullable=False, default=False)
search_space_id = Column(
Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
)
search_space = relationship("SearchSpace", back_populates="roles")
memberships = relationship(
"SearchSpaceMembership", back_populates="role", passive_deletes=True
)
invites = relationship(
"SearchSpaceInvite", back_populates="role", passive_deletes=True
)
class SearchSpaceMembership(BaseModel, TimestampMixin):
"""
Tracks user membership in search spaces with their assigned role.
Each user can be a member of multiple search spaces with different roles.
"""
__tablename__ = "search_space_memberships"
__table_args__ = (
UniqueConstraint(
"user_id",
"search_space_id",
name="uq_user_searchspace_membership",
),
)
user_id = Column(
UUID(as_uuid=True), ForeignKey("user.id", ondelete="CASCADE"), nullable=False
)
search_space_id = Column(
Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
)
role_id = Column(
Integer,
ForeignKey("search_space_roles.id", ondelete="SET NULL"),
nullable=True,
)
# Indicates if this user is the original creator/owner of the search space
is_owner = Column(Boolean, nullable=False, default=False)
# Timestamp when the user joined (via invite or as creator)
joined_at = Column(
TIMESTAMP(timezone=True),
nullable=False,
default=lambda: datetime.now(UTC),
)
# Reference to the invite used to join (null if owner/creator)
invited_by_invite_id = Column(
Integer,
ForeignKey("search_space_invites.id", ondelete="SET NULL"),
nullable=True,
)
user = relationship("User", back_populates="search_space_memberships")
search_space = relationship("SearchSpace", back_populates="memberships")
role = relationship("SearchSpaceRole", back_populates="memberships")
invited_by_invite = relationship(
"SearchSpaceInvite", back_populates="used_by_memberships"
)
class SearchSpaceInvite(BaseModel, TimestampMixin):
"""
Invite links for search spaces.
Users can create invite links with specific roles that others can use to join.
"""
__tablename__ = "search_space_invites"
# Unique invite code (used in invite URLs)
invite_code = Column(String(64), nullable=False, unique=True, index=True)
search_space_id = Column(
Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
)
# Role to assign when invite is used (null means use default role)
role_id = Column(
Integer,
ForeignKey("search_space_roles.id", ondelete="SET NULL"),
nullable=True,
)
# User who created this invite
created_by_id = Column(
UUID(as_uuid=True),
ForeignKey("user.id", ondelete="SET NULL"),
nullable=True,
)
# Expiration timestamp (null means never expires)
expires_at = Column(TIMESTAMP(timezone=True), nullable=True)
# Maximum number of times this invite can be used (null means unlimited)
max_uses = Column(Integer, nullable=True)
# Number of times this invite has been used
uses_count = Column(Integer, nullable=False, default=0)
# Whether this invite is currently active
is_active = Column(Boolean, nullable=False, default=True)
# Optional custom name/label for the invite
name = Column(String(100), nullable=True)
search_space = relationship("SearchSpace", back_populates="invites")
role = relationship("SearchSpaceRole", back_populates="invites")
created_by = relationship("User", back_populates="created_invites")
used_by_memberships = relationship(
"SearchSpaceMembership",
back_populates="invited_by_invite",
passive_deletes=True,
)
if config.AUTH_TYPE == "GOOGLE":
class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base):
@ -405,11 +696,18 @@ if config.AUTH_TYPE == "GOOGLE":
"OAuthAccount", lazy="joined"
)
search_spaces = relationship("SearchSpace", back_populates="user")
search_space_preferences = relationship(
"UserSearchSpacePreference",
# RBAC relationships
search_space_memberships = relationship(
"SearchSpaceMembership",
back_populates="user",
cascade="all, delete-orphan",
)
created_invites = relationship(
"SearchSpaceInvite",
back_populates="created_by",
passive_deletes=True,
)
# Page usage tracking for ETL services
pages_limit = Column(Integer, nullable=False, default=500, server_default="500")
@ -419,11 +717,18 @@ else:
class User(SQLAlchemyBaseUserTableUUID, Base):
search_spaces = relationship("SearchSpace", back_populates="user")
search_space_preferences = relationship(
"UserSearchSpacePreference",
# RBAC relationships
search_space_memberships = relationship(
"SearchSpaceMembership",
back_populates="user",
cascade="all, delete-orphan",
)
created_invites = relationship(
"SearchSpaceInvite",
back_populates="created_by",
passive_deletes=True,
)
# Page usage tracking for ETL services
pages_limit = Column(Integer, nullable=False, default=500, server_default="500")
@ -494,3 +799,109 @@ async def get_documents_hybrid_search_retriever(
session: AsyncSession = Depends(get_async_session),
):
return DocumentHybridSearchRetriever(session)
def has_permission(user_permissions: list[str], required_permission: str) -> bool:
"""
Check if the user has the required permission.
Supports wildcard (*) for full access.
Args:
user_permissions: List of permission strings the user has
required_permission: The permission string to check for
Returns:
True if user has the permission, False otherwise
"""
if not user_permissions:
return False
# Full access wildcard grants all permissions
if Permission.FULL_ACCESS.value in user_permissions:
return True
return required_permission in user_permissions
def has_any_permission(
user_permissions: list[str], required_permissions: list[str]
) -> bool:
"""
Check if the user has any of the required permissions.
Args:
user_permissions: List of permission strings the user has
required_permissions: List of permission strings to check for (any match)
Returns:
True if user has at least one of the permissions, False otherwise
"""
if not user_permissions:
return False
if Permission.FULL_ACCESS.value in user_permissions:
return True
return any(perm in user_permissions for perm in required_permissions)
def has_all_permissions(
user_permissions: list[str], required_permissions: list[str]
) -> bool:
"""
Check if the user has all of the required permissions.
Args:
user_permissions: List of permission strings the user has
required_permissions: List of permission strings to check for (all must match)
Returns:
True if user has all of the permissions, False otherwise
"""
if not user_permissions:
return False
if Permission.FULL_ACCESS.value in user_permissions:
return True
return all(perm in user_permissions for perm in required_permissions)
def get_default_roles_config() -> list[dict]:
"""
Get the configuration for default system roles.
These roles are created automatically when a search space is created.
Returns:
List of role configurations with name, description, permissions, and flags
"""
return [
{
"name": "Owner",
"description": "Full access to all search space resources and settings",
"permissions": DEFAULT_ROLE_PERMISSIONS["Owner"],
"is_default": False,
"is_system_role": True,
},
{
"name": "Admin",
"description": "Can manage most resources except deleting the search space",
"permissions": DEFAULT_ROLE_PERMISSIONS["Admin"],
"is_default": False,
"is_system_role": True,
},
{
"name": "Editor",
"description": "Can create and edit documents, chats, and podcasts",
"permissions": DEFAULT_ROLE_PERMISSIONS["Editor"],
"is_default": True, # Default role for new members via invite
"is_system_role": True,
},
{
"name": "Viewer",
"description": "Read-only access to search space resources",
"permissions": DEFAULT_ROLE_PERMISSIONS["Viewer"],
"is_default": False,
"is_system_role": True,
},
]

View file

@ -12,8 +12,7 @@ class ChucksHybridSearchRetriever:
self,
query_text: str,
top_k: int,
user_id: str,
search_space_id: int | None = None,
search_space_id: int,
) -> list:
"""
Perform vector similarity search on chunks.
@ -21,8 +20,7 @@ class ChucksHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
user_id: The ID of the user performing the search
search_space_id: Optional search space ID to filter results
search_space_id: The search space ID to search within
Returns:
List of chunks sorted by vector similarity
@ -31,25 +29,20 @@ class ChucksHybridSearchRetriever:
from sqlalchemy.orm import joinedload
from app.config import config
from app.db import Chunk, Document, SearchSpace
from app.db import Chunk, Document
# Get embedding for the query
embedding_model = config.embedding_model_instance
query_embedding = embedding_model.embed(query_text)
# Build the base query with user ownership check
# Build the query filtered by search space
query = (
select(Chunk)
.options(joinedload(Chunk.document).joinedload(Document.search_space))
.join(Document, Chunk.document_id == Document.id)
.join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(SearchSpace.user_id == user_id)
.where(Document.search_space_id == search_space_id)
)
# Add search space filter if provided
if search_space_id is not None:
query = query.where(Document.search_space_id == search_space_id)
# Add vector similarity ordering
query = query.order_by(Chunk.embedding.op("<=>")(query_embedding)).limit(top_k)
@ -63,8 +56,7 @@ class ChucksHybridSearchRetriever:
self,
query_text: str,
top_k: int,
user_id: str,
search_space_id: int | None = None,
search_space_id: int,
) -> list:
"""
Perform full-text keyword search on chunks.
@ -72,8 +64,7 @@ class ChucksHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
user_id: The ID of the user performing the search
search_space_id: Optional search space ID to filter results
search_space_id: The search space ID to search within
Returns:
List of chunks sorted by text relevance
@ -81,28 +72,23 @@ class ChucksHybridSearchRetriever:
from sqlalchemy import func, select
from sqlalchemy.orm import joinedload
from app.db import Chunk, Document, SearchSpace
from app.db import Chunk, Document
# Create tsvector and tsquery for PostgreSQL full-text search
tsvector = func.to_tsvector("english", Chunk.content)
tsquery = func.plainto_tsquery("english", query_text)
# Build the base query with user ownership check
# Build the query filtered by search space
query = (
select(Chunk)
.options(joinedload(Chunk.document).joinedload(Document.search_space))
.join(Document, Chunk.document_id == Document.id)
.join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(SearchSpace.user_id == user_id)
.where(Document.search_space_id == search_space_id)
.where(
tsvector.op("@@")(tsquery)
) # Only include results that match the query
)
# Add search space filter if provided
if search_space_id is not None:
query = query.where(Document.search_space_id == search_space_id)
# Add text search ranking
query = query.order_by(func.ts_rank_cd(tsvector, tsquery).desc()).limit(top_k)
@ -116,8 +102,7 @@ class ChucksHybridSearchRetriever:
self,
query_text: str,
top_k: int,
user_id: str,
search_space_id: int | None = None,
search_space_id: int,
document_type: str | None = None,
) -> list:
"""
@ -126,8 +111,7 @@ class ChucksHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
user_id: The ID of the user performing the search
search_space_id: Optional search space ID to filter results
search_space_id: The search space ID to search within
document_type: Optional document type to filter results (e.g., "FILE", "CRAWLED_URL")
Returns:
@ -137,7 +121,7 @@ class ChucksHybridSearchRetriever:
from sqlalchemy.orm import joinedload
from app.config import config
from app.db import Chunk, Document, DocumentType, SearchSpace
from app.db import Chunk, Document, DocumentType
# Get embedding for the query
embedding_model = config.embedding_model_instance
@ -151,12 +135,8 @@ class ChucksHybridSearchRetriever:
tsvector = func.to_tsvector("english", Chunk.content)
tsquery = func.plainto_tsquery("english", query_text)
# Base conditions for document filtering
base_conditions = [SearchSpace.user_id == user_id]
# Add search space filter if provided
if search_space_id is not None:
base_conditions.append(Document.search_space_id == search_space_id)
# Base conditions for chunk filtering - search space is required
base_conditions = [Document.search_space_id == search_space_id]
# Add document type filter if provided
if document_type is not None:
@ -171,7 +151,7 @@ class ChucksHybridSearchRetriever:
else:
base_conditions.append(Document.document_type == document_type)
# CTE for semantic search with user ownership check
# CTE for semantic search filtered by search space
semantic_search_cte = (
select(
Chunk.id,
@ -180,7 +160,6 @@ class ChucksHybridSearchRetriever:
.label("rank"),
)
.join(Document, Chunk.document_id == Document.id)
.join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(*base_conditions)
)
@ -190,7 +169,7 @@ class ChucksHybridSearchRetriever:
.cte("semantic_search")
)
# CTE for keyword search with user ownership check
# CTE for keyword search filtered by search space
keyword_search_cte = (
select(
Chunk.id,
@ -199,7 +178,6 @@ class ChucksHybridSearchRetriever:
.label("rank"),
)
.join(Document, Chunk.document_id == Document.id)
.join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(*base_conditions)
.where(tsvector.op("@@")(tsquery))
)

View file

@ -12,8 +12,7 @@ class DocumentHybridSearchRetriever:
self,
query_text: str,
top_k: int,
user_id: str,
search_space_id: int | None = None,
search_space_id: int,
) -> list:
"""
Perform vector similarity search on documents.
@ -21,8 +20,7 @@ class DocumentHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
user_id: The ID of the user performing the search
search_space_id: Optional search space ID to filter results
search_space_id: The search space ID to search within
Returns:
List of documents sorted by vector similarity
@ -31,24 +29,19 @@ class DocumentHybridSearchRetriever:
from sqlalchemy.orm import joinedload
from app.config import config
from app.db import Document, SearchSpace
from app.db import Document
# Get embedding for the query
embedding_model = config.embedding_model_instance
query_embedding = embedding_model.embed(query_text)
# Build the base query with user ownership check
# Build the query filtered by search space
query = (
select(Document)
.options(joinedload(Document.search_space))
.join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(SearchSpace.user_id == user_id)
.where(Document.search_space_id == search_space_id)
)
# Add search space filter if provided
if search_space_id is not None:
query = query.where(Document.search_space_id == search_space_id)
# Add vector similarity ordering
query = query.order_by(Document.embedding.op("<=>")(query_embedding)).limit(
top_k
@ -64,8 +57,7 @@ class DocumentHybridSearchRetriever:
self,
query_text: str,
top_k: int,
user_id: str,
search_space_id: int | None = None,
search_space_id: int,
) -> list:
"""
Perform full-text keyword search on documents.
@ -73,8 +65,7 @@ class DocumentHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
user_id: The ID of the user performing the search
search_space_id: Optional search space ID to filter results
search_space_id: The search space ID to search within
Returns:
List of documents sorted by text relevance
@ -82,27 +73,22 @@ class DocumentHybridSearchRetriever:
from sqlalchemy import func, select
from sqlalchemy.orm import joinedload
from app.db import Document, SearchSpace
from app.db import Document
# Create tsvector and tsquery for PostgreSQL full-text search
tsvector = func.to_tsvector("english", Document.content)
tsquery = func.plainto_tsquery("english", query_text)
# Build the base query with user ownership check
# Build the query filtered by search space
query = (
select(Document)
.options(joinedload(Document.search_space))
.join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(SearchSpace.user_id == user_id)
.where(Document.search_space_id == search_space_id)
.where(
tsvector.op("@@")(tsquery)
) # Only include results that match the query
)
# Add search space filter if provided
if search_space_id is not None:
query = query.where(Document.search_space_id == search_space_id)
# Add text search ranking
query = query.order_by(func.ts_rank_cd(tsvector, tsquery).desc()).limit(top_k)
@ -116,8 +102,7 @@ class DocumentHybridSearchRetriever:
self,
query_text: str,
top_k: int,
user_id: str,
search_space_id: int | None = None,
search_space_id: int,
document_type: str | None = None,
) -> list:
"""
@ -126,8 +111,7 @@ class DocumentHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
user_id: The ID of the user performing the search
search_space_id: Optional search space ID to filter results
search_space_id: The search space ID to search within
document_type: Optional document type to filter results (e.g., "FILE", "CRAWLED_URL")
"""
@ -135,7 +119,7 @@ class DocumentHybridSearchRetriever:
from sqlalchemy.orm import joinedload
from app.config import config
from app.db import Document, DocumentType, SearchSpace
from app.db import Document, DocumentType
# Get embedding for the query
embedding_model = config.embedding_model_instance
@ -149,12 +133,8 @@ class DocumentHybridSearchRetriever:
tsvector = func.to_tsvector("english", Document.content)
tsquery = func.plainto_tsquery("english", query_text)
# Base conditions for document filtering
base_conditions = [SearchSpace.user_id == user_id]
# Add search space filter if provided
if search_space_id is not None:
base_conditions.append(Document.search_space_id == search_space_id)
# Base conditions for document filtering - search space is required
base_conditions = [Document.search_space_id == search_space_id]
# Add document type filter if provided
if document_type is not None:
@ -169,17 +149,13 @@ class DocumentHybridSearchRetriever:
else:
base_conditions.append(Document.document_type == document_type)
# CTE for semantic search with user ownership check
semantic_search_cte = (
select(
Document.id,
func.rank()
.over(order_by=Document.embedding.op("<=>")(query_embedding))
.label("rank"),
)
.join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(*base_conditions)
)
# CTE for semantic search filtered by search space
semantic_search_cte = select(
Document.id,
func.rank()
.over(order_by=Document.embedding.op("<=>")(query_embedding))
.label("rank"),
).where(*base_conditions)
semantic_search_cte = (
semantic_search_cte.order_by(Document.embedding.op("<=>")(query_embedding))
@ -187,7 +163,7 @@ class DocumentHybridSearchRetriever:
.cte("semantic_search")
)
# CTE for keyword search with user ownership check
# CTE for keyword search filtered by search space
keyword_search_cte = (
select(
Document.id,
@ -195,7 +171,6 @@ class DocumentHybridSearchRetriever:
.over(order_by=func.ts_rank_cd(tsvector, tsquery).desc())
.label("rank"),
)
.join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(*base_conditions)
.where(tsvector.op("@@")(tsquery))
)

View file

@ -5,6 +5,7 @@ from .airtable_add_connector_route import (
)
from .chats_routes import router as chats_router
from .documents_routes import router as documents_router
from .editor_routes import router as editor_router
from .google_calendar_add_connector_route import (
router as google_calendar_add_connector_router,
)
@ -15,12 +16,15 @@ from .llm_config_routes import router as llm_config_router
from .logs_routes import router as logs_router
from .luma_add_connector_route import router as luma_add_connector_router
from .podcasts_routes import router as podcasts_router
from .rbac_routes import router as rbac_router
from .search_source_connectors_routes import router as search_source_connectors_router
from .search_spaces_routes import router as search_spaces_router
router = APIRouter()
router.include_router(search_spaces_router)
router.include_router(rbac_router) # RBAC routes for roles, members, invites
router.include_router(editor_router)
router.include_router(documents_router)
router.include_router(podcasts_router)
router.include_router(chats_router)

View file

@ -6,7 +6,14 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlalchemy.orm import selectinload
from app.db import Chat, SearchSpace, User, UserSearchSpacePreference, get_async_session
from app.db import (
Chat,
Permission,
SearchSpace,
SearchSpaceMembership,
User,
get_async_session,
)
from app.schemas import (
AISDKChatRequest,
ChatCreate,
@ -16,7 +23,7 @@ from app.schemas import (
)
from app.tasks.stream_connector_search_results import stream_connector_search_results
from app.users import current_active_user
from app.utils.check_ownership import check_ownership
from app.utils.rbac import check_permission
from app.utils.validators import (
validate_connectors,
validate_document_ids,
@ -59,45 +66,38 @@ async def handle_chat_data(
# print("RESQUEST DATA:", request_data)
# print("SELECTED CONNECTORS:", selected_connectors)
# Check if the search space belongs to the current user
# Check if the user has chat access to the search space
try:
await check_ownership(session, SearchSpace, search_space_id, user)
language_result = await session.execute(
select(UserSearchSpacePreference)
.options(
selectinload(UserSearchSpacePreference.search_space).selectinload(
SearchSpace.llm_configs
),
# Note: Removed selectinload for LLM relationships as they no longer exist
# Global configs (negative IDs) don't have foreign keys
# LLM configs are now fetched manually when needed
)
.filter(
UserSearchSpacePreference.search_space_id == search_space_id,
UserSearchSpacePreference.user_id == user.id,
)
await check_permission(
session,
user,
search_space_id,
Permission.CHATS_CREATE.value,
"You don't have permission to use chat in this search space",
)
user_preference = language_result.scalars().first()
# print("UserSearchSpacePreference:", user_preference)
# Get search space with LLM configs (preferences are now stored at search space level)
search_space_result = await session.execute(
select(SearchSpace)
.options(selectinload(SearchSpace.llm_configs))
.filter(SearchSpace.id == search_space_id)
)
search_space = search_space_result.scalars().first()
language = None
llm_configs = [] # Initialize to empty list
if (
user_preference
and user_preference.search_space
and user_preference.search_space.llm_configs
):
llm_configs = user_preference.search_space.llm_configs
if search_space and search_space.llm_configs:
llm_configs = search_space.llm_configs
# Manually fetch LLM configs since relationships no longer exist
# Check fast_llm, long_context_llm, and strategic_llm IDs
# Get language from configured LLM preferences
# LLM preferences are now stored on the SearchSpace model
from app.config import config as app_config
for llm_id in [
user_preference.fast_llm_id,
user_preference.long_context_llm_id,
user_preference.strategic_llm_id,
search_space.fast_llm_id,
search_space.long_context_llm_id,
search_space.strategic_llm_id,
]:
if llm_id is not None:
# Check if it's a global config (negative ID)
@ -161,8 +161,18 @@ async def create_chat(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Create a new chat.
Requires CHATS_CREATE permission.
"""
try:
await check_ownership(session, SearchSpace, chat.search_space_id, user)
await check_permission(
session,
user,
chat.search_space_id,
Permission.CHATS_CREATE.value,
"You don't have permission to create chats in this search space",
)
db_chat = Chat(**chat.model_dump())
session.add(db_chat)
await session.commit()
@ -197,6 +207,10 @@ async def read_chats(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
List chats the user has access to.
Requires CHATS_READ permission for the search space(s).
"""
# Validate pagination parameters
if skip < 0:
raise HTTPException(
@ -212,9 +226,17 @@ async def read_chats(
status_code=400, detail="search_space_id must be a positive integer"
)
try:
# Select specific fields excluding messages
query = (
select(
if search_space_id is not None:
# Check permission for specific search space
await check_permission(
session,
user,
search_space_id,
Permission.CHATS_READ.value,
"You don't have permission to read chats in this search space",
)
# Select specific fields excluding messages
query = select(
Chat.id,
Chat.type,
Chat.title,
@ -222,17 +244,28 @@ async def read_chats(
Chat.search_space_id,
Chat.created_at,
Chat.state_version,
).filter(Chat.search_space_id == search_space_id)
else:
# Get chats from all search spaces user has membership in
query = (
select(
Chat.id,
Chat.type,
Chat.title,
Chat.initial_connectors,
Chat.search_space_id,
Chat.created_at,
Chat.state_version,
)
.join(SearchSpace)
.join(SearchSpaceMembership)
.filter(SearchSpaceMembership.user_id == user.id)
)
.join(SearchSpace)
.filter(SearchSpace.user_id == user.id)
)
# Filter by search_space_id if provided
if search_space_id is not None:
query = query.filter(Chat.search_space_id == search_space_id)
result = await session.execute(query.offset(skip).limit(limit))
return result.all()
except HTTPException:
raise
except OperationalError:
raise HTTPException(
status_code=503, detail="Database operation failed. Please try again later."
@ -249,19 +282,32 @@ async def read_chat(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Get a specific chat by ID.
Requires CHATS_READ permission for the search space.
"""
try:
result = await session.execute(
select(Chat)
.join(SearchSpace)
.filter(Chat.id == chat_id, SearchSpace.user_id == user.id)
)
result = await session.execute(select(Chat).filter(Chat.id == chat_id))
chat = result.scalars().first()
if not chat:
raise HTTPException(
status_code=404,
detail="Chat not found or you don't have permission to access it",
detail="Chat not found",
)
# Check permission for the search space
await check_permission(
session,
user,
chat.search_space_id,
Permission.CHATS_READ.value,
"You don't have permission to read chats in this search space",
)
return chat
except HTTPException:
raise
except OperationalError:
raise HTTPException(
status_code=503, detail="Database operation failed. Please try again later."
@ -280,8 +326,26 @@ async def update_chat(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Update a chat.
Requires CHATS_UPDATE permission for the search space.
"""
try:
db_chat = await read_chat(chat_id, session, user)
result = await session.execute(select(Chat).filter(Chat.id == chat_id))
db_chat = result.scalars().first()
if not db_chat:
raise HTTPException(status_code=404, detail="Chat not found")
# Check permission for the search space
await check_permission(
session,
user,
db_chat.search_space_id,
Permission.CHATS_UPDATE.value,
"You don't have permission to update chats in this search space",
)
update_data = chat_update.model_dump(exclude_unset=True)
for key, value in update_data.items():
if key == "messages":
@ -318,8 +382,26 @@ async def delete_chat(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Delete a chat.
Requires CHATS_DELETE permission for the search space.
"""
try:
db_chat = await read_chat(chat_id, session, user)
result = await session.execute(select(Chat).filter(Chat.id == chat_id))
db_chat = result.scalars().first()
if not db_chat:
raise HTTPException(status_code=404, detail="Chat not found")
# Check permission for the search space
await check_permission(
session,
user,
db_chat.search_space_id,
Permission.CHATS_DELETE.value,
"You don't have permission to delete chats in this search space",
)
await session.delete(db_chat)
await session.commit()
return {"message": "Chat deleted successfully"}

View file

@ -10,7 +10,9 @@ from app.db import (
Chunk,
Document,
DocumentType,
Permission,
SearchSpace,
SearchSpaceMembership,
User,
get_async_session,
)
@ -22,7 +24,7 @@ from app.schemas import (
PaginatedResponse,
)
from app.users import current_active_user
from app.utils.check_ownership import check_ownership
from app.utils.rbac import check_permission
try:
asyncio.set_event_loop_policy(asyncio.DefaultEventLoopPolicy())
@ -44,9 +46,19 @@ async def create_documents(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Create new documents.
Requires DOCUMENTS_CREATE permission.
"""
try:
# Check if the user owns the search space
await check_ownership(session, SearchSpace, request.search_space_id, user)
# Check permission
await check_permission(
session,
user,
request.search_space_id,
Permission.DOCUMENTS_CREATE.value,
"You don't have permission to create documents in this search space",
)
if request.document_type == DocumentType.EXTENSION:
from app.tasks.celery_tasks.document_tasks import (
@ -59,8 +71,12 @@ async def create_documents(
"metadata": {
"VisitedWebPageTitle": individual_document.metadata.VisitedWebPageTitle,
"VisitedWebPageURL": individual_document.metadata.VisitedWebPageURL,
"BrowsingSessionId": individual_document.metadata.BrowsingSessionId,
"VisitedWebPageDateWithTimeInISOString": individual_document.metadata.VisitedWebPageDateWithTimeInISOString,
"VisitedWebPageVisitDurationInMilliseconds": individual_document.metadata.VisitedWebPageVisitDurationInMilliseconds,
"VisitedWebPageReffererURL": individual_document.metadata.VisitedWebPageReffererURL,
},
"content": individual_document.content,
"pageContent": individual_document.pageContent,
}
process_extension_document_task.delay(
document_dict, request.search_space_id, str(user.id)
@ -93,8 +109,19 @@ async def create_documents_file_upload(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Upload files as documents.
Requires DOCUMENTS_CREATE permission.
"""
try:
await check_ownership(session, SearchSpace, search_space_id, user)
# Check permission
await check_permission(
session,
user,
search_space_id,
Permission.DOCUMENTS_CREATE.value,
"You don't have permission to create documents in this search space",
)
if not files:
raise HTTPException(status_code=400, detail="No files provided")
@ -151,7 +178,8 @@ async def read_documents(
user: User = Depends(current_active_user),
):
"""
List documents owned by the current user, with optional filtering and pagination.
List documents the user has access to, with optional filtering and pagination.
Requires DOCUMENTS_READ permission for the search space(s).
Args:
skip: Absolute number of items to skip from the beginning. If provided, it takes precedence over 'page'.
@ -167,40 +195,49 @@ async def read_documents(
Notes:
- If both 'skip' and 'page' are provided, 'skip' is used.
- Results are scoped to documents owned by the current user.
- Results are scoped to documents in search spaces the user has membership in.
"""
try:
from sqlalchemy import func
query = (
select(Document).join(SearchSpace).filter(SearchSpace.user_id == user.id)
)
# Filter by search_space_id if provided
# If specific search_space_id, check permission
if search_space_id is not None:
query = query.filter(Document.search_space_id == search_space_id)
await check_permission(
session,
user,
search_space_id,
Permission.DOCUMENTS_READ.value,
"You don't have permission to read documents in this search space",
)
query = select(Document).filter(Document.search_space_id == search_space_id)
count_query = (
select(func.count())
.select_from(Document)
.filter(Document.search_space_id == search_space_id)
)
else:
# Get documents from all search spaces user has membership in
query = (
select(Document)
.join(SearchSpace)
.join(SearchSpaceMembership)
.filter(SearchSpaceMembership.user_id == user.id)
)
count_query = (
select(func.count())
.select_from(Document)
.join(SearchSpace)
.join(SearchSpaceMembership)
.filter(SearchSpaceMembership.user_id == user.id)
)
# Filter by document_types if provided
if document_types is not None and document_types.strip():
type_list = [t.strip() for t in document_types.split(",") if t.strip()]
if type_list:
query = query.filter(Document.document_type.in_(type_list))
# Get total count
count_query = (
select(func.count())
.select_from(Document)
.join(SearchSpace)
.filter(SearchSpace.user_id == user.id)
)
if search_space_id is not None:
count_query = count_query.filter(
Document.search_space_id == search_space_id
)
if document_types is not None and document_types.strip():
type_list = [t.strip() for t in document_types.split(",") if t.strip()]
if type_list:
count_query = count_query.filter(Document.document_type.in_(type_list))
total_result = await session.execute(count_query)
total = total_result.scalar() or 0
@ -235,6 +272,8 @@ async def read_documents(
)
return PaginatedResponse(items=api_documents, total=total)
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to fetch documents: {e!s}"
@ -254,6 +293,7 @@ async def search_documents(
):
"""
Search documents by title substring, optionally filtered by search_space_id and document_types.
Requires DOCUMENTS_READ permission for the search space(s).
Args:
title: Case-insensitive substring to match against document titles. Required.
@ -275,37 +315,48 @@ async def search_documents(
try:
from sqlalchemy import func
query = (
select(Document).join(SearchSpace).filter(SearchSpace.user_id == user.id)
)
# If specific search_space_id, check permission
if search_space_id is not None:
query = query.filter(Document.search_space_id == search_space_id)
await check_permission(
session,
user,
search_space_id,
Permission.DOCUMENTS_READ.value,
"You don't have permission to read documents in this search space",
)
query = select(Document).filter(Document.search_space_id == search_space_id)
count_query = (
select(func.count())
.select_from(Document)
.filter(Document.search_space_id == search_space_id)
)
else:
# Get documents from all search spaces user has membership in
query = (
select(Document)
.join(SearchSpace)
.join(SearchSpaceMembership)
.filter(SearchSpaceMembership.user_id == user.id)
)
count_query = (
select(func.count())
.select_from(Document)
.join(SearchSpace)
.join(SearchSpaceMembership)
.filter(SearchSpaceMembership.user_id == user.id)
)
# Only search by title (case-insensitive)
query = query.filter(Document.title.ilike(f"%{title}%"))
count_query = count_query.filter(Document.title.ilike(f"%{title}%"))
# Filter by document_types if provided
if document_types is not None and document_types.strip():
type_list = [t.strip() for t in document_types.split(",") if t.strip()]
if type_list:
query = query.filter(Document.document_type.in_(type_list))
# Get total count
count_query = (
select(func.count())
.select_from(Document)
.join(SearchSpace)
.filter(SearchSpace.user_id == user.id)
)
if search_space_id is not None:
count_query = count_query.filter(
Document.search_space_id == search_space_id
)
count_query = count_query.filter(Document.title.ilike(f"%{title}%"))
if document_types is not None and document_types.strip():
type_list = [t.strip() for t in document_types.split(",") if t.strip()]
if type_list:
count_query = count_query.filter(Document.document_type.in_(type_list))
total_result = await session.execute(count_query)
total = total_result.scalar() or 0
@ -340,6 +391,8 @@ async def search_documents(
)
return PaginatedResponse(items=api_documents, total=total)
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to search documents: {e!s}"
@ -353,7 +406,8 @@ async def get_document_type_counts(
user: User = Depends(current_active_user),
):
"""
Get counts of documents by type for the current user.
Get counts of documents by type for search spaces the user has access to.
Requires DOCUMENTS_READ permission for the search space(s).
Args:
search_space_id: If provided, restrict counts to a specific search space.
@ -366,20 +420,36 @@ async def get_document_type_counts(
try:
from sqlalchemy import func
query = (
select(Document.document_type, func.count(Document.id))
.join(SearchSpace)
.filter(SearchSpace.user_id == user.id)
.group_by(Document.document_type)
)
if search_space_id is not None:
query = query.filter(Document.search_space_id == search_space_id)
# Check permission for specific search space
await check_permission(
session,
user,
search_space_id,
Permission.DOCUMENTS_READ.value,
"You don't have permission to read documents in this search space",
)
query = (
select(Document.document_type, func.count(Document.id))
.filter(Document.search_space_id == search_space_id)
.group_by(Document.document_type)
)
else:
# Get counts from all search spaces user has membership in
query = (
select(Document.document_type, func.count(Document.id))
.join(SearchSpace)
.join(SearchSpaceMembership)
.filter(SearchSpaceMembership.user_id == user.id)
.group_by(Document.document_type)
)
result = await session.execute(query)
type_counts = dict(result.all())
return type_counts
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to fetch document type counts: {e!s}"
@ -394,6 +464,7 @@ async def get_document_by_chunk_id(
):
"""
Retrieves a document based on a chunk ID, including all its chunks ordered by creation time.
Requires DOCUMENTS_READ permission for the search space.
The document's embedding and chunk embeddings are excluded from the response.
"""
try:
@ -406,21 +477,29 @@ async def get_document_by_chunk_id(
status_code=404, detail=f"Chunk with id {chunk_id} not found"
)
# Get the associated document and verify ownership
# Get the associated document
document_result = await session.execute(
select(Document)
.options(selectinload(Document.chunks))
.join(SearchSpace)
.filter(Document.id == chunk.document_id, SearchSpace.user_id == user.id)
.filter(Document.id == chunk.document_id)
)
document = document_result.scalars().first()
if not document:
raise HTTPException(
status_code=404,
detail="Document not found or you don't have access to it",
detail="Document not found",
)
# Check permission for the search space
await check_permission(
session,
user,
document.search_space_id,
Permission.DOCUMENTS_READ.value,
"You don't have permission to read documents in this search space",
)
# Sort chunks by creation time
sorted_chunks = sorted(document.chunks, key=lambda x: x.created_at)
@ -449,11 +528,13 @@ async def read_document(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Get a specific document by ID.
Requires DOCUMENTS_READ permission for the search space.
"""
try:
result = await session.execute(
select(Document)
.join(SearchSpace)
.filter(Document.id == document_id, SearchSpace.user_id == user.id)
select(Document).filter(Document.id == document_id)
)
document = result.scalars().first()
@ -462,6 +543,15 @@ async def read_document(
status_code=404, detail=f"Document with id {document_id} not found"
)
# Check permission for the search space
await check_permission(
session,
user,
document.search_space_id,
Permission.DOCUMENTS_READ.value,
"You don't have permission to read documents in this search space",
)
# Convert database object to API-friendly format
return DocumentRead(
id=document.id,
@ -472,6 +562,8 @@ async def read_document(
created_at=document.created_at,
search_space_id=document.search_space_id,
)
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to fetch document: {e!s}"
@ -485,12 +577,13 @@ async def update_document(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Update a document.
Requires DOCUMENTS_UPDATE permission for the search space.
"""
try:
# Query the document directly instead of using read_document function
result = await session.execute(
select(Document)
.join(SearchSpace)
.filter(Document.id == document_id, SearchSpace.user_id == user.id)
select(Document).filter(Document.id == document_id)
)
db_document = result.scalars().first()
@ -499,6 +592,15 @@ async def update_document(
status_code=404, detail=f"Document with id {document_id} not found"
)
# Check permission for the search space
await check_permission(
session,
user,
db_document.search_space_id,
Permission.DOCUMENTS_UPDATE.value,
"You don't have permission to update documents in this search space",
)
update_data = document_update.model_dump(exclude_unset=True)
for key, value in update_data.items():
setattr(db_document, key, value)
@ -530,12 +632,13 @@ async def delete_document(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Delete a document.
Requires DOCUMENTS_DELETE permission for the search space.
"""
try:
# Query the document directly instead of using read_document function
result = await session.execute(
select(Document)
.join(SearchSpace)
.filter(Document.id == document_id, SearchSpace.user_id == user.id)
select(Document).filter(Document.id == document_id)
)
document = result.scalars().first()
@ -544,6 +647,15 @@ async def delete_document(
status_code=404, detail=f"Document with id {document_id} not found"
)
# Check permission for the search space
await check_permission(
session,
user,
document.search_space_id,
Permission.DOCUMENTS_DELETE.value,
"You don't have permission to delete documents in this search space",
)
await session.delete(document)
await session.commit()
return {"message": "Document deleted successfully"}

View file

@ -0,0 +1,166 @@
"""
Editor routes for BlockNote document editing.
"""
from datetime import UTC, datetime
from typing import Any
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from app.db import Document, Permission, User, get_async_session
from app.users import current_active_user
from app.utils.rbac import check_permission
router = APIRouter()
@router.get("/search-spaces/{search_space_id}/documents/{document_id}/editor-content")
async def get_editor_content(
search_space_id: int,
document_id: int,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Get document content for editing.
Returns BlockNote JSON document. If blocknote_document is NULL,
attempts to generate it from chunks (lazy migration).
Requires DOCUMENTS_READ permission.
"""
# Check RBAC permission
await check_permission(
session,
user,
search_space_id,
Permission.DOCUMENTS_READ.value,
"You don't have permission to read documents in this search space",
)
result = await session.execute(
select(Document)
.options(selectinload(Document.chunks))
.filter(
Document.id == document_id,
Document.search_space_id == search_space_id,
)
)
document = result.scalars().first()
if not document:
raise HTTPException(status_code=404, detail="Document not found")
# If blocknote_document exists, return it
if document.blocknote_document:
return {
"document_id": document.id,
"title": document.title,
"blocknote_document": document.blocknote_document,
"last_edited_at": document.last_edited_at.isoformat()
if document.last_edited_at
else None,
}
# Lazy migration: Try to generate blocknote_document from chunks
from app.utils.blocknote_converter import convert_markdown_to_blocknote
chunks = sorted(document.chunks, key=lambda c: c.id)
if not chunks:
raise HTTPException(
status_code=400,
detail="This document has no chunks and cannot be edited. Please re-upload to enable editing.",
)
# Reconstruct markdown from chunks
markdown_content = "\n\n".join(chunk.content for chunk in chunks)
if not markdown_content.strip():
raise HTTPException(
status_code=400,
detail="This document has empty content and cannot be edited.",
)
# Convert to BlockNote
blocknote_json = await convert_markdown_to_blocknote(markdown_content)
if not blocknote_json:
raise HTTPException(
status_code=500,
detail="Failed to convert document to editable format. Please try again later.",
)
# Save the generated blocknote_document (lazy migration)
document.blocknote_document = blocknote_json
document.content_needs_reindexing = False
document.last_edited_at = None
await session.commit()
return {
"document_id": document.id,
"title": document.title,
"blocknote_document": blocknote_json,
"last_edited_at": None,
}
@router.post("/search-spaces/{search_space_id}/documents/{document_id}/save")
async def save_document(
search_space_id: int,
document_id: int,
data: dict[str, Any],
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Save BlockNote document and trigger reindexing.
Called when user clicks 'Save & Exit'.
Requires DOCUMENTS_UPDATE permission.
"""
from app.tasks.celery_tasks.document_reindex_tasks import reindex_document_task
# Check RBAC permission
await check_permission(
session,
user,
search_space_id,
Permission.DOCUMENTS_UPDATE.value,
"You don't have permission to update documents in this search space",
)
result = await session.execute(
select(Document).filter(
Document.id == document_id,
Document.search_space_id == search_space_id,
)
)
document = result.scalars().first()
if not document:
raise HTTPException(status_code=404, detail="Document not found")
blocknote_document = data.get("blocknote_document")
if not blocknote_document:
raise HTTPException(status_code=400, detail="blocknote_document is required")
# Save BlockNote document
document.blocknote_document = blocknote_document
document.last_edited_at = datetime.now(UTC)
document.content_needs_reindexing = True
await session.commit()
# Queue reindex task
reindex_document_task.delay(document_id, str(user.id))
return {
"status": "saved",
"document_id": document_id,
"message": "Document saved and will be reindexed in the background",
"last_edited_at": document.last_edited_at.isoformat(),
}

View file

@ -8,67 +8,22 @@ from sqlalchemy.future import select
from app.config import config
from app.db import (
LLMConfig,
Permission,
SearchSpace,
User,
UserSearchSpacePreference,
get_async_session,
)
from app.schemas import LLMConfigCreate, LLMConfigRead, LLMConfigUpdate
from app.services.llm_service import validate_llm_config
from app.users import current_active_user
from app.utils.rbac import check_permission
router = APIRouter()
logger = logging.getLogger(__name__)
# Helper function to check search space access
async def check_search_space_access(
session: AsyncSession, search_space_id: int, user: User
) -> SearchSpace:
"""Verify that the user has access to the search space"""
result = await session.execute(
select(SearchSpace).filter(
SearchSpace.id == search_space_id, SearchSpace.user_id == user.id
)
)
search_space = result.scalars().first()
if not search_space:
raise HTTPException(
status_code=404,
detail="Search space not found or you don't have permission to access it",
)
return search_space
# Helper function to get or create user search space preference
async def get_or_create_user_preference(
session: AsyncSession, user_id, search_space_id: int
) -> UserSearchSpacePreference:
"""Get or create user preference for a search space"""
result = await session.execute(
select(UserSearchSpacePreference).filter(
UserSearchSpacePreference.user_id == user_id,
UserSearchSpacePreference.search_space_id == search_space_id,
)
# Removed selectinload options since relationships no longer exist
)
preference = result.scalars().first()
if not preference:
# Create new preference entry
preference = UserSearchSpacePreference(
user_id=user_id,
search_space_id=search_space_id,
)
session.add(preference)
await session.commit()
await session.refresh(preference)
return preference
class LLMPreferencesUpdate(BaseModel):
"""Schema for updating user LLM preferences"""
"""Schema for updating search space LLM preferences"""
long_context_llm_id: int | None = None
fast_llm_id: int | None = None
@ -76,7 +31,7 @@ class LLMPreferencesUpdate(BaseModel):
class LLMPreferencesRead(BaseModel):
"""Schema for reading user LLM preferences"""
"""Schema for reading search space LLM preferences"""
long_context_llm_id: int | None = None
fast_llm_id: int | None = None
@ -144,10 +99,19 @@ async def create_llm_config(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Create a new LLM configuration for a search space"""
"""
Create a new LLM configuration for a search space.
Requires LLM_CONFIGS_CREATE permission.
"""
try:
# Verify user has access to the search space
await check_search_space_access(session, llm_config.search_space_id, user)
# Verify user has permission to create LLM configs
await check_permission(
session,
user,
llm_config.search_space_id,
Permission.LLM_CONFIGS_CREATE.value,
"You don't have permission to create LLM configurations in this search space",
)
# Validate the LLM configuration by making a test API call
is_valid, error_message = await validate_llm_config(
@ -187,10 +151,19 @@ async def read_llm_configs(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Get all LLM configurations for a search space"""
"""
Get all LLM configurations for a search space.
Requires LLM_CONFIGS_READ permission.
"""
try:
# Verify user has access to the search space
await check_search_space_access(session, search_space_id, user)
# Verify user has permission to read LLM configs
await check_permission(
session,
user,
search_space_id,
Permission.LLM_CONFIGS_READ.value,
"You don't have permission to view LLM configurations in this search space",
)
result = await session.execute(
select(LLMConfig)
@ -213,7 +186,10 @@ async def read_llm_config(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Get a specific LLM configuration by ID"""
"""
Get a specific LLM configuration by ID.
Requires LLM_CONFIGS_READ permission.
"""
try:
# Get the LLM config
result = await session.execute(
@ -224,8 +200,14 @@ async def read_llm_config(
if not llm_config:
raise HTTPException(status_code=404, detail="LLM configuration not found")
# Verify user has access to the search space
await check_search_space_access(session, llm_config.search_space_id, user)
# Verify user has permission to read LLM configs
await check_permission(
session,
user,
llm_config.search_space_id,
Permission.LLM_CONFIGS_READ.value,
"You don't have permission to view LLM configurations in this search space",
)
return llm_config
except HTTPException:
@ -243,7 +225,10 @@ async def update_llm_config(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Update an existing LLM configuration"""
"""
Update an existing LLM configuration.
Requires LLM_CONFIGS_UPDATE permission.
"""
try:
# Get the LLM config
result = await session.execute(
@ -254,8 +239,14 @@ async def update_llm_config(
if not db_llm_config:
raise HTTPException(status_code=404, detail="LLM configuration not found")
# Verify user has access to the search space
await check_search_space_access(session, db_llm_config.search_space_id, user)
# Verify user has permission to update LLM configs
await check_permission(
session,
user,
db_llm_config.search_space_id,
Permission.LLM_CONFIGS_UPDATE.value,
"You don't have permission to update LLM configurations in this search space",
)
update_data = llm_config_update.model_dump(exclude_unset=True)
@ -311,7 +302,10 @@ async def delete_llm_config(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Delete an LLM configuration"""
"""
Delete an LLM configuration.
Requires LLM_CONFIGS_DELETE permission.
"""
try:
# Get the LLM config
result = await session.execute(
@ -322,8 +316,14 @@ async def delete_llm_config(
if not db_llm_config:
raise HTTPException(status_code=404, detail="LLM configuration not found")
# Verify user has access to the search space
await check_search_space_access(session, db_llm_config.search_space_id, user)
# Verify user has permission to delete LLM configs
await check_permission(
session,
user,
db_llm_config.search_space_id,
Permission.LLM_CONFIGS_DELETE.value,
"You don't have permission to delete LLM configurations in this search space",
)
await session.delete(db_llm_config)
await session.commit()
@ -337,28 +337,42 @@ async def delete_llm_config(
) from e
# User LLM Preferences endpoints
# Search Space LLM Preferences endpoints
@router.get(
"/search-spaces/{search_space_id}/llm-preferences",
response_model=LLMPreferencesRead,
)
async def get_user_llm_preferences(
async def get_llm_preferences(
search_space_id: int,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Get the current user's LLM preferences for a specific search space"""
"""
Get the LLM preferences for a specific search space.
LLM preferences are shared by all members of the search space.
Requires LLM_CONFIGS_READ permission.
"""
try:
# Verify user has access to the search space
await check_search_space_access(session, search_space_id, user)
# Get or create user preference for this search space
preference = await get_or_create_user_preference(
session, user.id, search_space_id
# Verify user has permission to read LLM configs
await check_permission(
session,
user,
search_space_id,
Permission.LLM_CONFIGS_READ.value,
"You don't have permission to view LLM preferences in this search space",
)
# Get the search space
result = await session.execute(
select(SearchSpace).filter(SearchSpace.id == search_space_id)
)
search_space = result.scalars().first()
if not search_space:
raise HTTPException(status_code=404, detail="Search space not found")
# Helper function to get config (global or custom)
async def get_config_for_id(config_id):
if config_id is None:
@ -391,14 +405,14 @@ async def get_user_llm_preferences(
return result.scalars().first()
# Get the configs (from DB for custom, or constructed for global)
long_context_llm = await get_config_for_id(preference.long_context_llm_id)
fast_llm = await get_config_for_id(preference.fast_llm_id)
strategic_llm = await get_config_for_id(preference.strategic_llm_id)
long_context_llm = await get_config_for_id(search_space.long_context_llm_id)
fast_llm = await get_config_for_id(search_space.fast_llm_id)
strategic_llm = await get_config_for_id(search_space.strategic_llm_id)
return {
"long_context_llm_id": preference.long_context_llm_id,
"fast_llm_id": preference.fast_llm_id,
"strategic_llm_id": preference.strategic_llm_id,
"long_context_llm_id": search_space.long_context_llm_id,
"fast_llm_id": search_space.fast_llm_id,
"strategic_llm_id": search_space.strategic_llm_id,
"long_context_llm": long_context_llm,
"fast_llm": fast_llm,
"strategic_llm": strategic_llm,
@ -415,22 +429,37 @@ async def get_user_llm_preferences(
"/search-spaces/{search_space_id}/llm-preferences",
response_model=LLMPreferencesRead,
)
async def update_user_llm_preferences(
async def update_llm_preferences(
search_space_id: int,
preferences: LLMPreferencesUpdate,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Update the current user's LLM preferences for a specific search space"""
"""
Update the LLM preferences for a specific search space.
LLM preferences are shared by all members of the search space.
Requires SETTINGS_UPDATE permission (only users with settings access can change).
"""
try:
# Verify user has access to the search space
await check_search_space_access(session, search_space_id, user)
# Get or create user preference for this search space
preference = await get_or_create_user_preference(
session, user.id, search_space_id
# Verify user has permission to update settings (not just LLM configs)
# This ensures only users with settings access can change shared LLM preferences
await check_permission(
session,
user,
search_space_id,
Permission.SETTINGS_UPDATE.value,
"You don't have permission to update LLM preferences in this search space",
)
# Get the search space
result = await session.execute(
select(SearchSpace).filter(SearchSpace.id == search_space_id)
)
search_space = result.scalars().first()
if not search_space:
raise HTTPException(status_code=404, detail="Search space not found")
# Validate that all provided LLM config IDs belong to the search space
update_data = preferences.model_dump(exclude_unset=True)
@ -485,18 +514,13 @@ async def update_user_llm_preferences(
f"Multiple languages detected in LLM selection for search_space {search_space_id}: {languages}. "
"This may affect response quality."
)
# Don't raise an exception - allow users to proceed
# raise HTTPException(
# status_code=400,
# detail="All selected LLM configurations must have the same language setting",
# )
# Update user preferences
# Update search space LLM preferences
for key, value in update_data.items():
setattr(preference, key, value)
setattr(search_space, key, value)
await session.commit()
await session.refresh(preference)
await session.refresh(search_space)
# Helper function to get config (global or custom)
async def get_config_for_id(config_id):
@ -530,15 +554,15 @@ async def update_user_llm_preferences(
return result.scalars().first()
# Get the configs (from DB for custom, or constructed for global)
long_context_llm = await get_config_for_id(preference.long_context_llm_id)
fast_llm = await get_config_for_id(preference.fast_llm_id)
strategic_llm = await get_config_for_id(preference.strategic_llm_id)
long_context_llm = await get_config_for_id(search_space.long_context_llm_id)
fast_llm = await get_config_for_id(search_space.fast_llm_id)
strategic_llm = await get_config_for_id(search_space.strategic_llm_id)
# Return updated preferences
return {
"long_context_llm_id": preference.long_context_llm_id,
"fast_llm_id": preference.fast_llm_id,
"strategic_llm_id": preference.strategic_llm_id,
"long_context_llm_id": search_space.long_context_llm_id,
"fast_llm_id": search_space.fast_llm_id,
"strategic_llm_id": search_space.strategic_llm_id,
"long_context_llm": long_context_llm,
"fast_llm": fast_llm,
"strategic_llm": strategic_llm,

View file

@ -5,10 +5,19 @@ from sqlalchemy import and_, desc
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.db import Log, LogLevel, LogStatus, SearchSpace, User, get_async_session
from app.db import (
Log,
LogLevel,
LogStatus,
Permission,
SearchSpace,
SearchSpaceMembership,
User,
get_async_session,
)
from app.schemas import LogCreate, LogRead, LogUpdate
from app.users import current_active_user
from app.utils.check_ownership import check_ownership
from app.utils.rbac import check_permission
router = APIRouter()
@ -19,10 +28,19 @@ async def create_log(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Create a new log entry."""
"""
Create a new log entry.
Note: This is typically called internally. Requires LOGS_READ permission (since logs are usually system-generated).
"""
try:
# Check if the user owns the search space
await check_ownership(session, SearchSpace, log.search_space_id, user)
# Check if the user has access to the search space
await check_permission(
session,
user,
log.search_space_id,
Permission.LOGS_READ.value,
"You don't have permission to access logs in this search space",
)
db_log = Log(**log.model_dump())
session.add(db_log)
@ -51,22 +69,38 @@ async def read_logs(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Get logs with optional filtering."""
"""
Get logs with optional filtering.
Requires LOGS_READ permission for the search space(s).
"""
try:
# Build base query - only logs from user's search spaces
query = (
select(Log)
.join(SearchSpace)
.filter(SearchSpace.user_id == user.id)
.order_by(desc(Log.created_at)) # Most recent first
)
# Apply filters
filters = []
if search_space_id is not None:
await check_ownership(session, SearchSpace, search_space_id, user)
filters.append(Log.search_space_id == search_space_id)
# Check permission for specific search space
await check_permission(
session,
user,
search_space_id,
Permission.LOGS_READ.value,
"You don't have permission to read logs in this search space",
)
# Build query for specific search space
query = (
select(Log)
.filter(Log.search_space_id == search_space_id)
.order_by(desc(Log.created_at))
)
else:
# Build base query - logs from search spaces user has membership in
query = (
select(Log)
.join(SearchSpace)
.join(SearchSpaceMembership)
.filter(SearchSpaceMembership.user_id == user.id)
.order_by(desc(Log.created_at))
)
if level is not None:
filters.append(Log.level == level)
@ -104,19 +138,26 @@ async def read_log(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Get a specific log by ID."""
"""
Get a specific log by ID.
Requires LOGS_READ permission for the search space.
"""
try:
# Get log and verify user owns the search space
result = await session.execute(
select(Log)
.join(SearchSpace)
.filter(Log.id == log_id, SearchSpace.user_id == user.id)
)
result = await session.execute(select(Log).filter(Log.id == log_id))
log = result.scalars().first()
if not log:
raise HTTPException(status_code=404, detail="Log not found")
# Check permission for the search space
await check_permission(
session,
user,
log.search_space_id,
Permission.LOGS_READ.value,
"You don't have permission to read logs in this search space",
)
return log
except HTTPException:
raise
@ -133,19 +174,26 @@ async def update_log(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Update a log entry."""
"""
Update a log entry.
Requires LOGS_READ permission (logs are typically updated by system).
"""
try:
# Get log and verify user owns the search space
result = await session.execute(
select(Log)
.join(SearchSpace)
.filter(Log.id == log_id, SearchSpace.user_id == user.id)
)
result = await session.execute(select(Log).filter(Log.id == log_id))
db_log = result.scalars().first()
if not db_log:
raise HTTPException(status_code=404, detail="Log not found")
# Check permission for the search space
await check_permission(
session,
user,
db_log.search_space_id,
Permission.LOGS_READ.value,
"You don't have permission to access logs in this search space",
)
# Update only provided fields
update_data = log_update.model_dump(exclude_unset=True)
for field, value in update_data.items():
@ -169,19 +217,26 @@ async def delete_log(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Delete a log entry."""
"""
Delete a log entry.
Requires LOGS_DELETE permission for the search space.
"""
try:
# Get log and verify user owns the search space
result = await session.execute(
select(Log)
.join(SearchSpace)
.filter(Log.id == log_id, SearchSpace.user_id == user.id)
)
result = await session.execute(select(Log).filter(Log.id == log_id))
db_log = result.scalars().first()
if not db_log:
raise HTTPException(status_code=404, detail="Log not found")
# Check permission for the search space
await check_permission(
session,
user,
db_log.search_space_id,
Permission.LOGS_DELETE.value,
"You don't have permission to delete logs in this search space",
)
await session.delete(db_log)
await session.commit()
return {"message": "Log deleted successfully"}
@ -201,10 +256,19 @@ async def get_logs_summary(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Get a summary of logs for a search space in the last X hours."""
"""
Get a summary of logs for a search space in the last X hours.
Requires LOGS_READ permission for the search space.
"""
try:
# Check ownership
await check_ownership(session, SearchSpace, search_space_id, user)
# Check permission
await check_permission(
session,
user,
search_space_id,
Permission.LOGS_READ.value,
"You don't have permission to read logs in this search space",
)
# Calculate time window
since = datetime.utcnow().replace(microsecond=0) - timedelta(hours=hours)

View file

@ -7,7 +7,15 @@ from sqlalchemy.exc import IntegrityError, SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.db import Chat, Podcast, SearchSpace, User, get_async_session
from app.db import (
Chat,
Permission,
Podcast,
SearchSpace,
SearchSpaceMembership,
User,
get_async_session,
)
from app.schemas import (
PodcastCreate,
PodcastGenerateRequest,
@ -16,7 +24,7 @@ from app.schemas import (
)
from app.tasks.podcast_tasks import generate_chat_podcast
from app.users import current_active_user
from app.utils.check_ownership import check_ownership
from app.utils.rbac import check_permission
router = APIRouter()
@ -27,8 +35,18 @@ async def create_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Create a new podcast.
Requires PODCASTS_CREATE permission.
"""
try:
await check_ownership(session, SearchSpace, podcast.search_space_id, user)
await check_permission(
session,
user,
podcast.search_space_id,
Permission.PODCASTS_CREATE.value,
"You don't have permission to create podcasts in this search space",
)
db_podcast = Podcast(**podcast.model_dump())
session.add(db_podcast)
await session.commit()
@ -58,20 +76,45 @@ async def create_podcast(
async def read_podcasts(
skip: int = 0,
limit: int = 100,
search_space_id: int | None = None,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
List podcasts the user has access to.
Requires PODCASTS_READ permission for the search space(s).
"""
if skip < 0 or limit < 1:
raise HTTPException(status_code=400, detail="Invalid pagination parameters")
try:
result = await session.execute(
select(Podcast)
.join(SearchSpace)
.filter(SearchSpace.user_id == user.id)
.offset(skip)
.limit(limit)
)
if search_space_id is not None:
# Check permission for specific search space
await check_permission(
session,
user,
search_space_id,
Permission.PODCASTS_READ.value,
"You don't have permission to read podcasts in this search space",
)
result = await session.execute(
select(Podcast)
.filter(Podcast.search_space_id == search_space_id)
.offset(skip)
.limit(limit)
)
else:
# Get podcasts from all search spaces user has membership in
result = await session.execute(
select(Podcast)
.join(SearchSpace)
.join(SearchSpaceMembership)
.filter(SearchSpaceMembership.user_id == user.id)
.offset(skip)
.limit(limit)
)
return result.scalars().all()
except HTTPException:
raise
except SQLAlchemyError:
raise HTTPException(
status_code=500, detail="Database error occurred while fetching podcasts"
@ -84,18 +127,29 @@ async def read_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Get a specific podcast by ID.
Requires PODCASTS_READ permission for the search space.
"""
try:
result = await session.execute(
select(Podcast)
.join(SearchSpace)
.filter(Podcast.id == podcast_id, SearchSpace.user_id == user.id)
)
result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id))
podcast = result.scalars().first()
if not podcast:
raise HTTPException(
status_code=404,
detail="Podcast not found or you don't have permission to access it",
detail="Podcast not found",
)
# Check permission for the search space
await check_permission(
session,
user,
podcast.search_space_id,
Permission.PODCASTS_READ.value,
"You don't have permission to read podcasts in this search space",
)
return podcast
except HTTPException as he:
raise he
@ -112,8 +166,26 @@ async def update_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Update a podcast.
Requires PODCASTS_UPDATE permission for the search space.
"""
try:
db_podcast = await read_podcast(podcast_id, session, user)
result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id))
db_podcast = result.scalars().first()
if not db_podcast:
raise HTTPException(status_code=404, detail="Podcast not found")
# Check permission for the search space
await check_permission(
session,
user,
db_podcast.search_space_id,
Permission.PODCASTS_UPDATE.value,
"You don't have permission to update podcasts in this search space",
)
update_data = podcast_update.model_dump(exclude_unset=True)
for key, value in update_data.items():
setattr(db_podcast, key, value)
@ -140,8 +212,26 @@ async def delete_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Delete a podcast.
Requires PODCASTS_DELETE permission for the search space.
"""
try:
db_podcast = await read_podcast(podcast_id, session, user)
result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id))
db_podcast = result.scalars().first()
if not db_podcast:
raise HTTPException(status_code=404, detail="Podcast not found")
# Check permission for the search space
await check_permission(
session,
user,
db_podcast.search_space_id,
Permission.PODCASTS_DELETE.value,
"You don't have permission to delete podcasts in this search space",
)
await session.delete(db_podcast)
await session.commit()
return {"message": "Podcast deleted successfully"}
@ -181,9 +271,19 @@ async def generate_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Generate a podcast from a chat or document.
Requires PODCASTS_CREATE permission.
"""
try:
# Check if the user owns the search space
await check_ownership(session, SearchSpace, request.search_space_id, user)
# Check if the user has permission to create podcasts
await check_permission(
session,
user,
request.search_space_id,
Permission.PODCASTS_CREATE.value,
"You don't have permission to create podcasts in this search space",
)
if request.type == "CHAT":
# Verify that all chat IDs belong to this user and search space
@ -251,22 +351,29 @@ async def stream_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Stream a podcast audio file."""
"""
Stream a podcast audio file.
Requires PODCASTS_READ permission for the search space.
"""
try:
# Get the podcast and check if user has access
result = await session.execute(
select(Podcast)
.join(SearchSpace)
.filter(Podcast.id == podcast_id, SearchSpace.user_id == user.id)
)
result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id))
podcast = result.scalars().first()
if not podcast:
raise HTTPException(
status_code=404,
detail="Podcast not found or you don't have permission to access it",
detail="Podcast not found",
)
# Check permission for the search space
await check_permission(
session,
user,
podcast.search_space_id,
Permission.PODCASTS_READ.value,
"You don't have permission to access podcasts in this search space",
)
# Get the file path
file_path = podcast.file_location
@ -303,12 +410,30 @@ async def get_podcast_by_chat_id(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Get a podcast by its associated chat ID.
Requires PODCASTS_READ permission for the search space.
"""
try:
# Get the podcast and check if user has access
# First get the chat to find its search space
chat_result = await session.execute(select(Chat).filter(Chat.id == chat_id))
chat = chat_result.scalars().first()
if not chat:
return None
# Check permission for the search space
await check_permission(
session,
user,
chat.search_space_id,
Permission.PODCASTS_READ.value,
"You don't have permission to read podcasts in this search space",
)
# Get the podcast
result = await session.execute(
select(Podcast)
.join(SearchSpace)
.filter(Podcast.chat_id == chat_id, SearchSpace.user_id == user.id)
select(Podcast).filter(Podcast.chat_id == chat_id)
)
podcast = result.scalars().first()

File diff suppressed because it is too large Load diff

View file

@ -22,9 +22,9 @@ from sqlalchemy.future import select
from app.connectors.github_connector import GitHubConnector
from app.db import (
Permission,
SearchSourceConnector,
SearchSourceConnectorType,
SearchSpace,
User,
async_session_maker,
get_async_session,
@ -52,12 +52,12 @@ from app.tasks.connector_indexers import (
index_slack_messages,
)
from app.users import current_active_user
from app.utils.check_ownership import check_ownership
from app.utils.periodic_scheduler import (
create_periodic_schedule,
delete_periodic_schedule,
update_periodic_schedule,
)
from app.utils.rbac import check_permission
# Set up logging
logger = logging.getLogger(__name__)
@ -108,19 +108,25 @@ async def create_search_source_connector(
):
"""
Create a new search source connector.
Requires CONNECTORS_CREATE permission.
Each search space can have only one connector of each type per user (based on search_space_id, user_id, and connector_type).
Each search space can have only one connector of each type (based on search_space_id and connector_type).
The config must contain the appropriate keys for the connector type.
"""
try:
# Check if the search space belongs to the user
await check_ownership(session, SearchSpace, search_space_id, user)
# Check if user has permission to create connectors
await check_permission(
session,
user,
search_space_id,
Permission.CONNECTORS_CREATE.value,
"You don't have permission to create connectors in this search space",
)
# Check if a connector with the same type already exists for this search space and user
# Check if a connector with the same type already exists for this search space
result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.user_id == user.id,
SearchSourceConnector.connector_type == connector.connector_type,
)
)
@ -128,7 +134,7 @@ async def create_search_source_connector(
if existing_connector:
raise HTTPException(
status_code=409,
detail=f"A connector with type {connector.connector_type} already exists in this search space. Each search space can have only one connector of each type per user.",
detail=f"A connector with type {connector.connector_type} already exists in this search space.",
)
# Prepare connector data
@ -198,22 +204,34 @@ async def read_search_source_connectors(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""List all search source connectors for the current user, optionally filtered by search space."""
"""
List all search source connectors for a search space.
Requires CONNECTORS_READ permission.
"""
try:
query = select(SearchSourceConnector).filter(
SearchSourceConnector.user_id == user.id
if search_space_id is None:
raise HTTPException(
status_code=400,
detail="search_space_id is required",
)
# Check if user has permission to read connectors
await check_permission(
session,
user,
search_space_id,
Permission.CONNECTORS_READ.value,
"You don't have permission to view connectors in this search space",
)
# Filter by search_space_id if provided
if search_space_id is not None:
# Verify the search space belongs to the user
await check_ownership(session, SearchSpace, search_space_id, user)
query = query.filter(
SearchSourceConnector.search_space_id == search_space_id
)
query = select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id
)
result = await session.execute(query.offset(skip).limit(limit))
return result.scalars().all()
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=500,
@ -229,9 +247,32 @@ async def read_search_source_connector(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Get a specific search source connector by ID."""
"""
Get a specific search source connector by ID.
Requires CONNECTORS_READ permission.
"""
try:
return await check_ownership(session, SearchSourceConnector, connector_id, user)
# Get the connector first
result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == connector_id
)
)
connector = result.scalars().first()
if not connector:
raise HTTPException(status_code=404, detail="Connector not found")
# Check permission
await check_permission(
session,
user,
connector.search_space_id,
Permission.CONNECTORS_READ.value,
"You don't have permission to view this connector",
)
return connector
except HTTPException:
raise
except Exception as e:
@ -251,10 +292,25 @@ async def update_search_source_connector(
):
"""
Update a search source connector.
Requires CONNECTORS_UPDATE permission.
Handles partial updates, including merging changes into the 'config' field.
"""
db_connector = await check_ownership(
session, SearchSourceConnector, connector_id, user
# Get the connector first
result = await session.execute(
select(SearchSourceConnector).filter(SearchSourceConnector.id == connector_id)
)
db_connector = result.scalars().first()
if not db_connector:
raise HTTPException(status_code=404, detail="Connector not found")
# Check permission
await check_permission(
session,
user,
db_connector.search_space_id,
Permission.CONNECTORS_UPDATE.value,
"You don't have permission to update this connector",
)
# Convert the sparse update data (only fields present in request) to a dict
@ -349,20 +405,19 @@ async def update_search_source_connector(
for key, value in update_data.items():
# Prevent changing connector_type if it causes a duplicate (check moved here)
if key == "connector_type" and value != db_connector.connector_type:
result = await session.execute(
check_result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id
== db_connector.search_space_id,
SearchSourceConnector.user_id == user.id,
SearchSourceConnector.connector_type == value,
SearchSourceConnector.id != connector_id,
)
)
existing_connector = result.scalars().first()
existing_connector = check_result.scalars().first()
if existing_connector:
raise HTTPException(
status_code=409,
detail=f"A connector with type {value} already exists in this search space. Each search space can have only one connector of each type per user.",
detail=f"A connector with type {value} already exists in this search space.",
)
setattr(db_connector, key, value)
@ -425,10 +480,29 @@ async def delete_search_source_connector(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Delete a search source connector."""
"""
Delete a search source connector.
Requires CONNECTORS_DELETE permission.
"""
try:
db_connector = await check_ownership(
session, SearchSourceConnector, connector_id, user
# Get the connector first
result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == connector_id
)
)
db_connector = result.scalars().first()
if not db_connector:
raise HTTPException(status_code=404, detail="Connector not found")
# Check permission
await check_permission(
session,
user,
db_connector.search_space_id,
Permission.CONNECTORS_DELETE.value,
"You don't have permission to delete this connector",
)
# Delete any periodic schedule associated with this connector
@ -473,6 +547,7 @@ async def index_connector_content(
):
"""
Index content from a connector to a search space.
Requires CONNECTORS_UPDATE permission (to trigger indexing).
Currently supports:
- SLACK_CONNECTOR: Indexes messages from all accessible Slack channels
@ -488,20 +563,29 @@ async def index_connector_content(
Args:
connector_id: ID of the connector to use
search_space_id: ID of the search space to store indexed content
background_tasks: FastAPI background tasks
Returns:
Dictionary with indexing status
"""
try:
# Check if the connector belongs to the user
connector = await check_ownership(
session, SearchSourceConnector, connector_id, user
# Get the connector first
result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == connector_id
)
)
connector = result.scalars().first()
# Check if the search space belongs to the user
_search_space = await check_ownership(
session, SearchSpace, search_space_id, user
if not connector:
raise HTTPException(status_code=404, detail="Connector not found")
# Check if user has permission to update connectors (indexing is an update operation)
await check_permission(
session,
user,
search_space_id,
Permission.CONNECTORS_UPDATE.value,
"You don't have permission to index content in this search space",
)
# Handle different connector types

View file

@ -1,18 +1,77 @@
import logging
from pathlib import Path
import yaml
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy import func
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.db import SearchSpace, User, get_async_session
from app.schemas import SearchSpaceCreate, SearchSpaceRead, SearchSpaceUpdate
from app.db import (
Permission,
SearchSpace,
SearchSpaceMembership,
SearchSpaceRole,
User,
get_async_session,
get_default_roles_config,
)
from app.schemas import (
SearchSpaceCreate,
SearchSpaceRead,
SearchSpaceUpdate,
SearchSpaceWithStats,
)
from app.users import current_active_user
from app.utils.check_ownership import check_ownership
from app.utils.rbac import check_permission, check_search_space_access
logger = logging.getLogger(__name__)
router = APIRouter()
async def create_default_roles_and_membership(
session: AsyncSession,
search_space_id: int,
owner_user_id,
) -> None:
"""
Create default system roles for a search space and add the owner as a member.
Args:
session: Database session
search_space_id: The ID of the newly created search space
owner_user_id: The UUID of the user who created the search space
"""
# Create default roles
default_roles = get_default_roles_config()
owner_role_id = None
for role_config in default_roles:
db_role = SearchSpaceRole(
name=role_config["name"],
description=role_config["description"],
permissions=role_config["permissions"],
is_default=role_config["is_default"],
is_system_role=role_config["is_system_role"],
search_space_id=search_space_id,
)
session.add(db_role)
await session.flush() # Get the ID
if role_config["name"] == "Owner":
owner_role_id = db_role.id
# Create owner membership
owner_membership = SearchSpaceMembership(
user_id=owner_user_id,
search_space_id=search_space_id,
role_id=owner_role_id,
is_owner=True,
)
session.add(owner_membership)
@router.post("/searchspaces", response_model=SearchSpaceRead)
async def create_search_space(
search_space: SearchSpaceCreate,
@ -27,6 +86,11 @@ async def create_search_space(
db_search_space = SearchSpace(**search_space_data, user_id=user.id)
session.add(db_search_space)
await session.flush() # Get the search space ID
# Create default roles and owner membership
await create_default_roles_and_membership(session, db_search_space.id, user.id)
await session.commit()
await session.refresh(db_search_space)
return db_search_space
@ -34,26 +98,86 @@ async def create_search_space(
raise
except Exception as e:
await session.rollback()
logger.error(f"Failed to create search space: {e!s}", exc_info=True)
raise HTTPException(
status_code=500, detail=f"Failed to create search space: {e!s}"
) from e
@router.get("/searchspaces", response_model=list[SearchSpaceRead])
@router.get("/searchspaces", response_model=list[SearchSpaceWithStats])
async def read_search_spaces(
skip: int = 0,
limit: int = 200,
owned_only: bool = False,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Get all search spaces the user has access to, with member count and ownership info.
Args:
skip: Number of items to skip
limit: Maximum number of items to return
owned_only: If True, only return search spaces owned by the user.
If False (default), return all search spaces the user has access to.
"""
try:
result = await session.execute(
select(SearchSpace)
.filter(SearchSpace.user_id == user.id)
.offset(skip)
.limit(limit)
)
return result.scalars().all()
if owned_only:
# Return only search spaces where user is the original creator (user_id)
result = await session.execute(
select(SearchSpace)
.filter(SearchSpace.user_id == user.id)
.offset(skip)
.limit(limit)
)
else:
# Return all search spaces the user has membership in
result = await session.execute(
select(SearchSpace)
.join(SearchSpaceMembership)
.filter(SearchSpaceMembership.user_id == user.id)
.offset(skip)
.limit(limit)
)
search_spaces = result.scalars().all()
# Get member counts and ownership info for each search space
search_spaces_with_stats = []
for space in search_spaces:
# Get member count
count_result = await session.execute(
select(func.count(SearchSpaceMembership.id)).filter(
SearchSpaceMembership.search_space_id == space.id
)
)
member_count = count_result.scalar() or 1
# Check if current user is owner
ownership_result = await session.execute(
select(SearchSpaceMembership).filter(
SearchSpaceMembership.search_space_id == space.id,
SearchSpaceMembership.user_id == user.id,
SearchSpaceMembership.is_owner == True, # noqa: E712
)
)
is_owner = ownership_result.scalars().first() is not None
search_spaces_with_stats.append(
SearchSpaceWithStats(
id=space.id,
name=space.name,
description=space.description,
created_at=space.created_at,
user_id=space.user_id,
citations_enabled=space.citations_enabled,
qna_custom_instructions=space.qna_custom_instructions,
member_count=member_count,
is_owner=is_owner,
)
)
return search_spaces_with_stats
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to fetch search spaces: {e!s}"
@ -97,10 +221,22 @@ async def read_search_space(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Get a specific search space by ID.
Requires SETTINGS_VIEW permission or membership.
"""
try:
search_space = await check_ownership(
session, SearchSpace, search_space_id, user
# Check if user has access (is a member)
await check_search_space_access(session, user, search_space_id)
result = await session.execute(
select(SearchSpace).filter(SearchSpace.id == search_space_id)
)
search_space = result.scalars().first()
if not search_space:
raise HTTPException(status_code=404, detail="Search space not found")
return search_space
except HTTPException:
@ -118,10 +254,28 @@ async def update_search_space(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Update a search space.
Requires SETTINGS_UPDATE permission.
"""
try:
db_search_space = await check_ownership(
session, SearchSpace, search_space_id, user
# Check permission
await check_permission(
session,
user,
search_space_id,
Permission.SETTINGS_UPDATE.value,
"You don't have permission to update this search space",
)
result = await session.execute(
select(SearchSpace).filter(SearchSpace.id == search_space_id)
)
db_search_space = result.scalars().first()
if not db_search_space:
raise HTTPException(status_code=404, detail="Search space not found")
update_data = search_space_update.model_dump(exclude_unset=True)
for key, value in update_data.items():
setattr(db_search_space, key, value)
@ -143,10 +297,28 @@ async def delete_search_space(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
Delete a search space.
Requires SETTINGS_DELETE permission (only owners have this by default).
"""
try:
db_search_space = await check_ownership(
session, SearchSpace, search_space_id, user
# Check permission - only those with SETTINGS_DELETE can delete
await check_permission(
session,
user,
search_space_id,
Permission.SETTINGS_DELETE.value,
"You don't have permission to delete this search space",
)
result = await session.execute(
select(SearchSpace).filter(SearchSpace.id == search_space_id)
)
db_search_space = result.scalars().first()
if not db_search_space:
raise HTTPException(status_code=404, detail="Search space not found")
await session.delete(db_search_space)
await session.commit()
return {"message": "Search space deleted successfully"}

View file

@ -27,6 +27,23 @@ from .podcasts import (
PodcastRead,
PodcastUpdate,
)
from .rbac_schemas import (
InviteAcceptRequest,
InviteAcceptResponse,
InviteCreate,
InviteInfoResponse,
InviteRead,
InviteUpdate,
MembershipRead,
MembershipReadWithUser,
MembershipUpdate,
PermissionInfo,
PermissionsListResponse,
RoleCreate,
RoleRead,
RoleUpdate,
UserSearchSpaceAccess,
)
from .search_source_connector import (
SearchSourceConnectorBase,
SearchSourceConnectorCreate,
@ -38,6 +55,7 @@ from .search_space import (
SearchSpaceCreate,
SearchSpaceRead,
SearchSpaceUpdate,
SearchSpaceWithStats,
)
from .users import UserCreate, UserRead, UserUpdate
@ -60,6 +78,13 @@ __all__ = [
"ExtensionDocumentContent",
"ExtensionDocumentMetadata",
"IDModel",
# RBAC schemas
"InviteAcceptRequest",
"InviteAcceptResponse",
"InviteCreate",
"InviteInfoResponse",
"InviteRead",
"InviteUpdate",
"LLMConfigBase",
"LLMConfigCreate",
"LLMConfigRead",
@ -69,12 +94,20 @@ __all__ = [
"LogFilter",
"LogRead",
"LogUpdate",
"MembershipRead",
"MembershipReadWithUser",
"MembershipUpdate",
"PaginatedResponse",
"PermissionInfo",
"PermissionsListResponse",
"PodcastBase",
"PodcastCreate",
"PodcastGenerateRequest",
"PodcastRead",
"PodcastUpdate",
"RoleCreate",
"RoleRead",
"RoleUpdate",
"SearchSourceConnectorBase",
"SearchSourceConnectorCreate",
"SearchSourceConnectorRead",
@ -83,8 +116,10 @@ __all__ = [
"SearchSpaceCreate",
"SearchSpaceRead",
"SearchSpaceUpdate",
"SearchSpaceWithStats",
"TimestampModel",
"UserCreate",
"UserRead",
"UserSearchSpaceAccess",
"UserUpdate",
]

View file

@ -0,0 +1,186 @@
"""
Pydantic schemas for RBAC (Role-Based Access Control) endpoints.
"""
from datetime import datetime
from uuid import UUID

from pydantic import BaseModel, ConfigDict, Field
# ============ Role Schemas ============
class RoleBase(BaseModel):
    """Base schema for roles."""

    # Human-readable role name, e.g. "Owner".
    name: str = Field(..., min_length=1, max_length=100)
    description: str | None = Field(None, max_length=500)
    # Permission identifier strings granted to this role.
    permissions: list[str] = Field(default_factory=list)
    # Whether this role is the default one — presumably assigned to new
    # members when an invite carries no explicit role; TODO confirm.
    is_default: bool = False


class RoleCreate(RoleBase):
    """Schema for creating a new role; identical to RoleBase."""

    pass


class RoleUpdate(BaseModel):
    """Schema for updating a role (partial update); None fields are left unchanged."""

    name: str | None = Field(None, min_length=1, max_length=100)
    description: str | None = Field(None, max_length=500)
    permissions: list[str] | None = None
    is_default: bool | None = None
class RoleRead(RoleBase):
    """Schema for reading a role."""

    id: int
    search_space_id: int
    # True for the built-in roles created with the search space.
    is_system_role: bool
    created_at: datetime

    # Pydantic v2 config; from_attributes allows construction directly from
    # ORM instances. Matches the ConfigDict style used by SearchSpaceRead.
    model_config = ConfigDict(from_attributes=True)
# ============ Membership Schemas ============
class MembershipBase(BaseModel):
    """Base schema for memberships (no shared fields yet)."""

    pass


class MembershipUpdate(BaseModel):
    """Schema for updating a membership (change role)."""

    # New role to assign; None leaves the current role unchanged.
    role_id: int | None = None
class MembershipRead(BaseModel):
    """Schema for reading a membership."""

    id: int
    user_id: UUID
    search_space_id: int
    # None when the member has no role attached.
    role_id: int | None
    is_owner: bool
    joined_at: datetime
    created_at: datetime

    # Nested role info
    role: RoleRead | None = None

    # User email (populated separately)
    user_email: str | None = None

    # Pydantic v2 config; from_attributes allows construction directly from
    # ORM instances. Matches the ConfigDict style used by SearchSpaceRead.
    model_config = ConfigDict(from_attributes=True)
class MembershipReadWithUser(MembershipRead):
    """Schema for reading a membership with user details.

    ``user_email`` is already declared (with the same type and default) on
    MembershipRead, so only the activity flag is added here — the original
    redeclaration was redundant.
    """

    user_is_active: bool | None = None
# ============ Invite Schemas ============
class InviteBase(BaseModel):
    """Base schema for invites."""

    # Optional human-readable label for the invite.
    name: str | None = Field(None, max_length=100)
    # Role granted on acceptance; None presumably falls back to the space's
    # default role — TODO confirm against the invite endpoints.
    role_id: int | None = None
    # Expiry timestamp; None means the invite never expires.
    expires_at: datetime | None = None
    # Maximum number of times the invite can be used; None means unlimited.
    max_uses: int | None = Field(None, ge=1)


class InviteCreate(InviteBase):
    """Schema for creating a new invite; identical to InviteBase."""

    pass


class InviteUpdate(BaseModel):
    """Schema for updating an invite (partial update); None fields are left unchanged."""

    name: str | None = Field(None, max_length=100)
    role_id: int | None = None
    expires_at: datetime | None = None
    max_uses: int | None = Field(None, ge=1)
    is_active: bool | None = None
class InviteRead(InviteBase):
    """Schema for reading an invite."""

    id: int
    # Opaque code users present to join the search space.
    invite_code: str
    search_space_id: int
    created_by_id: UUID | None
    # How many times the invite has been accepted so far.
    uses_count: int
    is_active: bool
    created_at: datetime

    # Nested role info
    role: RoleRead | None = None

    # Pydantic v2 config; from_attributes allows construction directly from
    # ORM instances. Matches the ConfigDict style used by SearchSpaceRead.
    model_config = ConfigDict(from_attributes=True)
class InviteAcceptRequest(BaseModel):
    """Schema for accepting an invite."""

    invite_code: str = Field(..., min_length=1)


class InviteAcceptResponse(BaseModel):
    """Response schema for accepting an invite."""

    message: str
    search_space_id: int
    search_space_name: str
    # Name of the role granted by the invite; None if no role was attached.
    role_name: str | None


class InviteInfoResponse(BaseModel):
    """Response schema for getting invite info (public endpoint)."""

    search_space_name: str
    role_name: str | None
    # False presumably when the invite is expired, exhausted, or
    # deactivated — TODO confirm the exact conditions at the endpoint.
    is_valid: bool
    # Optional explanation, e.g. why the invite is invalid.
    message: str | None = None
# ============ Permission Schemas ============
class PermissionInfo(BaseModel):
    """Schema for permission information."""

    # Machine-readable permission identifier (presumably the same strings
    # carried in RoleBase.permissions — verify against the Permission enum).
    value: str
    # Human-readable permission name.
    name: str
    # Grouping used when listing permissions.
    category: str


class PermissionsListResponse(BaseModel):
    """Response schema for listing all available permissions."""

    permissions: list[PermissionInfo]
# ============ User Access Info ============
class UserSearchSpaceAccess(BaseModel):
    """Schema for user's access info in a search space."""

    search_space_id: int
    search_space_name: str
    # True when the user holds the owner membership for this space.
    is_owner: bool
    # Name of the user's role; None if no role is assigned.
    role_name: str | None
    # Effective permission identifiers for the user in this space.
    permissions: list[str]

View file

@ -34,3 +34,10 @@ class SearchSpaceRead(SearchSpaceBase, IDModel, TimestampModel):
qna_custom_instructions: str | None = None
model_config = ConfigDict(from_attributes=True)
class SearchSpaceWithStats(SearchSpaceRead):
    """Extended search space info with member count and ownership status."""

    # Total number of memberships in the space; defaults to 1 (the owner).
    member_count: int = 1
    # Whether the requesting user holds an is_owner membership in this space.
    is_owner: bool = False

View file

@ -15,18 +15,17 @@ from app.db import (
Document,
SearchSourceConnector,
SearchSourceConnectorType,
SearchSpace,
)
from app.retriver.chunks_hybrid_search import ChucksHybridSearchRetriever
from app.retriver.documents_hybrid_search import DocumentHybridSearchRetriever
class ConnectorService:
def __init__(self, session: AsyncSession, user_id: str | None = None):
def __init__(self, session: AsyncSession, search_space_id: int | None = None):
self.session = session
self.chunk_retriever = ChucksHybridSearchRetriever(session)
self.document_retriever = DocumentHybridSearchRetriever(session)
self.user_id = user_id
self.search_space_id = search_space_id
self.source_id_counter = (
100000 # High starting value to avoid collisions with existing IDs
)
@ -36,23 +35,22 @@ class ConnectorService:
async def initialize_counter(self):
"""
Initialize the source_id_counter based on the total number of chunks for the user.
Initialize the source_id_counter based on the total number of chunks for the search space.
This ensures unique IDs across different sessions.
"""
if self.user_id:
if self.search_space_id:
try:
# Count total chunks for documents belonging to this user
# Count total chunks for documents belonging to this search space
result = await self.session.execute(
select(func.count(Chunk.id))
.join(Document)
.join(SearchSpace)
.filter(SearchSpace.user_id == self.user_id)
.filter(Document.search_space_id == self.search_space_id)
)
chunk_count = result.scalar() or 0
self.source_id_counter = chunk_count + 1
print(
f"Initialized source_id_counter to {self.source_id_counter} for user {self.user_id}"
f"Initialized source_id_counter to {self.source_id_counter} for search space {self.search_space_id}"
)
except Exception as e:
print(f"Error initializing source_id_counter: {e!s}")
@ -62,7 +60,6 @@ class ConnectorService:
async def search_crawled_urls(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -72,7 +69,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@ -84,7 +80,6 @@ class ConnectorService:
crawled_urls_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="CRAWLED_URL",
)
@ -92,7 +87,6 @@ class ConnectorService:
crawled_urls_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="CRAWLED_URL",
)
@ -171,7 +165,6 @@ class ConnectorService:
async def search_files(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -186,7 +179,6 @@ class ConnectorService:
files_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="FILE",
)
@ -194,7 +186,6 @@ class ConnectorService:
files_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="FILE",
)
@ -274,43 +265,35 @@ class ConnectorService:
async def get_connector_by_type(
self,
user_id: str,
connector_type: SearchSourceConnectorType,
search_space_id: int | None = None,
search_space_id: int,
) -> SearchSourceConnector | None:
"""
Get a connector by type for a specific user and optionally a search space
Get a connector by type for a specific search space
Args:
user_id: The user's ID
connector_type: The connector type to retrieve
search_space_id: Optional search space ID to filter by
search_space_id: The search space ID to filter by
Returns:
Optional[SearchSourceConnector]: The connector if found, None otherwise
"""
query = select(SearchSourceConnector).filter(
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.connector_type == connector_type,
)
if search_space_id is not None:
query = query.filter(
SearchSourceConnector.search_space_id == search_space_id
)
result = await self.session.execute(query)
return result.scalars().first()
async def search_tavily(
self, user_query: str, user_id: str, search_space_id: int, top_k: int = 20
self, user_query: str, search_space_id: int, top_k: int = 20
) -> tuple:
"""
Search using Tavily API and return both the source information and documents
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID
top_k: Maximum number of results to return
@ -319,7 +302,7 @@ class ConnectorService:
"""
# Get Tavily connector configuration
tavily_connector = await self.get_connector_by_type(
user_id, SearchSourceConnectorType.TAVILY_API, search_space_id
SearchSourceConnectorType.TAVILY_API, search_space_id
)
if not tavily_connector:
@ -412,7 +395,6 @@ class ConnectorService:
async def search_searxng(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
) -> tuple:
@ -420,7 +402,7 @@ class ConnectorService:
Search using a configured SearxNG instance and return both sources and documents.
"""
searx_connector = await self.get_connector_by_type(
user_id, SearchSourceConnectorType.SEARXNG_API, search_space_id
SearchSourceConnectorType.SEARXNG_API, search_space_id
)
if not searx_connector:
@ -598,7 +580,6 @@ class ConnectorService:
async def search_baidu(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
) -> tuple:
@ -610,7 +591,6 @@ class ConnectorService:
Args:
user_query: User's search query
user_id: User ID
search_space_id: Search space ID
top_k: Maximum number of results to return
@ -619,7 +599,7 @@ class ConnectorService:
"""
# Get Baidu connector configuration
baidu_connector = await self.get_connector_by_type(
user_id, SearchSourceConnectorType.BAIDU_SEARCH_API, search_space_id
SearchSourceConnectorType.BAIDU_SEARCH_API, search_space_id
)
if not baidu_connector:
@ -824,7 +804,6 @@ class ConnectorService:
async def search_slack(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -839,7 +818,6 @@ class ConnectorService:
slack_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="SLACK_CONNECTOR",
)
@ -847,7 +825,6 @@ class ConnectorService:
slack_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="SLACK_CONNECTOR",
)
@ -912,7 +889,6 @@ class ConnectorService:
async def search_notion(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -922,7 +898,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
@ -933,7 +908,6 @@ class ConnectorService:
notion_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="NOTION_CONNECTOR",
)
@ -941,7 +915,6 @@ class ConnectorService:
notion_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="NOTION_CONNECTOR",
)
@ -1009,7 +982,6 @@ class ConnectorService:
async def search_extension(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -1019,7 +991,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
@ -1030,7 +1001,6 @@ class ConnectorService:
extension_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="EXTENSION",
)
@ -1038,7 +1008,6 @@ class ConnectorService:
extension_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="EXTENSION",
)
@ -1130,7 +1099,6 @@ class ConnectorService:
async def search_youtube(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -1140,7 +1108,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
@ -1151,7 +1118,6 @@ class ConnectorService:
youtube_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="YOUTUBE_VIDEO",
)
@ -1159,7 +1125,6 @@ class ConnectorService:
youtube_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="YOUTUBE_VIDEO",
)
@ -1227,7 +1192,6 @@ class ConnectorService:
async def search_github(
self,
user_query: str,
user_id: int,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -1242,7 +1206,6 @@ class ConnectorService:
github_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="GITHUB_CONNECTOR",
)
@ -1250,7 +1213,6 @@ class ConnectorService:
github_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="GITHUB_CONNECTOR",
)
@ -1302,7 +1264,6 @@ class ConnectorService:
async def search_linear(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -1312,7 +1273,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
@ -1323,7 +1283,6 @@ class ConnectorService:
linear_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="LINEAR_CONNECTOR",
)
@ -1331,7 +1290,6 @@ class ConnectorService:
linear_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="LINEAR_CONNECTOR",
)
@ -1411,7 +1369,6 @@ class ConnectorService:
async def search_jira(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -1421,7 +1378,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@ -1433,7 +1389,6 @@ class ConnectorService:
jira_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="JIRA_CONNECTOR",
)
@ -1441,7 +1396,6 @@ class ConnectorService:
jira_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="JIRA_CONNECTOR",
)
@ -1532,7 +1486,6 @@ class ConnectorService:
async def search_google_calendar(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -1542,7 +1495,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@ -1554,7 +1506,6 @@ class ConnectorService:
calendar_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="GOOGLE_CALENDAR_CONNECTOR",
)
@ -1562,7 +1513,6 @@ class ConnectorService:
calendar_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="GOOGLE_CALENDAR_CONNECTOR",
)
@ -1665,7 +1615,6 @@ class ConnectorService:
async def search_airtable(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -1675,7 +1624,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@ -1687,7 +1635,6 @@ class ConnectorService:
airtable_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="AIRTABLE_CONNECTOR",
)
@ -1695,7 +1642,6 @@ class ConnectorService:
airtable_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="AIRTABLE_CONNECTOR",
)
@ -1753,7 +1699,6 @@ class ConnectorService:
async def search_google_gmail(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -1763,7 +1708,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@ -1775,7 +1719,6 @@ class ConnectorService:
gmail_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="GOOGLE_GMAIL_CONNECTOR",
)
@ -1783,7 +1726,6 @@ class ConnectorService:
gmail_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="GOOGLE_GMAIL_CONNECTOR",
)
@ -1877,7 +1819,6 @@ class ConnectorService:
async def search_confluence(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -1887,7 +1828,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@ -1899,7 +1839,6 @@ class ConnectorService:
confluence_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="CONFLUENCE_CONNECTOR",
)
@ -1907,7 +1846,6 @@ class ConnectorService:
confluence_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="CONFLUENCE_CONNECTOR",
)
@ -1972,7 +1910,6 @@ class ConnectorService:
async def search_clickup(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -1982,7 +1919,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@ -1994,7 +1930,6 @@ class ConnectorService:
clickup_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="CLICKUP_CONNECTOR",
)
@ -2002,7 +1937,6 @@ class ConnectorService:
clickup_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="CLICKUP_CONNECTOR",
)
@ -2088,7 +2022,6 @@ class ConnectorService:
async def search_linkup(
self,
user_query: str,
user_id: str,
search_space_id: int,
mode: str = "standard",
) -> tuple:
@ -2097,7 +2030,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID
mode: Search depth mode, can be "standard" or "deep"
@ -2106,7 +2038,7 @@ class ConnectorService:
"""
# Get Linkup connector configuration
linkup_connector = await self.get_connector_by_type(
user_id, SearchSourceConnectorType.LINKUP_API, search_space_id
SearchSourceConnectorType.LINKUP_API, search_space_id
)
if not linkup_connector:
@ -2211,7 +2143,6 @@ class ConnectorService:
async def search_discord(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -2221,7 +2152,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
@ -2232,7 +2162,6 @@ class ConnectorService:
discord_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="DISCORD_CONNECTOR",
)
@ -2240,7 +2169,6 @@ class ConnectorService:
discord_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="DISCORD_CONNECTOR",
)
@ -2308,7 +2236,6 @@ class ConnectorService:
async def search_luma(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -2318,7 +2245,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@ -2330,7 +2256,6 @@ class ConnectorService:
luma_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="LUMA_CONNECTOR",
)
@ -2338,7 +2263,6 @@ class ConnectorService:
luma_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="LUMA_CONNECTOR",
)
@ -2466,7 +2390,6 @@ class ConnectorService:
async def search_elasticsearch(
self,
user_query: str,
user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@ -2476,7 +2399,6 @@ class ConnectorService:
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@ -2488,7 +2410,6 @@ class ConnectorService:
elasticsearch_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="ELASTICSEARCH_CONNECTOR",
)
@ -2496,7 +2417,6 @@ class ConnectorService:
elasticsearch_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="ELASTICSEARCH_CONNECTOR",
)

View file

@ -7,7 +7,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.db import LLMConfig, UserSearchSpacePreference
from app.db import LLMConfig, SearchSpace
# Configure litellm to automatically drop unsupported parameters
litellm.drop_params = True
@ -144,15 +144,16 @@ async def validate_llm_config(
return False, error_msg
async def get_user_llm_instance(
session: AsyncSession, user_id: str, search_space_id: int, role: str
async def get_search_space_llm_instance(
session: AsyncSession, search_space_id: int, role: str
) -> ChatLiteLLM | None:
"""
Get a ChatLiteLLM instance for a specific user, search space, and role.
Get a ChatLiteLLM instance for a specific search space and role.
LLM preferences are stored at the search space level and shared by all members.
Args:
session: Database session
user_id: User ID
search_space_id: Search Space ID
role: LLM role ('long_context', 'fast', or 'strategic')
@ -160,37 +161,30 @@ async def get_user_llm_instance(
ChatLiteLLM instance or None if not found
"""
try:
# Get user's LLM preferences for this search space
# Get the search space with its LLM preferences
result = await session.execute(
select(UserSearchSpacePreference).where(
UserSearchSpacePreference.user_id == user_id,
UserSearchSpacePreference.search_space_id == search_space_id,
)
select(SearchSpace).where(SearchSpace.id == search_space_id)
)
preference = result.scalars().first()
search_space = result.scalars().first()
if not preference:
logger.error(
f"No LLM preferences found for user {user_id} in search space {search_space_id}"
)
if not search_space:
logger.error(f"Search space {search_space_id} not found")
return None
# Get the appropriate LLM config ID based on role
llm_config_id = None
if role == LLMRole.LONG_CONTEXT:
llm_config_id = preference.long_context_llm_id
llm_config_id = search_space.long_context_llm_id
elif role == LLMRole.FAST:
llm_config_id = preference.fast_llm_id
llm_config_id = search_space.fast_llm_id
elif role == LLMRole.STRATEGIC:
llm_config_id = preference.strategic_llm_id
llm_config_id = search_space.strategic_llm_id
else:
logger.error(f"Invalid LLM role: {role}")
return None
if not llm_config_id:
logger.error(
f"No {role} LLM configured for user {user_id} in search space {search_space_id}"
)
logger.error(f"No {role} LLM configured for search space {search_space_id}")
return None
# Check if this is a global config (negative ID)
@ -331,31 +325,63 @@ async def get_user_llm_instance(
except Exception as e:
logger.error(
f"Error getting LLM instance for user {user_id}, role {role}: {e!s}"
f"Error getting LLM instance for search space {search_space_id}, role {role}: {e!s}"
)
return None
async def get_long_context_llm(
session: AsyncSession, search_space_id: int
) -> ChatLiteLLM | None:
"""Get the search space's long context LLM instance."""
return await get_search_space_llm_instance(
session, search_space_id, LLMRole.LONG_CONTEXT
)
async def get_fast_llm(
session: AsyncSession, search_space_id: int
) -> ChatLiteLLM | None:
"""Get the search space's fast LLM instance."""
return await get_search_space_llm_instance(session, search_space_id, LLMRole.FAST)
async def get_strategic_llm(
session: AsyncSession, search_space_id: int
) -> ChatLiteLLM | None:
"""Get the search space's strategic LLM instance."""
return await get_search_space_llm_instance(
session, search_space_id, LLMRole.STRATEGIC
)
# Backward-compatible aliases (deprecated - will be removed in future versions)
async def get_user_llm_instance(
session: AsyncSession, user_id: str, search_space_id: int, role: str
) -> ChatLiteLLM | None:
"""
Deprecated: Use get_search_space_llm_instance instead.
LLM preferences are now stored at the search space level, not per-user.
"""
return await get_search_space_llm_instance(session, search_space_id, role)
async def get_user_long_context_llm(
session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
"""Get user's long context LLM instance for a specific search space."""
return await get_user_llm_instance(
session, user_id, search_space_id, LLMRole.LONG_CONTEXT
)
"""Deprecated: Use get_long_context_llm instead."""
return await get_long_context_llm(session, search_space_id)
async def get_user_fast_llm(
session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
"""Get user's fast LLM instance for a specific search space."""
return await get_user_llm_instance(session, user_id, search_space_id, LLMRole.FAST)
"""Deprecated: Use get_fast_llm instead."""
return await get_fast_llm(session, search_space_id)
async def get_user_strategic_llm(
session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
"""Get user's strategic LLM instance for a specific search space."""
return await get_user_llm_instance(
session, user_id, search_space_id, LLMRole.STRATEGIC
)
"""Deprecated: Use get_strategic_llm instead."""
return await get_strategic_llm(session, search_space_id)

View file

@ -4,7 +4,7 @@ from typing import Any
from langchain.schema import AIMessage, HumanMessage, SystemMessage
from sqlalchemy.ext.asyncio import AsyncSession
from app.services.llm_service import get_user_strategic_llm
from app.services.llm_service import get_strategic_llm
class QueryService:
@ -16,19 +16,17 @@ class QueryService:
async def reformulate_query_with_chat_history(
user_query: str,
session: AsyncSession,
user_id: str,
search_space_id: int,
chat_history_str: str | None = None,
) -> str:
"""
Reformulate the user query using the user's strategic LLM to make it more
Reformulate the user query using the search space's strategic LLM to make it more
effective for information retrieval and research purposes.
Args:
user_query: The original user query
session: Database session for accessing user LLM configs
user_id: User ID to get their specific LLM configuration
search_space_id: Search Space ID to get user's LLM preferences
session: Database session for accessing LLM configs
search_space_id: Search Space ID to get LLM preferences
chat_history_str: Optional chat history string
Returns:
@ -38,11 +36,11 @@ class QueryService:
return user_query
try:
# Get the user's strategic LLM instance
llm = await get_user_strategic_llm(session, user_id, search_space_id)
# Get the search space's strategic LLM instance
llm = await get_strategic_llm(session, search_space_id)
if not llm:
print(
f"Warning: No strategic LLM configured for user {user_id} in search space {search_space_id}. Using original query."
f"Warning: No strategic LLM configured for search space {search_space_id}. Using original query."
)
return user_query

View file

@ -0,0 +1,168 @@
"""Celery tasks for populating blocknote_document for existing documents."""
import logging
from sqlalchemy import select
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from sqlalchemy.orm import selectinload
from sqlalchemy.pool import NullPool
from app.celery_app import celery_app
from app.config import config
from app.db import Document
from app.utils.blocknote_converter import convert_markdown_to_blocknote
logger = logging.getLogger(__name__)
def get_celery_session_maker():
    """Build a fresh async session maker for use inside Celery workers.

    Celery tasks run their own event loop, so the app's default session
    maker (bound to the main loop) cannot be reused. A new engine with
    NullPool avoids sharing pooled connections across event loops.
    """
    celery_engine = create_async_engine(
        config.DATABASE_URL,
        poolclass=NullPool,
        echo=False,
    )
    return async_sessionmaker(celery_engine, expire_on_commit=False)
@celery_app.task(name="populate_blocknote_for_documents", bind=True)
def populate_blocknote_for_documents_task(
    self, document_ids: list[int] | None = None, batch_size: int = 50
):
    """
    Celery task to populate blocknote_document for existing documents.

    Args:
        document_ids: Optional list of specific document IDs to process.
            If None, processes all documents with blocknote_document IS NULL.
        batch_size: Number of documents to process in each batch (default: 50)
    """
    import asyncio

    # asyncio.run() creates a fresh event loop, runs the coroutine, and
    # guarantees the loop (and any pending async generators) are closed
    # afterwards — replacing the manual new_event_loop / run_until_complete /
    # close sequence, which did not shut down async generators on exit.
    asyncio.run(_populate_blocknote_for_documents(document_ids, batch_size))
async def _populate_blocknote_for_documents(
    document_ids: list[int] | None = None, batch_size: int = 50
):
    """
    Async function to populate blocknote_document for documents.

    Reconstructs each document's markdown by concatenating its stored chunks,
    converts it to BlockNote JSON via convert_markdown_to_blocknote, and
    commits in batches so one bad document never loses the whole run.

    Args:
        document_ids: Optional list of specific document IDs to process.
            When None, every document with blocknote_document IS NULL is
            processed.
        batch_size: Number of documents to process per batch
    """
    # get_celery_session_maker() returns a sessionmaker; the second call
    # opens the actual AsyncSession used for this task run.
    async with get_celery_session_maker()() as session:
        try:
            # Build query for documents that need blocknote_document populated
            query = select(Document).where(Document.blocknote_document.is_(None))
            # If specific document IDs provided, filter by them
            if document_ids:
                query = query.where(Document.id.in_(document_ids))
            # Load chunks relationship to avoid N+1 queries
            query = query.options(selectinload(Document.chunks))
            # Execute query
            result = await session.execute(query)
            documents = result.scalars().all()
            total_documents = len(documents)
            logger.info(f"Found {total_documents} documents to process")
            if total_documents == 0:
                logger.info("No documents to process")
                return
            # Process documents in batches
            processed = 0
            failed = 0
            for i in range(0, total_documents, batch_size):
                batch = documents[i : i + batch_size]
                logger.info(
                    f"Processing batch {i // batch_size + 1}: documents {i + 1}-{min(i + batch_size, total_documents)}"
                )
                for document in batch:
                    try:
                        # Use preloaded chunks from selectinload - no need to query again.
                        # Sorted by chunk id, assumed to reflect original document
                        # order — TODO confirm chunk ids are assigned monotonically.
                        chunks = sorted(document.chunks, key=lambda c: c.id)
                        if not chunks:
                            logger.warning(
                                f"Document {document.id} ({document.title}) has no chunks, skipping"
                            )
                            failed += 1
                            continue
                        # Reconstruct markdown by concatenating chunk contents
                        markdown_content = "\n\n".join(
                            chunk.content for chunk in chunks
                        )
                        if not markdown_content or not markdown_content.strip():
                            logger.warning(
                                f"Document {document.id} ({document.title}) has empty markdown content, skipping"
                            )
                            failed += 1
                            continue
                        # Convert markdown to BlockNote JSON
                        blocknote_json = await convert_markdown_to_blocknote(
                            markdown_content
                        )
                        if not blocknote_json:
                            logger.warning(
                                f"Failed to convert markdown to BlockNote for document {document.id} ({document.title})"
                            )
                            failed += 1
                            continue
                        # Update document with blocknote_document (other fields already have correct defaults)
                        document.blocknote_document = blocknote_json
                        processed += 1
                        # Commit every batch_size documents to avoid long transactions.
                        # `processed` is >= 1 here, so this never fires spuriously
                        # on an all-failed batch.
                        if processed % batch_size == 0:
                            await session.commit()
                            logger.info(
                                f"Committed batch: {processed} documents processed so far"
                            )
                    except Exception as e:
                        logger.error(
                            f"Error processing document {document.id} ({document.title}): {e}",
                            exc_info=True,
                        )
                        failed += 1
                        # Continue with next document instead of failing entire batch
                        continue
                # Commit remaining changes in the batch
                await session.commit()
                logger.info(f"Completed batch {i // batch_size + 1}")
            logger.info(
                f"Migration complete: {processed} documents processed, {failed} failed"
            )
        except Exception as e:
            await session.rollback()
            logger.error(f"Error in blocknote migration task: {e}", exc_info=True)
            raise

View file

@ -0,0 +1,126 @@
"""Celery tasks for reindexing edited documents."""
import logging
from sqlalchemy import delete, select
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from sqlalchemy.orm import selectinload
from sqlalchemy.pool import NullPool
from app.celery_app import celery_app
from app.config import config
from app.db import Document
from app.services.llm_service import get_user_long_context_llm
from app.utils.blocknote_converter import convert_blocknote_to_markdown
from app.utils.document_converters import (
create_document_chunks,
generate_document_summary,
)
logger = logging.getLogger(__name__)
def get_celery_session_maker():
    """Return an async session maker backed by a worker-local engine.

    A NullPool engine is created per call so sessions never share pooled
    connections with the main application's event loop.
    """
    return async_sessionmaker(
        create_async_engine(config.DATABASE_URL, poolclass=NullPool, echo=False),
        expire_on_commit=False,
    )
@celery_app.task(name="reindex_document", bind=True)
def reindex_document_task(self, document_id: int, user_id: str):
    """
    Celery task to reindex a document after editing.

    Args:
        document_id: ID of document to reindex
        user_id: ID of user who edited the document
    """
    import asyncio

    # asyncio.run() manages event-loop creation and teardown (including
    # closing pending async generators), which the previous manual
    # new_event_loop / run_until_complete / close pattern did not.
    asyncio.run(_reindex_document(document_id, user_id))
async def _reindex_document(document_id: int, user_id: str):
    """Async function to reindex a document.

    Pipeline: BlockNote JSON -> markdown -> fresh chunks -> regenerated
    summary + embedding, all committed in one transaction (rolled back on
    any failure).

    Args:
        document_id: ID of the document to reindex
        user_id: ID of the user whose LLM config generates the new summary
    """
    async with get_celery_session_maker()() as session:
        try:
            # Get document
            result = await session.execute(
                select(Document)
                .options(selectinload(Document.chunks))  # Eagerly load chunks
                .where(Document.id == document_id)
            )
            document = result.scalars().first()
            if not document:
                logger.error(f"Document {document_id} not found")
                return
            if not document.blocknote_document:
                logger.warning(f"Document {document_id} has no BlockNote content")
                return
            logger.info(f"Reindexing document {document_id} ({document.title})")
            # 1. Convert BlockNote → Markdown
            markdown_content = await convert_blocknote_to_markdown(
                document.blocknote_document
            )
            if not markdown_content:
                logger.error(f"Failed to convert document {document_id} to markdown")
                return
            # 2. Delete old chunks explicitly.
            # Imported here rather than at module top — presumably to avoid a
            # circular import; verify before hoisting.
            from app.db import Chunk
            await session.execute(delete(Chunk).where(Chunk.document_id == document_id))
            await session.flush()  # Ensure old chunks are deleted
            # 3. Create new chunks
            new_chunks = await create_document_chunks(markdown_content)
            # 4. Add new chunks to session
            for chunk in new_chunks:
                chunk.document_id = document_id
                session.add(chunk)
            logger.info(f"Created {len(new_chunks)} chunks for document {document_id}")
            # 5. Regenerate summary
            user_llm = await get_user_long_context_llm(
                session, user_id, document.search_space_id
            )
            document_metadata = {
                "title": document.title,
                "document_type": document.document_type.value,
            }
            summary_content, summary_embedding = await generate_document_summary(
                markdown_content, user_llm, document_metadata
            )
            # 6. Update document
            document.content = summary_content
            document.embedding = summary_embedding
            document.content_needs_reindexing = False
            await session.commit()
            logger.info(f"Successfully reindexed document {document_id}")
        except Exception as e:
            await session.rollback()
            logger.error(f"Error reindexing document {document_id}: {e}", exc_info=True)
            raise

View file

@ -62,17 +62,22 @@ async def _process_extension_document(
individual_document_dict, search_space_id: int, user_id: str
):
"""Process extension document with new session."""
from pydantic import BaseModel
from pydantic import BaseModel, ConfigDict, Field
# Reconstruct the document object from dict
# You'll need to define the proper model for this
class DocumentMetadata(BaseModel):
VisitedWebPageTitle: str
VisitedWebPageURL: str
BrowsingSessionId: str
VisitedWebPageDateWithTimeInISOString: str
VisitedWebPageReffererURL: str
VisitedWebPageVisitDurationInMilliseconds: str
class IndividualDocument(BaseModel):
model_config = ConfigDict(populate_by_name=True)
metadata: DocumentMetadata
content: str
page_content: str = Field(alias="pageContent")
individual_document = IndividualDocument(**individual_document_dict)

View file

@ -145,6 +145,16 @@ async def add_extension_received_document(
# Process chunks
chunks = await create_document_chunks(content.pageContent)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(combined_document_string)
if not blocknote_json:
logging.warning(
f"Failed to convert extension document '{content.metadata.VisitedWebPageTitle}' "
f"to BlockNote JSON, document will not be editable"
)
# Update or create document
if existing_document:
# Update existing document
@ -154,6 +164,7 @@ async def add_extension_received_document(
existing_document.embedding = summary_embedding
existing_document.document_metadata = content.metadata.model_dump()
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@ -170,6 +181,7 @@ async def add_extension_received_document(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
)
session.add(document)

View file

@ -100,6 +100,15 @@ async def add_received_file_document_using_unstructured(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
logging.warning(
f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
)
# Update or create document
if existing_document:
# Update existing document
@ -112,6 +121,9 @@ async def add_received_file_document_using_unstructured(
"ETL_SERVICE": "UNSTRUCTURED",
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
existing_document.content_needs_reindexing = False
existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@ -131,6 +143,9 @@ async def add_received_file_document_using_unstructured(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
content_needs_reindexing=False,
last_edited_at=None,
)
session.add(document)
@ -214,6 +229,15 @@ async def add_received_file_document_using_llamacloud(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
logging.warning(
f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
)
# Update or create document
if existing_document:
# Update existing document
@ -226,6 +250,9 @@ async def add_received_file_document_using_llamacloud(
"ETL_SERVICE": "LLAMACLOUD",
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
existing_document.content_needs_reindexing = False
existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@ -245,6 +272,9 @@ async def add_received_file_document_using_llamacloud(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
content_needs_reindexing=False,
last_edited_at=None,
)
session.add(document)
@ -353,6 +383,15 @@ async def add_received_file_document_using_docling(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
logging.warning(
f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
)
# Update or create document
if existing_document:
# Update existing document
@ -365,6 +404,9 @@ async def add_received_file_document_using_docling(
"ETL_SERVICE": "DOCLING",
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
existing_document.content_needs_reindexing = False
existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@ -384,6 +426,9 @@ async def add_received_file_document_using_docling(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
content_needs_reindexing=False,
last_edited_at=None,
)
session.add(document)

View file

@ -110,6 +110,15 @@ async def add_received_markdown_file_document(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
logging.warning(
f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
)
# Update or create document
if existing_document:
# Update existing document
@ -121,6 +130,7 @@ async def add_received_markdown_file_document(
"FILE_NAME": file_name,
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@ -139,6 +149,7 @@ async def add_received_markdown_file_document(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
)
session.add(document)

View file

@ -291,6 +291,16 @@ async def add_youtube_video_document(
{"stage": "chunk_processing"},
)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert transcript to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(combined_document_string)
if not blocknote_json:
logging.warning(
f"Failed to convert YouTube video '{video_id}' to BlockNote JSON, "
"document will not be editable"
)
chunks = await create_document_chunks(combined_document_string)
# Update or create document
@ -314,6 +324,7 @@ async def add_youtube_video_document(
"thumbnail": video_data.get("thumbnail_url", ""),
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@ -342,6 +353,7 @@ async def add_youtube_video_document(
search_space_id=search_space_id,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
)
session.add(document)

View file

@ -0,0 +1,123 @@
import logging
from typing import Any
import httpx
from app.config import config
logger = logging.getLogger(__name__)
async def convert_markdown_to_blocknote(
    markdown: str,
) -> dict[str, Any] | list[dict[str, Any]] | None:
    """
    Convert markdown to BlockNote JSON via the Next.js conversion API.

    Args:
        markdown: Markdown string to convert

    Returns:
        The BlockNote document on success (normally a list of block dicts —
        the previous ``dict | None`` annotation was wrong), a minimal
        placeholder document for very short input, or None when the input is
        empty or the conversion fails.
    """
    if not markdown or not markdown.strip():
        logger.warning("Empty markdown provided for conversion")
        return None

    if len(markdown) < 10:
        # Too little content to be worth a round-trip to the converter;
        # return a minimal BlockNote document so the editor still opens.
        # (The old message referenced a "sanitization" step that no longer
        # exists in this function.)
        logger.warning("Markdown too short to convert; returning placeholder document")
        return [
            {
                "type": "paragraph",
                "content": [
                    {
                        "type": "text",
                        "text": "Document content could not be converted for editing.",
                        "styles": {},
                    }
                ],
                "children": [],
            }
        ]

    async with httpx.AsyncClient() as client:
        try:
            response = await client.post(
                f"{config.NEXT_FRONTEND_URL}/api/convert-to-blocknote",
                json={"markdown": markdown},
                timeout=30.0,
            )
            response.raise_for_status()
            data = response.json()
            blocknote_document = data.get("blocknote_document")
            if blocknote_document:
                # Old log printed len(markdown) twice as "original" and
                # "sanitized"; there is only one length to report.
                logger.info(
                    f"Successfully converted markdown to BlockNote ({len(markdown)} chars)"
                )
                return blocknote_document
            logger.warning("Next.js API returned empty blocknote_document")
            return None
        except httpx.TimeoutException:
            logger.error("Timeout converting markdown to BlockNote after 30s")
            return None
        except httpx.HTTPStatusError as e:
            logger.error(
                f"HTTP error converting markdown to BlockNote: {e.response.status_code} - {e.response.text}"
            )
            # Log first 1000 chars of problematic markdown for debugging
            logger.debug(f"Problematic markdown sample: {markdown[:1000]}")
            return None
        except Exception as e:
            logger.error(f"Failed to convert markdown to BlockNote: {e}", exc_info=True)
            return None
async def convert_blocknote_to_markdown(
    blocknote_document: dict[str, Any] | list[dict[str, Any]],
) -> str | None:
    """
    Convert a BlockNote document to markdown via the Next.js API.

    Args:
        blocknote_document: BlockNote document as dict or list of blocks

    Returns:
        Markdown string, or None if conversion fails
    """
    if not blocknote_document:
        logger.warning("Empty BlockNote document provided for conversion")
        return None

    async with httpx.AsyncClient() as client:
        try:
            response = await client.post(
                f"{config.NEXT_FRONTEND_URL}/api/convert-to-markdown",
                json={"blocknote_document": blocknote_document},
                timeout=30.0,
            )
            response.raise_for_status()
            markdown = response.json().get("markdown")
            if not markdown:
                logger.warning("Next.js API returned empty markdown")
                return None
            logger.info(
                f"Successfully converted BlockNote to markdown ({len(markdown)} chars)"
            )
            return markdown
        except httpx.TimeoutException:
            logger.error("Timeout converting BlockNote to markdown after 30s")
            return None
        except httpx.HTTPStatusError as e:
            logger.error(
                f"HTTP error converting BlockNote to markdown: {e.response.status_code} - {e.response.text}"
            )
            return None
        except Exception as e:
            logger.error(f"Failed to convert BlockNote to markdown: {e}", exc_info=True)
            return None

View file

@ -1,19 +0,0 @@
from fastapi import HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.db import User
# Helper function to check user ownership
async def check_ownership(session: AsyncSession, model, item_id: int, user: User):
    """Fetch row ``item_id`` of ``model`` owned by ``user`` or raise 404.

    The ownership filter is part of the query itself, so a row that exists
    but belongs to someone else is indistinguishable from a missing row —
    both produce the same 404 response.
    """
    result = await session.execute(
        select(model).filter(model.id == item_id, model.user_id == user.id)
    )
    owned_item = result.scalars().first()
    if not owned_item:
        raise HTTPException(
            status_code=404,
            detail="Item not found or you don't have permission to access it",
        )
    return owned_item

View file

@ -0,0 +1,274 @@
"""
RBAC (Role-Based Access Control) utility functions.
Provides helpers for checking user permissions in search spaces.
"""
import secrets
from uuid import UUID
from fastapi import HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlalchemy.orm import selectinload
from app.db import (
Permission,
SearchSpace,
SearchSpaceMembership,
SearchSpaceRole,
User,
has_permission,
)
async def get_user_membership(
    session: AsyncSession,
    user_id: UUID,
    search_space_id: int,
) -> SearchSpaceMembership | None:
    """Look up a user's membership row for a search space.

    The membership's role is eagerly loaded so callers can inspect role
    permissions without triggering a lazy load.

    Args:
        session: Database session
        user_id: User UUID
        search_space_id: Search space ID

    Returns:
        SearchSpaceMembership if found, None otherwise
    """
    membership_query = (
        select(SearchSpaceMembership)
        .options(selectinload(SearchSpaceMembership.role))
        .filter(
            SearchSpaceMembership.user_id == user_id,
            SearchSpaceMembership.search_space_id == search_space_id,
        )
    )
    result = await session.execute(membership_query)
    return result.scalars().first()
async def get_user_permissions(
    session: AsyncSession,
    user_id: UUID,
    search_space_id: int,
) -> list[str]:
    """Resolve the permission strings a user holds in a search space.

    Owners are short-circuited to full access; other members inherit
    whatever their assigned role grants.

    Args:
        session: Database session
        user_id: User UUID
        search_space_id: Search space ID

    Returns:
        List of permission strings (empty if the user is not a member)
    """
    membership = await get_user_membership(session, user_id, search_space_id)
    if membership is None:
        return []
    if membership.is_owner:
        return [Permission.FULL_ACCESS.value]
    role = membership.role
    return (role.permissions or []) if role else []
async def check_permission(
    session: AsyncSession,
    user: User,
    search_space_id: int,
    required_permission: str,
    error_message: str = "You don't have permission to perform this action",
) -> SearchSpaceMembership:
    """Require a specific permission in a search space, raising on denial.

    Args:
        session: Database session
        user: User object
        search_space_id: Search space ID
        required_permission: Permission string to check
        error_message: Detail used in the 403 when the permission check
            (as opposed to the membership check) fails

    Returns:
        The caller's SearchSpaceMembership when the permission is granted

    Raises:
        HTTPException: 403 if the user is not a member or lacks the permission
    """
    membership = await get_user_membership(session, user.id, search_space_id)
    if membership is None:
        raise HTTPException(
            status_code=403,
            detail="You don't have access to this search space",
        )

    # Resolve the effective permission set: owners get a blanket grant,
    # everyone else inherits from their role (if any).
    if membership.is_owner:
        granted = [Permission.FULL_ACCESS.value]
    else:
        granted = (membership.role.permissions or []) if membership.role else []

    if not has_permission(granted, required_permission):
        raise HTTPException(status_code=403, detail=error_message)
    return membership
async def check_search_space_access(
    session: AsyncSession,
    user: User,
    search_space_id: int,
) -> SearchSpaceMembership:
    """Ensure the user is a member of the search space.

    This is the coarse access gate: any membership, regardless of role,
    passes; non-members receive a 403.

    Args:
        session: Database session
        user: User object
        search_space_id: Search space ID

    Returns:
        The user's SearchSpaceMembership

    Raises:
        HTTPException: 403 if the user is not a member
    """
    membership = await get_user_membership(session, user.id, search_space_id)
    if membership is None:
        raise HTTPException(
            status_code=403,
            detail="You don't have access to this search space",
        )
    return membership
async def is_search_space_owner(
    session: AsyncSession,
    user_id: UUID,
    search_space_id: int,
) -> bool:
    """Report whether the given user owns the search space.

    Args:
        session: Database session
        user_id: User UUID
        search_space_id: Search space ID

    Returns:
        True when a membership exists and carries the owner flag
    """
    membership = await get_user_membership(session, user_id, search_space_id)
    return bool(membership and membership.is_owner)
async def get_search_space_with_access_check(
    session: AsyncSession,
    user: User,
    search_space_id: int,
    required_permission: str | None = None,
) -> tuple[SearchSpace, SearchSpaceMembership]:
    """Load a search space and verify the caller may use it.

    Args:
        session: Database session
        user: User object
        search_space_id: Search space ID
        required_permission: When given (and non-empty), the member must
            also hold this permission; otherwise plain membership suffices

    Returns:
        Tuple of (SearchSpace, SearchSpaceMembership)

    Raises:
        HTTPException: 404 if the space is missing, 403 on access or
            permission failure
    """
    space_result = await session.execute(
        select(SearchSpace).filter(SearchSpace.id == search_space_id)
    )
    search_space = space_result.scalars().first()
    if search_space is None:
        raise HTTPException(status_code=404, detail="Search space not found")

    # Membership-only check when no (truthy) permission was requested,
    # mirroring the original truthiness test on required_permission.
    if not required_permission:
        membership = await check_search_space_access(session, user, search_space_id)
    else:
        membership = await check_permission(
            session, user, search_space_id, required_permission
        )
    return search_space, membership
def generate_invite_code() -> str:
    """Create a random, URL-safe invite code for search space invites.

    Returns:
        A 32-character URL-safe string (24 random bytes, base64url-encoded)
    """
    # 24 bytes of entropy encode to exactly 32 base64url characters.
    return secrets.token_urlsafe(24)
async def get_default_role(
    session: AsyncSession,
    search_space_id: int,
) -> SearchSpaceRole | None:
    """
    Get the default role for a search space (used when accepting invites
    without a specific role).

    Args:
        session: Database session
        search_space_id: Search space ID

    Returns:
        Default SearchSpaceRole or None
    """
    result = await session.execute(
        select(SearchSpaceRole).filter(
            SearchSpaceRole.search_space_id == search_space_id,
            # .is_(True) is the idiomatic SQLAlchemy spelling of an IS TRUE
            # comparison; it makes the intent explicit and removes the need
            # for the noqa(E712) suppression that `== True` required.
            SearchSpaceRole.is_default.is_(True),
        )
    )
    return result.scalars().first()
async def get_owner_role(
    session: AsyncSession,
    search_space_id: int,
) -> SearchSpaceRole | None:
    """Fetch the role named "Owner" for a search space, if it exists.

    Args:
        session: Database session
        search_space_id: Search space ID

    Returns:
        Owner SearchSpaceRole or None
    """
    owner_query = select(SearchSpaceRole).filter(
        SearchSpaceRole.search_space_id == search_space_id,
        SearchSpaceRole.name == "Owner",
    )
    result = await session.execute(owner_query)
    return result.scalars().first()

View file

@ -0,0 +1,40 @@
import { ServerBlockNoteEditor } from "@blocknote/server-util";
import { type NextRequest, NextResponse } from "next/server";
/**
 * Convert a markdown string into BlockNote blocks.
 *
 * Body: `{ markdown: string }`.
 * Responds 400 on a missing/non-string body, 500 (with `details`) when
 * parsing fails or yields no blocks.
 */
export async function POST(request: NextRequest) {
  try {
    const { markdown } = await request.json();

    if (!markdown || typeof markdown !== "string") {
      return NextResponse.json({ error: "Markdown string is required" }, { status: 400 });
    }

    // Create server-side editor instance
    const editor = ServerBlockNoteEditor.create();

    // Convert markdown directly to BlockNote blocks
    const blocks = await editor.tryParseMarkdownToBlocks(markdown);

    if (!blocks || blocks.length === 0) {
      throw new Error("Markdown parsing returned no blocks");
    }

    return NextResponse.json({ blocknote_document: blocks });
  } catch (error: unknown) {
    console.error("Failed to convert markdown to BlockNote:", error);
    return NextResponse.json(
      {
        error: "Failed to convert markdown to BlockNote blocks",
        // Narrow `unknown` before reading .message instead of typing the
        // catch variable as `any`.
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View file

@ -0,0 +1,28 @@
import { ServerBlockNoteEditor } from "@blocknote/server-util";
import { type NextRequest, NextResponse } from "next/server";
/**
 * Convert a BlockNote block array back into markdown.
 *
 * Body: `{ blocknote_document: Block[] }`.
 * Responds 400 when the body is missing or not an array, 500 (with
 * `details`) when the conversion throws.
 */
export async function POST(request: NextRequest) {
  try {
    const { blocknote_document } = await request.json();

    if (!blocknote_document || !Array.isArray(blocknote_document)) {
      return NextResponse.json({ error: "BlockNote document array is required" }, { status: 400 });
    }

    // Create server-side editor instance
    const editor = ServerBlockNoteEditor.create();

    // Convert BlockNote blocks to markdown ("lossy": formatting BlockNote
    // cannot express in markdown is dropped by design).
    const markdown = await editor.blocksToMarkdownLossy(blocknote_document);

    return NextResponse.json({ markdown });
  } catch (error: unknown) {
    console.error("Failed to convert BlockNote to markdown:", error);
    return NextResponse.json(
      {
        error: "Failed to convert BlockNote blocks to markdown",
        // Mirror the convert-to-blocknote route's error payload so callers
        // get a debuggable message from both directions of the conversion.
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View file

@ -18,6 +18,7 @@ import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/com
import { Separator } from "@/components/ui/separator";
import { SidebarInset, SidebarProvider, SidebarTrigger } from "@/components/ui/sidebar";
import { useLLMPreferences } from "@/hooks/use-llm-configs";
import { useUserAccess } from "@/hooks/use-rbac";
import { cn } from "@/lib/utils";
export function DashboardClientLayout({
@ -60,11 +61,15 @@ export function DashboardClientLayout({
}, [activeChatId, isChatPannelOpen]);
const { loading, error, isOnboardingComplete } = useLLMPreferences(searchSpaceIdNum);
const { access, loading: accessLoading } = useUserAccess(searchSpaceIdNum);
const [hasCheckedOnboarding, setHasCheckedOnboarding] = useState(false);
// Skip onboarding check if we're already on the onboarding page
const isOnboardingPage = pathname?.includes("/onboard");
// Only owners should see onboarding - invited members use existing config
const isOwner = access?.is_owner ?? false;
// Translate navigation items
const tNavMenu = useTranslations("nav_menu");
const translatedNavMain = useMemo(() => {
@ -102,11 +107,13 @@ export function DashboardClientLayout({
return;
}
// Only check once after preferences have loaded
if (!loading && !hasCheckedOnboarding) {
// Wait for both preferences and access data to load
if (!loading && !accessLoading && !hasCheckedOnboarding) {
const onboardingComplete = isOnboardingComplete();
if (!onboardingComplete) {
// Only redirect to onboarding if user is the owner and onboarding is not complete
// Invited members (non-owners) should skip onboarding and use existing config
if (!onboardingComplete && isOwner) {
router.push(`/dashboard/${searchSpaceId}/onboard`);
}
@ -114,8 +121,10 @@ export function DashboardClientLayout({
}
}, [
loading,
accessLoading,
isOnboardingComplete,
isOnboardingPage,
isOwner,
router,
searchSpaceId,
hasCheckedOnboarding,
@ -145,7 +154,7 @@ export function DashboardClientLayout({
}, [chat_id, search_space_id]);
// Show loading screen while checking onboarding status (only on first load)
if (!hasCheckedOnboarding && loading && !isOnboardingPage) {
if (!hasCheckedOnboarding && (loading || accessLoading) && !isOnboardingPage) {
return (
<div className="flex flex-col items-center justify-center min-h-screen space-y-4">
<Card className="w-[350px] bg-background/60 backdrop-blur-sm">

View file

@ -22,6 +22,7 @@ import {
type SearchSourceConnector,
useSearchSourceConnectors,
} from "@/hooks/use-search-source-connectors";
import { authenticatedFetch } from "@/lib/auth-utils";
export default function AirtableConnectorPage() {
const router = useRouter();
@ -46,14 +47,9 @@ export default function AirtableConnectorPage() {
const handleConnectAirtable = async () => {
setIsConnecting(true);
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/auth/airtable/connector/add/?space_id=${searchSpaceId}`,
{
method: "GET",
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
}
{ method: "GET" }
);
if (!response.ok) {

View file

@ -40,6 +40,7 @@ import { EnumConnectorName } from "@/contracts/enums/connector";
import { getConnectorIcon } from "@/contracts/enums/connectorIcons";
// Assuming useSearchSourceConnectors hook exists and works similarly
import { useSearchSourceConnectors } from "@/hooks/use-search-source-connectors";
import { authenticatedFetch, redirectToLogin } from "@/lib/auth-utils";
// Define the form schema with Zod for GitHub PAT entry step
const githubPatFormSchema = z.object({
@ -101,19 +102,11 @@ export default function GithubConnectorPage() {
setConnectorName(values.name); // Store the name
setValidatedPat(values.github_pat); // Store the PAT temporarily
try {
const token = localStorage.getItem("surfsense_bearer_token");
if (!token) {
throw new Error("No authentication token found");
}
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/github/repositories`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ github_pat: values.github_pat }),
}
);

View file

@ -24,6 +24,7 @@ import {
type SearchSourceConnector,
useSearchSourceConnectors,
} from "@/hooks/use-search-source-connectors";
import { authenticatedFetch } from "@/lib/auth-utils";
export default function GoogleCalendarConnectorPage() {
const router = useRouter();
@ -51,14 +52,9 @@ export default function GoogleCalendarConnectorPage() {
try {
setIsConnecting(true);
// Call backend to initiate authorization flow
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/auth/google/calendar/connector/add/?space_id=${searchSpaceId}`,
{
method: "GET",
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
}
{ method: "GET" }
);
if (!response.ok) {

View file

@ -24,6 +24,7 @@ import {
type SearchSourceConnector,
useSearchSourceConnectors,
} from "@/hooks/use-search-source-connectors";
import { authenticatedFetch } from "@/lib/auth-utils";
export default function GoogleGmailConnectorPage() {
const router = useRouter();
@ -50,14 +51,9 @@ export default function GoogleGmailConnectorPage() {
try {
setIsConnecting(true);
// Call backend to initiate authorization flow
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/auth/google/gmail/connector/add/?space_id=${searchSpaceId}`,
{
method: "GET",
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
}
{ method: "GET" }
);
if (!response.ok) {

View file

@ -309,6 +309,7 @@ export function DocumentsTableShell({
refreshDocuments={async () => {
await onRefresh();
}}
searchSpaceId={searchSpaceId as string}
/>
</TableCell>
</motion.tr>
@ -340,6 +341,7 @@ export function DocumentsTableShell({
refreshDocuments={async () => {
await onRefresh();
}}
searchSpaceId={searchSpaceId as string}
/>
</div>
<div className="mt-1 flex flex-wrap items-center gap-2">

View file

@ -1,6 +1,8 @@
"use client";
import { MoreHorizontal } from "lucide-react";
import { FileText, Pencil, Trash2 } from "lucide-react";
import { motion } from "motion/react";
import { useRouter } from "next/navigation";
import { useState } from "react";
import { toast } from "sonner";
import { JsonMetadataViewer } from "@/components/json-metadata-viewer";
@ -12,29 +14,26 @@ import {
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import { Button } from "@/components/ui/button";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import type { Document } from "./types";
export function RowActions({
document,
deleteDocument,
refreshDocuments,
searchSpaceId,
}: {
document: Document;
deleteDocument: (id: number) => Promise<boolean>;
refreshDocuments: () => Promise<void>;
searchSpaceId: string;
}) {
const [isOpen, setIsOpen] = useState(false);
const [isDeleteOpen, setIsDeleteOpen] = useState(false);
const [isMetadataOpen, setIsMetadataOpen] = useState(false);
const [isDeleting, setIsDeleting] = useState(false);
const router = useRouter();
const handleDelete = async () => {
setIsDeleting(true);
@ -48,62 +47,114 @@ export function RowActions({
toast.error("Failed to delete document");
} finally {
setIsDeleting(false);
setIsOpen(false);
setIsDeleteOpen(false);
}
};
const handleEdit = () => {
router.push(`/dashboard/${searchSpaceId}/editor/${document.id}`);
};
return (
<div className="flex justify-end">
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button variant="ghost" className="h-8 w-8 p-0">
<span className="sr-only">Open menu</span>
<MoreHorizontal className="h-4 w-4" />
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
<JsonMetadataViewer
title={document.title}
metadata={document.document_metadata}
trigger={
<DropdownMenuItem onSelect={(e) => e.preventDefault()}>
View Metadata
</DropdownMenuItem>
}
/>
<DropdownMenuSeparator />
<AlertDialog open={isOpen} onOpenChange={setIsOpen}>
<AlertDialogTrigger asChild>
<DropdownMenuItem
className="text-destructive focus:text-destructive"
onSelect={(e) => {
e.preventDefault();
setIsOpen(true);
}}
>
Delete
</DropdownMenuItem>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Are you sure?</AlertDialogTitle>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>Cancel</AlertDialogCancel>
<AlertDialogAction
onClick={(e) => {
e.preventDefault();
handleDelete();
}}
disabled={isDeleting}
>
{isDeleting ? "Deleting..." : "Delete"}
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
</DropdownMenuContent>
</DropdownMenu>
<div className="flex items-center justify-end gap-1">
{/* Edit Button */}
<Tooltip>
<TooltipTrigger asChild>
<motion.div
whileHover={{ scale: 1.1 }}
whileTap={{ scale: 0.95 }}
transition={{ type: "spring", stiffness: 400, damping: 17 }}
>
<Button
variant="ghost"
size="icon"
className="h-8 w-8 text-muted-foreground hover:text-foreground hover:bg-muted/80"
onClick={handleEdit}
>
<Pencil className="h-4 w-4" />
<span className="sr-only">Edit Document</span>
</Button>
</motion.div>
</TooltipTrigger>
<TooltipContent side="top">
<p>Edit Document</p>
</TooltipContent>
</Tooltip>
{/* View Metadata Button */}
<Tooltip>
<TooltipTrigger asChild>
<motion.div
whileHover={{ scale: 1.1 }}
whileTap={{ scale: 0.95 }}
transition={{ type: "spring", stiffness: 400, damping: 17 }}
>
<Button
variant="ghost"
size="icon"
className="h-8 w-8 text-muted-foreground hover:text-foreground hover:bg-muted/80"
onClick={() => setIsMetadataOpen(true)}
>
<FileText className="h-4 w-4" />
<span className="sr-only">View Metadata</span>
</Button>
</motion.div>
</TooltipTrigger>
<TooltipContent side="top">
<p>View Metadata</p>
</TooltipContent>
</Tooltip>
<JsonMetadataViewer
title={document.title}
metadata={document.document_metadata}
open={isMetadataOpen}
onOpenChange={setIsMetadataOpen}
/>
{/* Delete Button */}
<Tooltip>
<TooltipTrigger asChild>
<motion.div
whileHover={{ scale: 1.1 }}
whileTap={{ scale: 0.95 }}
transition={{ type: "spring", stiffness: 400, damping: 17 }}
>
<Button
variant="ghost"
size="icon"
className="h-8 w-8 text-muted-foreground hover:text-destructive hover:bg-destructive/10"
onClick={() => setIsDeleteOpen(true)}
disabled={isDeleting}
>
<Trash2 className="h-4 w-4" />
<span className="sr-only">Delete</span>
</Button>
</motion.div>
</TooltipTrigger>
<TooltipContent side="top">
<p>Delete</p>
</TooltipContent>
</Tooltip>
<AlertDialog open={isDeleteOpen} onOpenChange={setIsDeleteOpen}>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Are you sure?</AlertDialogTitle>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>Cancel</AlertDialogCancel>
<AlertDialogAction
onClick={(e) => {
e.preventDefault();
handleDelete();
}}
disabled={isDeleting}
className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
>
{isDeleting ? "Deleting..." : "Delete"}
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
</div>
);
}

View file

@ -0,0 +1,256 @@
"use client";
import { AlertCircle, FileText, Loader2, Save, X } from "lucide-react";
import { motion } from "motion/react";
import { useParams, useRouter } from "next/navigation";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import { BlockNoteEditor } from "@/components/DynamicBlockNoteEditor";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Separator } from "@/components/ui/separator";
import { authenticatedFetch, getBearerToken, redirectToLogin } from "@/lib/auth-utils";
// Payload returned by the FastAPI `/documents/{id}/editor-content` endpoint.
interface EditorContent {
	document_id: number;
	title: string;
	// BlockNote block tree; exact shape is owned by the editor library — TODO confirm
	blocknote_document: any;
	// Timestamp of the last manual save, or null if the doc was never edited
	last_edited_at: string | null;
}
/**
 * Document editor page.
 *
 * Loads a document's BlockNote content straight from the FastAPI backend,
 * renders it in the BlockNote editor, and saves it back (triggering a
 * background reindex) on "Save & Exit".
 *
 * Route params: `search_space_id` and `documentId`.
 */
export default function EditorPage() {
	const params = useParams();
	const router = useRouter();
	const documentId = params.documentId as string;

	// NOTE: `document` is component state (the fetched doc) and shadows the
	// global DOM `document` within this component body.
	const [document, setDocument] = useState<EditorContent | null>(null);
	const [loading, setLoading] = useState(true);
	const [saving, setSaving] = useState(false);
	const [editorContent, setEditorContent] = useState<any>(null);
	const [error, setError] = useState<string | null>(null);
	const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false);

	// Fetch document content - DIRECT CALL TO FASTAPI
	useEffect(() => {
		async function fetchDocument() {
			const token = getBearerToken();
			if (!token) {
				console.error("No auth token found");
				// Redirect to login with current path saved
				redirectToLogin();
				return;
			}
			try {
				const response = await authenticatedFetch(
					`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${params.search_space_id}/documents/${documentId}/editor-content`,
					{ method: "GET" }
				);
				if (!response.ok) {
					const errorData = await response
						.json()
						.catch(() => ({ detail: "Failed to fetch document" }));
					throw new Error(errorData.detail || "Failed to fetch document");
				}
				const data = await response.json();
				// Editing requires BlockNote content; documents ingested before
				// this feature may not have it.
				if (!data.blocknote_document) {
					setError(
						"This document does not have BlockNote content. Please re-upload the document to enable editing."
					);
					setLoading(false);
					return;
				}
				setDocument(data);
				setEditorContent(data.blocknote_document);
				setError(null);
			} catch (error) {
				console.error("Error fetching document:", error);
				setError(
					error instanceof Error ? error.message : "Failed to fetch document. Please try again."
				);
			} finally {
				setLoading(false);
			}
		}
		if (documentId) {
			fetchDocument();
		}
	}, [documentId, params.search_space_id]);

	// Mark as dirty only after a genuine edit. On initial load `editorContent`
	// is the very same object reference as `document.blocknote_document`, so
	// freshly-fetched content is NOT flagged; the editor's onChange hands us a
	// new document object, which makes the reference comparison fail and marks
	// the page as having unsaved changes. (Previously this effect fired on the
	// initial hydration too, showing "Unsaved changes" before any edit.)
	useEffect(() => {
		if (editorContent && document && editorContent !== document.blocknote_document) {
			setHasUnsavedChanges(true);
		}
	}, [editorContent, document]);

	// TODO: Maybe add Auto-save every 30 seconds - DIRECT CALL TO FASTAPI

	// Save and exit - DIRECT CALL TO FASTAPI
	const handleSave = async () => {
		const token = getBearerToken();
		if (!token) {
			toast.error("Please login to save");
			redirectToLogin();
			return;
		}
		if (!editorContent) {
			toast.error("No content to save");
			return;
		}
		setSaving(true);
		try {
			// Save blocknote_document and trigger reindexing in background
			const response = await authenticatedFetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${params.search_space_id}/documents/${documentId}/save`,
				{
					method: "POST",
					headers: { "Content-Type": "application/json" },
					body: JSON.stringify({ blocknote_document: editorContent }),
				}
			);
			if (!response.ok) {
				const errorData = await response
					.json()
					.catch(() => ({ detail: "Failed to save document" }));
				throw new Error(errorData.detail || "Failed to save document");
			}
			setHasUnsavedChanges(false);
			toast.success("Document saved! Reindexing in background...");
			// Small delay before redirect to show success message
			setTimeout(() => {
				router.push(`/dashboard/${params.search_space_id}/documents`);
			}, 500);
		} catch (error) {
			console.error("Error saving document:", error);
			toast.error(
				error instanceof Error ? error.message : "Failed to save document. Please try again."
			);
		} finally {
			setSaving(false);
		}
	};

	// Back-navigation with a guard when there are unsaved edits
	const handleCancel = () => {
		if (hasUnsavedChanges) {
			if (confirm("You have unsaved changes. Are you sure you want to leave?")) {
				router.back();
			}
		} else {
			router.back();
		}
	};

	if (loading) {
		return (
			<div className="flex items-center justify-center min-h-[400px] p-6">
				<Card className="w-full max-w-md">
					<CardContent className="flex flex-col items-center justify-center py-12">
						<Loader2 className="h-12 w-12 text-primary animate-spin mb-4" />
						<p className="text-muted-foreground">Loading editor...</p>
					</CardContent>
				</Card>
			</div>
		);
	}

	if (error) {
		return (
			<div className="flex items-center justify-center min-h-[400px] p-6">
				<motion.div
					initial={{ opacity: 0, y: 20 }}
					animate={{ opacity: 1, y: 0 }}
					className="w-full max-w-md"
				>
					<Card className="border-destructive/50">
						<CardHeader>
							<div className="flex items-center gap-2">
								<AlertCircle className="h-5 w-5 text-destructive" />
								<CardTitle className="text-destructive">Error</CardTitle>
							</div>
							<CardDescription>{error}</CardDescription>
						</CardHeader>
						<CardContent>
							<Button onClick={() => router.back()} variant="outline" className="w-full">
								<X className="mr-2 h-4 w-4" />
								Go Back
							</Button>
						</CardContent>
					</Card>
				</motion.div>
			</div>
		);
	}

	if (!document) {
		return (
			<div className="flex items-center justify-center min-h-[400px] p-6">
				<Card className="w-full max-w-md">
					<CardContent className="flex flex-col items-center justify-center py-12">
						<FileText className="h-12 w-12 text-muted-foreground mb-4" />
						<p className="text-muted-foreground">Document not found</p>
					</CardContent>
				</Card>
			</div>
		);
	}

	return (
		<motion.div
			initial={{ opacity: 0 }}
			animate={{ opacity: 1 }}
			className="flex flex-col h-full w-full"
		>
			{/* Toolbar */}
			<div className="sticky top-0 z-40 flex h-16 shrink-0 items-center gap-4 border-b bg-background/95 backdrop-blur supports-backdrop-filter:bg-background/60 px-6">
				<div className="flex items-center gap-3 flex-1 min-w-0">
					<FileText className="h-5 w-5 text-muted-foreground shrink-0" />
					<div className="flex flex-col min-w-0">
						<h1 className="text-lg font-semibold truncate">{document.title}</h1>
						{hasUnsavedChanges && <p className="text-xs text-muted-foreground">Unsaved changes</p>}
					</div>
				</div>

				<Separator orientation="vertical" className="h-6" />

				<div className="flex items-center gap-2">
					<Button variant="outline" onClick={handleCancel} disabled={saving} className="gap-2">
						<X className="h-4 w-4" />
						Cancel
					</Button>
					<Button onClick={handleSave} disabled={saving} className="gap-2">
						{saving ? (
							<>
								<Loader2 className="h-4 w-4 animate-spin" />
								Saving...
							</>
						) : (
							<>
								<Save className="h-4 w-4" />
								Save & Exit
							</>
						)}
					</Button>
				</div>
			</div>

			{/* Editor Container */}
			<div className="flex-1 overflow-hidden relative">
				<div className="h-full w-full overflow-auto p-6">
					<div className="max-w-4xl mx-auto">
						<BlockNoteEditor initialContent={editorContent} onChange={setEditorContent} />
					</div>
				</div>
			</div>
		</motion.div>
	);
}

View file

@ -52,6 +52,12 @@ export default function DashboardLayout({
},
],
},
{
title: "Team",
url: `/dashboard/${search_space_id}/team`,
icon: "Users",
items: [],
},
{
title: "Settings",
url: `/dashboard/${search_space_id}/settings`,

View file

@ -1126,7 +1126,7 @@ function LogRowActions({ row, t }: { row: Row<Log>; t: (key: string) => string }
setIsDeleting(true);
try {
await deleteLog(log.id);
toast.success(t("log_deleted_success"));
// toast.success(t("log_deleted_success"));
await refreshLogs();
} catch (error) {
console.error("Error deleting log:", error);

View file

@ -1,20 +1,19 @@
"use client";
import { ArrowLeft, ArrowRight, Bot, CheckCircle, MessageSquare, Sparkles } from "lucide-react";
import { AnimatePresence, motion } from "motion/react";
import { FileText, MessageSquare, UserPlus, Users } from "lucide-react";
import { motion } from "motion/react";
import { useParams, useRouter } from "next/navigation";
import { useTranslations } from "next-intl";
import { useEffect, useRef, useState } from "react";
import { Logo } from "@/components/Logo";
import { CompletionStep } from "@/components/onboard/completion-step";
import { SetupLLMStep } from "@/components/onboard/setup-llm-step";
import { SetupPromptStep } from "@/components/onboard/setup-prompt-step";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Progress } from "@/components/ui/progress";
import { useCallback, useEffect, useRef, useState } from "react";
import { toast } from "sonner";
import { OnboardActionCard } from "@/components/onboard/onboard-action-card";
import { OnboardAdvancedSettings } from "@/components/onboard/onboard-advanced-settings";
import { OnboardHeader } from "@/components/onboard/onboard-header";
import { OnboardLLMSetup } from "@/components/onboard/onboard-llm-setup";
import { OnboardLoading } from "@/components/onboard/onboard-loading";
import { OnboardStats } from "@/components/onboard/onboard-stats";
import { useGlobalLLMConfigs, useLLMConfigs, useLLMPreferences } from "@/hooks/use-llm-configs";
const TOTAL_STEPS = 3;
import { getBearerToken, redirectToLogin } from "@/lib/auth-utils";
const OnboardPage = () => {
const t = useTranslations("onboard");
@ -28,10 +27,17 @@ const OnboardPage = () => {
preferences,
loading: preferencesLoading,
isOnboardingComplete,
updatePreferences,
refreshPreferences,
} = useLLMPreferences(searchSpaceId);
const [currentStep, setCurrentStep] = useState(1);
const [hasUserProgressed, setHasUserProgressed] = useState(false);
const [isAutoConfiguring, setIsAutoConfiguring] = useState(false);
const [autoConfigComplete, setAutoConfigComplete] = useState(false);
const [showAdvancedSettings, setShowAdvancedSettings] = useState(false);
const [showPromptSettings, setShowPromptSettings] = useState(false);
// Track if we've already attempted auto-configuration
const hasAttemptedAutoConfig = useRef(false);
// Track if onboarding was complete on initial mount
const wasCompleteOnMount = useRef<boolean | null>(null);
@ -39,12 +45,13 @@ const OnboardPage = () => {
// Check if user is authenticated
useEffect(() => {
const token = localStorage.getItem("surfsense_bearer_token");
const token = getBearerToken();
if (!token) {
router.push("/login");
// Save current path and redirect to login
redirectToLogin();
return;
}
}, [router]);
}, []);
// Capture onboarding state on first load
useEffect(() => {
@ -59,231 +66,215 @@ const OnboardPage = () => {
}
}, [preferencesLoading, configsLoading, globalConfigsLoading, isOnboardingComplete]);
// Track if user has progressed beyond step 1
// Redirect to dashboard if onboarding was already complete
useEffect(() => {
if (currentStep > 1) {
setHasUserProgressed(true);
}
}, [currentStep]);
// Redirect to dashboard if onboarding was already complete on mount (not during this session)
useEffect(() => {
// Only redirect if:
// 1. Onboarding was complete when page loaded
// 2. User hasn't progressed past step 1
// 3. All data is loaded
if (
wasCompleteOnMount.current === true &&
!hasUserProgressed &&
!preferencesLoading &&
!configsLoading &&
!globalConfigsLoading
) {
// Small delay to ensure the check is stable on initial load
const timer = setTimeout(() => {
router.push(`/dashboard/${searchSpaceId}`);
}, 300);
return () => clearTimeout(timer);
}
}, [
hasUserProgressed,
preferencesLoading,
configsLoading,
globalConfigsLoading,
router,
searchSpaceId,
]);
}, [preferencesLoading, configsLoading, globalConfigsLoading, router, searchSpaceId]);
const progress = (currentStep / TOTAL_STEPS) * 100;
const stepTitles = [t("setup_llm_configuration"), "Configure AI Responses", t("setup_complete")];
const stepDescriptions = [
t("configure_providers_and_assign_roles"),
"Customize how the AI responds to your queries (Optional)",
t("all_set"),
];
// User can proceed to step 2 if all roles are assigned
const canProceedToStep2 =
!preferencesLoading &&
preferences.long_context_llm_id &&
preferences.fast_llm_id &&
preferences.strategic_llm_id;
// User can always proceed from step 2 to step 3 (prompt config is optional)
const canProceedToStep3 = true;
const handleNext = () => {
if (currentStep < TOTAL_STEPS) {
setCurrentStep(currentStep + 1);
// Auto-configure LLM roles if global configs are available
const autoConfigureLLMs = useCallback(async () => {
if (hasAttemptedAutoConfig.current) return;
if (globalConfigs.length === 0) return;
if (isOnboardingComplete()) {
setAutoConfigComplete(true);
return;
}
};
const handlePrevious = () => {
if (currentStep > 1) {
setCurrentStep(currentStep - 1);
hasAttemptedAutoConfig.current = true;
setIsAutoConfiguring(true);
try {
const allConfigs = [...globalConfigs, ...llmConfigs];
if (allConfigs.length === 0) {
setIsAutoConfiguring(false);
return;
}
// Use first available config for all roles
const defaultConfigId = allConfigs[0].id;
const newPreferences = {
long_context_llm_id: defaultConfigId,
fast_llm_id: defaultConfigId,
strategic_llm_id: defaultConfigId,
};
const success = await updatePreferences(newPreferences);
if (success) {
await refreshPreferences();
setAutoConfigComplete(true);
toast.success("AI models configured automatically!", {
description: "You can customize these in advanced settings.",
});
}
} catch (error) {
console.error("Auto-configuration failed:", error);
} finally {
setIsAutoConfiguring(false);
}
};
}, [globalConfigs, llmConfigs, isOnboardingComplete, updatePreferences, refreshPreferences]);
if (configsLoading || preferencesLoading || globalConfigsLoading) {
// Trigger auto-configuration once data is loaded
useEffect(() => {
if (!configsLoading && !globalConfigsLoading && !preferencesLoading) {
autoConfigureLLMs();
}
}, [configsLoading, globalConfigsLoading, preferencesLoading, autoConfigureLLMs]);
const allConfigs = [...globalConfigs, ...llmConfigs];
const isReady = autoConfigComplete || isOnboardingComplete();
// Loading state
if (configsLoading || preferencesLoading || globalConfigsLoading || isAutoConfiguring) {
return (
<div className="flex flex-col items-center justify-center min-h-screen">
<Card className="w-[350px] bg-background/60 backdrop-blur-sm">
<CardContent className="flex flex-col items-center justify-center py-12">
<Bot className="h-12 w-12 text-primary animate-pulse mb-4" />
<p className="text-sm text-muted-foreground">{t("loading_config")}</p>
</CardContent>
</Card>
</div>
<OnboardLoading
title={isAutoConfiguring ? "Setting up your AI assistant..." : t("loading_config")}
subtitle={
isAutoConfiguring
? "Auto-configuring optimal settings for you"
: "Please wait while we load your configuration"
}
/>
);
}
// No configs available - show LLM setup
if (allConfigs.length === 0) {
return (
<OnboardLLMSetup
searchSpaceId={searchSpaceId}
title={t("welcome_title")}
configTitle={t("setup_llm_configuration")}
configDescription={t("configure_providers_and_assign_roles")}
onConfigCreated={refreshConfigs}
onConfigDeleted={refreshConfigs}
onPreferencesUpdated={refreshPreferences}
/>
);
}
// Main onboarding view
return (
<div className="min-h-screen bg-gradient-to-br from-background via-background to-muted/20 flex items-center justify-center p-4">
<motion.div
initial={{ opacity: 0, y: 20 }}
animate={{ opacity: 1, y: 0 }}
transition={{ duration: 0.5 }}
className="w-full max-w-4xl"
>
{/* Header */}
<div className="text-center mb-8">
<div className="flex items-center justify-center mb-4">
<Logo className="w-12 h-12 mr-3 rounded-full" />
<h1 className="text-3xl font-bold">{t("welcome_title")}</h1>
</div>
<p className="text-muted-foreground text-lg">{t("welcome_subtitle")}</p>
</div>
<div className="min-h-screen bg-background">
<div className="flex items-center justify-center min-h-screen p-4 md:p-8">
<motion.div
initial={{ opacity: 0 }}
animate={{ opacity: 1 }}
transition={{ duration: 0.6 }}
className="w-full max-w-5xl"
>
{/* Header */}
<OnboardHeader
title={t("welcome_title")}
subtitle={
isReady ? "You're all set! Choose what you'd like to do next." : t("welcome_subtitle")
}
isReady={isReady}
/>
{/* Progress */}
<Card className="mb-8 bg-background/60 backdrop-blur-sm">
<CardContent className="pt-6">
<div className="flex items-center justify-between mb-4">
<div className="text-sm font-medium">
{t("step_of", { current: currentStep, total: TOTAL_STEPS })}
</div>
<div className="text-sm text-muted-foreground">
{t("percent_complete", { percent: Math.round(progress) })}
</div>
</div>
<Progress value={progress} className="mb-4" />
<div className="grid grid-cols-3 gap-4">
{Array.from({ length: TOTAL_STEPS }, (_, i) => {
const stepNum = i + 1;
const isCompleted = stepNum < currentStep;
const isCurrent = stepNum === currentStep;
{/* Quick Stats */}
<OnboardStats
globalConfigsCount={globalConfigs.length}
userConfigsCount={llmConfigs.length}
/>
return (
<div key={stepNum} className="flex items-center space-x-2">
<div
className={`w-8 h-8 rounded-full flex items-center justify-center text-sm font-medium ${
isCompleted
? "bg-primary text-primary-foreground"
: isCurrent
? "bg-primary/20 text-primary border-2 border-primary"
: "bg-muted text-muted-foreground"
}`}
>
{isCompleted ? <CheckCircle className="w-4 h-4" /> : stepNum}
</div>
<div className="flex-1 min-w-0">
<p
className={`text-sm font-medium truncate ${
isCurrent ? "text-foreground" : "text-muted-foreground"
}`}
>
{stepTitles[i]}
</p>
</div>
</div>
);
})}
</div>
</CardContent>
</Card>
{/* Action Cards */}
<motion.div
initial={{ opacity: 0 }}
animate={{ opacity: 1 }}
transition={{ delay: 0.6 }}
className="grid grid-cols-1 md:grid-cols-3 gap-6 mb-10"
>
<OnboardActionCard
title="Manage Team"
description="Invite team members and collaborate on your search space"
icon={Users}
features={[
"Invite team members",
"Assign roles & permissions",
"Collaborate together",
]}
buttonText="Manage Team"
onClick={() => router.push(`/dashboard/${searchSpaceId}/team`)}
colorScheme="emerald"
delay={0.7}
/>
{/* Step Content */}
<Card className="min-h-[500px] bg-background/60 backdrop-blur-sm">
<CardHeader className="text-center">
<CardTitle className="text-2xl flex items-center justify-center gap-2">
{currentStep === 1 && <Sparkles className="w-6 h-6" />}
{currentStep === 2 && <MessageSquare className="w-6 h-6" />}
{currentStep === 3 && <CheckCircle className="w-6 h-6" />}
{stepTitles[currentStep - 1]}
</CardTitle>
<CardDescription className="text-base">
{stepDescriptions[currentStep - 1]}
</CardDescription>
</CardHeader>
<CardContent>
<AnimatePresence mode="wait">
<motion.div
key={currentStep}
initial={{ opacity: 0, x: 20 }}
animate={{ opacity: 1, x: 0 }}
exit={{ opacity: 0, x: -20 }}
transition={{ duration: 0.3 }}
>
{currentStep === 1 && (
<SetupLLMStep
searchSpaceId={searchSpaceId}
onConfigCreated={refreshConfigs}
onConfigDeleted={refreshConfigs}
onPreferencesUpdated={refreshPreferences}
/>
)}
{currentStep === 2 && (
<SetupPromptStep searchSpaceId={searchSpaceId} onComplete={handleNext} />
)}
{currentStep === 3 && <CompletionStep searchSpaceId={searchSpaceId} />}
</motion.div>
</AnimatePresence>
</CardContent>
</Card>
<OnboardActionCard
title="Add Sources"
description="Connect your data sources to start building your knowledge base"
icon={FileText}
features={[
"Connect documents and files",
"Import from various sources",
"Build your knowledge base",
]}
buttonText="Add Sources"
onClick={() => router.push(`/dashboard/${searchSpaceId}/sources/add`)}
colorScheme="blue"
delay={0.8}
/>
{/* Navigation */}
<div className="flex justify-between mt-8">
{currentStep === 1 ? (
<>
<div />
<Button
onClick={handleNext}
disabled={!canProceedToStep2}
className="flex items-center gap-2"
<OnboardActionCard
title="Start Chatting"
description="Jump right into the AI researcher and start asking questions"
icon={MessageSquare}
features={[
"AI-powered conversations",
"Research and explore topics",
"Get instant insights",
]}
buttonText="Start Chatting"
onClick={() => router.push(`/dashboard/${searchSpaceId}/researcher`)}
colorScheme="violet"
delay={0.9}
/>
</motion.div>
{/* Advanced Settings */}
<OnboardAdvancedSettings
searchSpaceId={searchSpaceId}
showLLMSettings={showAdvancedSettings}
setShowLLMSettings={setShowAdvancedSettings}
showPromptSettings={showPromptSettings}
setShowPromptSettings={setShowPromptSettings}
onConfigCreated={refreshConfigs}
onConfigDeleted={refreshConfigs}
onPreferencesUpdated={refreshPreferences}
/>
{/* Footer */}
<motion.div
initial={{ opacity: 0 }}
animate={{ opacity: 1 }}
transition={{ delay: 1.1 }}
className="text-center mt-10 text-muted-foreground text-sm"
>
<p>
You can always adjust these settings later in{" "}
<button
type="button"
onClick={() => router.push(`/dashboard/${searchSpaceId}/settings`)}
className="text-primary hover:underline underline-offset-2 transition-colors"
>
{t("next")}
<ArrowRight className="w-4 h-4" />
</Button>
</>
) : currentStep === 2 ? (
<>
<Button
variant="outline"
onClick={handlePrevious}
className="flex items-center gap-2"
>
<ArrowLeft className="w-4 h-4" />
{t("previous")}
</Button>
{/* Next button is handled by SetupPromptStep component */}
<div />
</>
) : (
<>
<Button
variant="outline"
onClick={handlePrevious}
className="flex items-center gap-2"
>
<ArrowLeft className="w-4 h-4" />
{t("previous")}
</Button>
<div />
</>
)}
</div>
</motion.div>
Settings
</button>
</p>
</motion.div>
</motion.div>
</div>
</div>
);
};

View file

@ -17,13 +17,14 @@ export default function ResearcherPage() {
const { search_space_id } = useParams();
const router = useRouter();
const hasSetInitialConnectors = useRef(false);
const hasInitiatedResponse = useRef<string | null>(null);
const activeChatId = useAtomValue(activeChatIdAtom);
const { data: activeChatState, isFetching: isChatLoading } = useAtomValue(activeChatAtom);
const { mutateAsync: createChat } = useAtomValue(createChatMutationAtom);
const { mutateAsync: updateChat } = useAtomValue(updateChatMutationAtom);
const isNewChat = !activeChatId;
// Reset the flag when chat ID changes
// Reset the flag when chat ID changes (but not hasInitiatedResponse - we need to remember if we already initiated)
useEffect(() => {
hasSetInitialConnectors.current = false;
}, [activeChatId]);
@ -167,10 +168,14 @@ export default function ResearcherPage() {
if (chatData.messages && Array.isArray(chatData.messages)) {
if (chatData.messages.length === 1 && chatData.messages[0].role === "user") {
// Single user message - append to trigger LLM response
handler.append({
role: "user",
content: chatData.messages[0].content,
});
// Only if we haven't already initiated for this chat and handler doesn't have messages yet
if (hasInitiatedResponse.current !== activeChatId && handler.messages.length === 0) {
hasInitiatedResponse.current = activeChatId;
handler.append({
role: "user",
content: chatData.messages[0].content,
});
}
} else if (chatData.messages.length > 1) {
// Multiple messages - set them all
handler.setMessages(chatData.messages);

File diff suppressed because it is too large Load diff

View file

@ -1,28 +1,28 @@
"use client";
import { Loader2 } from "lucide-react";
import { useRouter } from "next/navigation";
import { useEffect, useState } from "react";
import { AnnouncementBanner } from "@/components/announcement-banner";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { getBearerToken, redirectToLogin } from "@/lib/auth-utils";
interface DashboardLayoutProps {
children: React.ReactNode;
}
export default function DashboardLayout({ children }: DashboardLayoutProps) {
const router = useRouter();
const [isCheckingAuth, setIsCheckingAuth] = useState(true);
useEffect(() => {
// Check if user is authenticated
const token = localStorage.getItem("surfsense_bearer_token");
const token = getBearerToken();
if (!token) {
router.push("/login");
// Save current path and redirect to login
redirectToLogin();
return;
}
setIsCheckingAuth(false);
}, [router]);
}, []);
// Show loading screen while checking authentication
if (isCheckingAuth) {

View file

@ -1,6 +1,6 @@
"use client";
import { AlertCircle, Loader2, Plus, Search, Trash2 } from "lucide-react";
import { AlertCircle, Loader2, Plus, Search, Trash2, UserCheck, Users } from "lucide-react";
import { motion, type Variants } from "motion/react";
import Image from "next/image";
import Link from "next/link";
@ -22,6 +22,7 @@ import {
AlertDialogTitle,
AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import {
Card,
@ -35,6 +36,7 @@ import { Spotlight } from "@/components/ui/spotlight";
import { Tilt } from "@/components/ui/tilt";
import { useUser } from "@/hooks";
import { useSearchSpaces } from "@/hooks/use-search-spaces";
import { authenticatedFetch } from "@/lib/auth-utils";
/**
* Formats a date string into a readable format
@ -172,14 +174,9 @@ const DashboardPage = () => {
const handleDeleteSearchSpace = async (id: number) => {
// Send DELETE request to the API
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${id}`,
{
method: "DELETE",
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
}
{ method: "DELETE" }
);
if (!response.ok) {
@ -308,16 +305,30 @@ const DashboardPage = () => {
>
<div className="flex flex-1 flex-col justify-between p-1">
<div>
<h3 className="font-medium text-lg">{space.name}</h3>
<div className="flex items-center gap-2">
<h3 className="font-medium text-lg">{space.name}</h3>
{!space.is_owner && (
<Badge variant="secondary" className="text-xs font-normal">
{t("shared")}
</Badge>
)}
</div>
<p className="mt-1 text-sm text-muted-foreground">
{space.description}
</p>
</div>
<div className="mt-4 text-xs text-muted-foreground">
{/* <span>{space.title}</span> */}
<div className="mt-4 flex items-center justify-between text-xs text-muted-foreground">
<span>
{t("created")} {formatDate(space.created_at)}
</span>
<div className="flex items-center gap-1">
{space.is_owner ? (
<UserCheck className="h-3.5 w-3.5" />
) : (
<Users className="h-3.5 w-3.5" />
)}
<span>{space.member_count}</span>
</div>
</div>
</div>
</Link>

View file

@ -4,19 +4,21 @@ import { motion } from "motion/react";
import { useRouter } from "next/navigation";
import { toast } from "sonner";
import { SearchSpaceForm } from "@/components/search-space-form";
import { authenticatedFetch } from "@/lib/auth-utils";
export default function SearchSpacesPage() {
const router = useRouter();
const handleCreateSearchSpace = async (data: { name: string; description: string }) => {
const handleCreateSearchSpace = async (data: { name: string; description?: string }) => {
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
body: JSON.stringify(data),
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
name: data.name,
description: data.description || "",
}),
}
);
@ -31,7 +33,8 @@ export default function SearchSpacesPage() {
description: `"${data.name}" has been created.`,
});
router.push(`/dashboard`);
// Redirect to the newly created search space's onboarding
router.push(`/dashboard/${result.id}/onboard`);
return result;
} catch (error) {

View file

@ -27,7 +27,7 @@
--accent: oklch(0.97 0 0);
--accent-foreground: oklch(0.205 0 0);
--destructive: oklch(0.577 0.245 27.325);
--destructive-foreground: oklch(0.577 0.245 27.325);
--destructive-foreground: oklch(0.985 0 0);
--border: oklch(0.922 0 0);
--input: oklch(0.922 0 0);
--ring: oklch(0.708 0 0);
@ -63,8 +63,8 @@
--muted-foreground: oklch(0.708 0 0);
--accent: oklch(0.269 0 0);
--accent-foreground: oklch(0.985 0 0);
--destructive: oklch(0.396 0.141 25.723);
--destructive-foreground: oklch(0.637 0.237 25.331);
--destructive: oklch(0.577 0.245 27.325);
--destructive-foreground: oklch(0.985 0 0);
--border: oklch(0.269 0 0);
--input: oklch(0.269 0 0);
--ring: oklch(0.439 0 0);

View file

@ -0,0 +1,340 @@
"use client";
import {
AlertCircle,
ArrowRight,
CheckCircle2,
Clock,
Loader2,
LogIn,
Shield,
Sparkles,
Users,
XCircle,
} from "lucide-react";
import { motion } from "motion/react";
import Image from "next/image";
import Link from "next/link";
import { useParams, useRouter } from "next/navigation";
import { use, useEffect, useState } from "react";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import { useInviteInfo } from "@/hooks/use-rbac";
import { getBearerToken } from "@/lib/auth-utils";
export default function InviteAcceptPage() {
const params = useParams();
const router = useRouter();
const inviteCode = params.invite_code as string;
const { inviteInfo, loading, acceptInvite } = useInviteInfo(inviteCode);
const [accepting, setAccepting] = useState(false);
const [accepted, setAccepted] = useState(false);
const [acceptedData, setAcceptedData] = useState<{
search_space_id: number;
search_space_name: string;
role_name: string;
} | null>(null);
const [error, setError] = useState<string | null>(null);
const [isLoggedIn, setIsLoggedIn] = useState<boolean | null>(null);
// Check if user is logged in
useEffect(() => {
if (typeof window !== "undefined") {
const token = getBearerToken();
setIsLoggedIn(!!token);
}
}, []);
const handleAccept = async () => {
setAccepting(true);
setError(null);
try {
const result = await acceptInvite();
if (result) {
setAccepted(true);
setAcceptedData(result);
}
} catch (err: any) {
setError(err.message || "Failed to accept invite");
} finally {
setAccepting(false);
}
};
const handleLoginRedirect = () => {
// Store the invite code to redirect back after login
localStorage.setItem("pending_invite_code", inviteCode);
// Save the current invite page URL so we can return after authentication
localStorage.setItem("surfsense_redirect_path", `/invite/${inviteCode}`);
// Redirect to login (we manually set the path above since invite pages need special handling)
window.location.href = "/login";
};
// Check for pending invite after login
useEffect(() => {
if (isLoggedIn && typeof window !== "undefined") {
const pendingInvite = localStorage.getItem("pending_invite_code");
if (pendingInvite === inviteCode) {
localStorage.removeItem("pending_invite_code");
// Auto-accept the invite after redirect
handleAccept();
}
}
}, [isLoggedIn, inviteCode]);
return (
<div className="min-h-screen flex items-center justify-center p-4 bg-gradient-to-br from-background via-background to-primary/5">
{/* Background decoration */}
<div className="absolute inset-0 overflow-hidden pointer-events-none">
<div className="absolute -top-1/2 -right-1/2 w-full h-full bg-gradient-to-bl from-primary/10 via-transparent to-transparent rounded-full blur-3xl" />
<div className="absolute -bottom-1/2 -left-1/2 w-full h-full bg-gradient-to-tr from-violet-500/10 via-transparent to-transparent rounded-full blur-3xl" />
</div>
<motion.div
initial={{ opacity: 0, y: 20, scale: 0.95 }}
animate={{ opacity: 1, y: 0, scale: 1 }}
transition={{ duration: 0.5, ease: "easeOut" }}
className="w-full max-w-md relative z-10"
>
<Card className="border-none shadow-2xl bg-card/80 backdrop-blur-xl">
{loading || isLoggedIn === null ? (
<CardContent className="flex flex-col items-center justify-center py-16">
<motion.div
animate={{ rotate: 360 }}
transition={{ duration: 1, repeat: Infinity, ease: "linear" }}
>
<Loader2 className="h-12 w-12 text-primary" />
</motion.div>
<p className="mt-4 text-muted-foreground">Loading invite details...</p>
</CardContent>
) : accepted && acceptedData ? (
<>
<CardHeader className="text-center pb-4">
<motion.div
initial={{ scale: 0 }}
animate={{ scale: 1 }}
transition={{ type: "spring", stiffness: 200, damping: 15 }}
className="mx-auto mb-4 h-20 w-20 rounded-full bg-gradient-to-br from-emerald-500/20 to-emerald-500/5 flex items-center justify-center ring-4 ring-emerald-500/20"
>
<CheckCircle2 className="h-10 w-10 text-emerald-500" />
</motion.div>
<CardTitle className="text-2xl">Welcome to the team!</CardTitle>
<CardDescription>
You've successfully joined {acceptedData.search_space_name}
</CardDescription>
</CardHeader>
<CardContent className="space-y-4">
<div className="bg-muted/50 rounded-lg p-4 space-y-3">
<div className="flex items-center gap-3">
<div className="h-10 w-10 rounded-lg bg-primary/10 flex items-center justify-center">
<Users className="h-5 w-5 text-primary" />
</div>
<div>
<p className="font-medium">{acceptedData.search_space_name}</p>
<p className="text-sm text-muted-foreground">Search Space</p>
</div>
</div>
<div className="flex items-center gap-3">
<div className="h-10 w-10 rounded-lg bg-violet-500/10 flex items-center justify-center">
<Shield className="h-5 w-5 text-violet-500" />
</div>
<div>
<p className="font-medium">{acceptedData.role_name}</p>
<p className="text-sm text-muted-foreground">Your Role</p>
</div>
</div>
</div>
</CardContent>
<CardFooter>
<Button
className="w-full gap-2"
onClick={() => router.push(`/dashboard/${acceptedData.search_space_id}`)}
>
Go to Search Space
<ArrowRight className="h-4 w-4" />
</Button>
</CardFooter>
</>
) : !inviteInfo?.is_valid ? (
<>
<CardHeader className="text-center pb-4">
<motion.div
initial={{ scale: 0 }}
animate={{ scale: 1 }}
transition={{ type: "spring", stiffness: 200, damping: 15 }}
className="mx-auto mb-4 h-20 w-20 rounded-full bg-gradient-to-br from-destructive/20 to-destructive/5 flex items-center justify-center ring-4 ring-destructive/20"
>
<XCircle className="h-10 w-10 text-destructive" />
</motion.div>
<CardTitle className="text-2xl">Invalid Invite</CardTitle>
<CardDescription>
{inviteInfo?.message || "This invite link is no longer valid"}
</CardDescription>
</CardHeader>
<CardContent className="text-center">
<p className="text-sm text-muted-foreground">
The invite may have expired, reached its maximum uses, or been revoked by the
owner.
</p>
</CardContent>
<CardFooter>
<Button
variant="outline"
className="w-full"
onClick={() => router.push("/dashboard")}
>
Go to Dashboard
</Button>
</CardFooter>
</>
) : !isLoggedIn ? (
<>
<CardHeader className="text-center pb-4">
<motion.div
initial={{ scale: 0 }}
animate={{ scale: 1 }}
transition={{ type: "spring", stiffness: 200, damping: 15 }}
className="mx-auto mb-4 h-20 w-20 rounded-full bg-gradient-to-br from-primary/20 to-primary/5 flex items-center justify-center ring-4 ring-primary/20"
>
<Sparkles className="h-10 w-10 text-primary" />
</motion.div>
<CardTitle className="text-2xl">You're Invited!</CardTitle>
<CardDescription>
Sign in to join {inviteInfo?.search_space_name || "this search space"}
</CardDescription>
</CardHeader>
<CardContent className="space-y-4">
<div className="bg-muted/50 rounded-lg p-4 space-y-3">
<div className="flex items-center gap-3">
<div className="h-10 w-10 rounded-lg bg-primary/10 flex items-center justify-center">
<Users className="h-5 w-5 text-primary" />
</div>
<div>
<p className="font-medium">{inviteInfo?.search_space_name}</p>
<p className="text-sm text-muted-foreground">Search Space</p>
</div>
</div>
{inviteInfo?.role_name && (
<div className="flex items-center gap-3">
<div className="h-10 w-10 rounded-lg bg-violet-500/10 flex items-center justify-center">
<Shield className="h-5 w-5 text-violet-500" />
</div>
<div>
<p className="font-medium">{inviteInfo.role_name}</p>
<p className="text-sm text-muted-foreground">Role you'll receive</p>
</div>
</div>
)}
</div>
</CardContent>
<CardFooter>
<Button className="w-full gap-2" onClick={handleLoginRedirect}>
<LogIn className="h-4 w-4" />
Sign in to Accept
</Button>
</CardFooter>
</>
) : (
<>
<CardHeader className="text-center pb-4">
<motion.div
initial={{ scale: 0 }}
animate={{ scale: 1 }}
transition={{ type: "spring", stiffness: 200, damping: 15 }}
className="mx-auto mb-4 h-20 w-20 rounded-full bg-gradient-to-br from-primary/20 to-primary/5 flex items-center justify-center ring-4 ring-primary/20"
>
<Sparkles className="h-10 w-10 text-primary" />
</motion.div>
<CardTitle className="text-2xl">You're Invited!</CardTitle>
<CardDescription>
Accept this invite to join {inviteInfo?.search_space_name || "this search space"}
</CardDescription>
</CardHeader>
<CardContent className="space-y-4">
<div className="bg-muted/50 rounded-lg p-4 space-y-3">
<div className="flex items-center gap-3">
<div className="h-10 w-10 rounded-lg bg-primary/10 flex items-center justify-center">
<Users className="h-5 w-5 text-primary" />
</div>
<div>
<p className="font-medium">{inviteInfo?.search_space_name}</p>
<p className="text-sm text-muted-foreground">Search Space</p>
</div>
</div>
{inviteInfo?.role_name && (
<div className="flex items-center gap-3">
<div className="h-10 w-10 rounded-lg bg-violet-500/10 flex items-center justify-center">
<Shield className="h-5 w-5 text-violet-500" />
</div>
<div>
<p className="font-medium">{inviteInfo.role_name}</p>
<p className="text-sm text-muted-foreground">Role you'll receive</p>
</div>
</div>
)}
</div>
{error && (
<motion.div
initial={{ opacity: 0, y: -10 }}
animate={{ opacity: 1, y: 0 }}
className="flex items-center gap-2 p-3 bg-destructive/10 text-destructive rounded-lg text-sm"
>
<AlertCircle className="h-4 w-4 shrink-0" />
{error}
</motion.div>
)}
</CardContent>
<CardFooter className="flex gap-2">
<Button
variant="outline"
className="flex-1"
onClick={() => router.push("/dashboard")}
>
Cancel
</Button>
<Button className="flex-1 gap-2" onClick={handleAccept} disabled={accepting}>
{accepting ? (
<>
<Loader2 className="h-4 w-4 animate-spin" />
Accepting...
</>
) : (
<>
<CheckCircle2 className="h-4 w-4" />
Accept Invite
</>
)}
</Button>
</CardFooter>
</>
)}
</Card>
{/* Branding */}
<motion.div
initial={{ opacity: 0 }}
animate={{ opacity: 1 }}
transition={{ delay: 0.3 }}
className="mt-6 text-center"
>
<Link
href="/"
className="inline-flex items-center gap-2 text-muted-foreground hover:text-foreground transition-colors"
>
<Image src="/icon-128.png" alt="SurfSense" width={24} height={24} className="rounded" />
<span className="text-sm font-medium">SurfSense</span>
</Link>
</motion.div>
</motion.div>
</div>
);
}

View file

@ -7,6 +7,7 @@ import type {
UpdateChatRequest,
} from "@/contracts/types/chat.types";
import { chatsApiService } from "@/lib/apis/chats-api.service";
import { getBearerToken } from "@/lib/auth-utils";
import { cacheKeys } from "@/lib/query-client/cache-keys";
import { queryClient } from "@/lib/query-client/client";
import { activeSearchSpaceIdAtom } from "../seach-spaces/seach-space-queries.atom";
@ -14,7 +15,7 @@ import { globalChatsQueryParamsAtom } from "./ui.atoms";
export const deleteChatMutationAtom = atomWithMutation((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
const authToken = localStorage.getItem("surfsense_bearer_token");
const authToken = getBearerToken();
const chatsQueryParams = get(globalChatsQueryParamsAtom);
return {
@ -38,7 +39,7 @@ export const deleteChatMutationAtom = atomWithMutation((get) => {
export const createChatMutationAtom = atomWithMutation((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
const authToken = localStorage.getItem("surfsense_bearer_token");
const authToken = getBearerToken();
const chatsQueryParams = get(globalChatsQueryParamsAtom);
return {
@ -58,7 +59,7 @@ export const createChatMutationAtom = atomWithMutation((get) => {
export const updateChatMutationAtom = atomWithMutation((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
const authToken = localStorage.getItem("surfsense_bearer_token");
const authToken = getBearerToken();
const chatsQueryParams = get(globalChatsQueryParamsAtom);
return {

View file

@ -2,12 +2,13 @@ import { atomWithQuery } from "jotai-tanstack-query";
import { activeSearchSpaceIdAtom } from "@/atoms/seach-spaces/seach-space-queries.atom";
import { chatsApiService } from "@/lib/apis/chats-api.service";
import { podcastsApiService } from "@/lib/apis/podcasts-api.service";
import { getBearerToken } from "@/lib/auth-utils";
import { cacheKeys } from "@/lib/query-client/cache-keys";
import { activeChatIdAtom, globalChatsQueryParamsAtom } from "./ui.atoms";
export const activeChatAtom = atomWithQuery((get) => {
const activeChatId = get(activeChatIdAtom);
const authToken = localStorage.getItem("surfsense_bearer_token");
const authToken = getBearerToken();
return {
queryKey: cacheKeys.chats.activeChat(activeChatId ?? ""),
@ -32,7 +33,7 @@ export const activeChatAtom = atomWithQuery((get) => {
export const chatsAtom = atomWithQuery((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
const authToken = localStorage.getItem("surfsense_bearer_token");
const authToken = getBearerToken();
const queryParams = get(globalChatsQueryParamsAtom);
return {

View file

@ -7,13 +7,14 @@ import type {
Podcast,
} from "@/contracts/types/podcast.types";
import { podcastsApiService } from "@/lib/apis/podcasts-api.service";
import { getBearerToken } from "@/lib/auth-utils";
import { cacheKeys } from "@/lib/query-client/cache-keys";
import { queryClient } from "@/lib/query-client/client";
import { globalPodcastsQueryParamsAtom } from "./ui.atoms";
export const deletePodcastMutationAtom = atomWithMutation((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
const authToken = localStorage.getItem("surfsense_bearer_token");
const authToken = getBearerToken();
const podcastsQueryParams = get(globalPodcastsQueryParamsAtom);
return {
@ -37,7 +38,7 @@ export const deletePodcastMutationAtom = atomWithMutation((get) => {
export const generatePodcastMutationAtom = atomWithMutation((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
const authToken = localStorage.getItem("surfsense_bearer_token");
const authToken = getBearerToken();
const podcastsQueryParams = get(globalPodcastsQueryParamsAtom);
return {

View file

@ -0,0 +1,68 @@
"use client";
import { useTheme } from "next-themes";
import { useEffect, useMemo, useRef } from "react";
import "@blocknote/core/fonts/inter.css";
import "@blocknote/mantine/style.css";
import { BlockNoteView } from "@blocknote/mantine";
import { useCreateBlockNote } from "@blocknote/react";
interface BlockNoteEditorProps {
initialContent?: any;
onChange?: (content: any) => void;
}
export default function BlockNoteEditor({ initialContent, onChange }: BlockNoteEditorProps) {
const { resolvedTheme } = useTheme();
// Track the initial content to prevent re-initialization
const initialContentRef = useRef<any>(null);
const isInitializedRef = useRef(false);
// Creates a new editor instance - only use initialContent on first render
const editor = useCreateBlockNote({
initialContent: initialContentRef.current === null ? initialContent || undefined : undefined,
});
// Store initial content on first render only
useEffect(() => {
if (initialContent && initialContentRef.current === null) {
initialContentRef.current = initialContent;
isInitializedRef.current = true;
}
}, [initialContent]);
// Call onChange when document changes (but don't update from props)
useEffect(() => {
if (!onChange || !editor || !isInitializedRef.current) return;
const handleChange = () => {
onChange(editor.document);
};
// Subscribe to document changes
const unsubscribe = editor.onChange(handleChange);
return () => {
unsubscribe();
};
}, [editor, onChange]);
// Determine theme for BlockNote with custom dark mode background
const blockNoteTheme = useMemo(() => {
if (resolvedTheme === "dark") {
// Custom dark theme - only override editor background, let BlockNote handle the rest
return {
colors: {
editor: {
background: "#0A0A0A", // Custom dark background
},
},
};
}
return "light" as const;
}, [resolvedTheme]);
// Renders the editor instance
return <BlockNoteView editor={editor} theme={blockNoteTheme} />;
}

View file

@ -0,0 +1,6 @@
"use client";
import dynamic from "next/dynamic";
// Dynamically import BlockNote editor with SSR disabled
export const BlockNoteEditor = dynamic(() => import("./BlockNoteEditor"), { ssr: false });

View file

@ -2,22 +2,25 @@
import { useRouter, useSearchParams } from "next/navigation";
import { useEffect } from "react";
import { getAndClearRedirectPath, setBearerToken } from "@/lib/auth-utils";
interface TokenHandlerProps {
redirectPath?: string; // Path to redirect after storing token
redirectPath?: string; // Default path to redirect after storing token (if no saved path)
tokenParamName?: string; // Name of the URL parameter containing the token
storageKey?: string; // Key to use when storing in localStorage
storageKey?: string; // Key to use when storing in localStorage (kept for backwards compatibility)
}
/**
* Client component that extracts a token from URL parameters and stores it in localStorage
* After storing the token, it redirects the user back to the page they were on before
* being redirected to login (if available), or to the default redirectPath.
*
* @param redirectPath - Path to redirect after storing token (default: '/')
* @param redirectPath - Default path to redirect after storing token (default: '/dashboard')
* @param tokenParamName - Name of the URL parameter containing the token (default: 'token')
* @param storageKey - Key to use when storing in localStorage (default: 'auth_token')
* @param storageKey - Key to use when storing in localStorage (default: 'surfsense_bearer_token')
*/
const TokenHandler = ({
redirectPath = "/",
redirectPath = "/dashboard",
tokenParamName = "token",
storageKey = "surfsense_bearer_token",
}: TokenHandlerProps) => {
@ -33,14 +36,22 @@ const TokenHandler = ({
if (token) {
try {
// Store token in localStorage
// Store token in localStorage using both methods for compatibility
localStorage.setItem(storageKey, token);
// console.log(`Token stored in localStorage with key: ${storageKey}`);
setBearerToken(token);
// Redirect to specified path
router.push(redirectPath);
// Check if there's a saved redirect path from before the auth flow
const savedRedirectPath = getAndClearRedirectPath();
// Use the saved path if available, otherwise use the default redirectPath
const finalRedirectPath = savedRedirectPath || redirectPath;
// Redirect to the appropriate path
router.push(finalRedirectPath);
} catch (error) {
console.error("Error storing token in localStorage:", error);
// Even if there's an error, try to redirect to the default path
router.push(redirectPath);
}
}
}, [searchParams, tokenParamName, storageKey, redirectPath, router]);

View file

@ -1,8 +1,8 @@
"use client";
import { ChatInput } from "@llamaindex/chat-ui";
import { Brain, Check, FolderOpen, Minus, Plus, Zap } from "lucide-react";
import { useParams } from "next/navigation";
import { Brain, Check, FolderOpen, Minus, Plus, PlusCircle, Zap } from "lucide-react";
import { useParams, useRouter } from "next/navigation";
import React, { Suspense, useCallback, useState } from "react";
import { DocumentsDataTable } from "@/components/chat/DocumentsDataTable";
import { Badge } from "@/components/ui/badge";
@ -115,6 +115,7 @@ const ConnectorSelector = React.memo(
selectedConnectors?: string[];
}) => {
const { search_space_id } = useParams();
const router = useRouter();
const [isOpen, setIsOpen] = useState(false);
// Fetch immediately (not lazy) so the button can show the correct count
@ -247,9 +248,19 @@ const ConnectorSelector = React.memo(
<Brain className="h-8 w-8 text-muted-foreground" />
</div>
<h4 className="text-sm font-medium mb-1">No sources found</h4>
<p className="text-xs text-muted-foreground max-w-xs">
<p className="text-xs text-muted-foreground max-w-xs mb-4">
Add documents or configure search connectors for this search space
</p>
<Button
onClick={() => {
setIsOpen(false);
router.push(`/dashboard/${search_space_id}/sources/add`);
}}
className="gap-2"
>
<PlusCircle className="h-4 w-4" />
Add Sources
</Button>
</div>
) : (
<>

View file

@ -3,7 +3,7 @@
import { useAtomValue } from "jotai";
import { usePathname } from "next/navigation";
import { useTranslations } from "next-intl";
import React, { useEffect } from "react";
import React, { useEffect, useState } from "react";
import { activeChatAtom } from "@/atoms/chats/chat-query.atoms";
import {
Breadcrumb,
@ -14,6 +14,7 @@ import {
BreadcrumbSeparator,
} from "@/components/ui/breadcrumb";
import { useSearchSpace } from "@/hooks/use-search-space";
import { authenticatedFetch, getBearerToken } from "@/lib/auth-utils";
interface BreadcrumbItemInterface {
label: string;
@ -34,6 +35,36 @@ export function DashboardBreadcrumb() {
autoFetch: !!searchSpaceId,
});
// State to store document title for editor breadcrumb
const [documentTitle, setDocumentTitle] = useState<string | null>(null);
// Fetch document title when on editor page
useEffect(() => {
if (segments[2] === "editor" && segments[3] && searchSpaceId) {
const documentId = segments[3];
const token = getBearerToken();
if (token) {
authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/documents/${documentId}/editor-content`,
{ method: "GET" }
)
.then((res) => res.json())
.then((data) => {
if (data.title) {
setDocumentTitle(data.title);
}
})
.catch(() => {
// If fetch fails, just use the document ID
setDocumentTitle(null);
});
}
} else {
setDocumentTitle(null);
}
}, [segments, searchSpaceId]);
// Parse the pathname to create breadcrumb items
const generateBreadcrumbs = (path: string): BreadcrumbItemInterface[] => {
const segments = path.split("/").filter(Boolean);
@ -66,6 +97,7 @@ export function DashboardBreadcrumb() {
logs: t("logs"),
chats: t("chats"),
settings: t("settings"),
editor: t("editor"),
};
sectionLabel = sectionLabels[section] || sectionLabel;
@ -73,7 +105,21 @@ export function DashboardBreadcrumb() {
// Handle sub-sections
if (segments[3]) {
const subSection = segments[3];
let subSectionLabel = subSection.charAt(0).toUpperCase() + subSection.slice(1);
// Handle editor sub-sections (document ID)
if (section === "editor") {
const documentLabel = documentTitle || subSection;
breadcrumbs.push({
label: t("documents"),
href: `/dashboard/${segments[1]}/documents`,
});
breadcrumbs.push({
label: sectionLabel,
href: `/dashboard/${segments[1]}/documents`,
});
breadcrumbs.push({ label: documentLabel });
return breadcrumbs;
}
// Handle sources sub-sections
if (section === "sources") {
@ -81,7 +127,7 @@ export function DashboardBreadcrumb() {
add: "Add Sources",
};
const sourceLabel = sourceLabels[subSection] || subSectionLabel;
const sourceLabel = sourceLabels[subSection] || subSection;
breadcrumbs.push({
label: "Sources",
href: `/dashboard/${segments[1]}/sources`,
@ -98,7 +144,7 @@ export function DashboardBreadcrumb() {
webpage: t("add_webpages"),
};
const documentLabel = documentLabels[subSection] || subSectionLabel;
const documentLabel = documentLabels[subSection] || subSection;
breadcrumbs.push({
label: t("documents"),
href: `/dashboard/${segments[1]}/documents`,
@ -160,7 +206,7 @@ export function DashboardBreadcrumb() {
manage: t("manage_connectors"),
};
const connectorLabel = connectorLabels[subSection] || subSectionLabel;
const connectorLabel = connectorLabels[subSection] || subSection;
breadcrumbs.push({
label: t("connectors"),
href: `/dashboard/${segments[1]}/connectors`,
@ -170,6 +216,7 @@ export function DashboardBreadcrumb() {
}
// Handle other sub-sections
let subSectionLabel = subSection.charAt(0).toUpperCase() + subSection.slice(1);
const subSectionLabels: Record<string, string> = {
upload: t("upload_documents"),
youtube: t("add_youtube"),

View file

@ -15,9 +15,17 @@ interface JsonMetadataViewerProps {
title: string;
metadata: any;
trigger?: React.ReactNode;
open?: boolean;
onOpenChange?: (open: boolean) => void;
}
export function JsonMetadataViewer({ title, metadata, trigger }: JsonMetadataViewerProps) {
export function JsonMetadataViewer({
title,
metadata,
trigger,
open,
onOpenChange,
}: JsonMetadataViewerProps) {
// Ensure metadata is a valid object
const jsonData = React.useMemo(() => {
if (!metadata) return {};
@ -35,6 +43,23 @@ export function JsonMetadataViewer({ title, metadata, trigger }: JsonMetadataVie
}
}, [metadata]);
// Controlled mode: when open and onOpenChange are provided
if (open !== undefined && onOpenChange !== undefined) {
return (
<Dialog open={open} onOpenChange={onOpenChange}>
<DialogContent className="max-w-4xl max-h-[80vh] overflow-y-auto">
<DialogHeader>
<DialogTitle>{title} - Metadata</DialogTitle>
</DialogHeader>
<div className="mt-4 p-4 bg-muted/30 rounded-md">
<JsonView data={jsonData} style={defaultStyles} />
</div>
</DialogContent>
</Dialog>
);
}
// Uncontrolled mode: when using trigger
return (
<Dialog>
<DialogTrigger asChild>

View file

@ -1,161 +0,0 @@
"use client";
import {
ArrowRight,
Bot,
Brain,
CheckCircle,
FileText,
MessageSquare,
Sparkles,
Zap,
} from "lucide-react";
import { motion } from "motion/react";
import { useRouter } from "next/navigation";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { useGlobalLLMConfigs, useLLMConfigs, useLLMPreferences } from "@/hooks/use-llm-configs";
/** Props for the onboarding completion step. */
interface CompletionStepProps {
  // ID of the search space this onboarding flow was run for.
  searchSpaceId: number;
}

/**
 * Final onboarding step shown once LLM configuration is complete.
 *
 * Presents two follow-up actions (add sources, start chatting) that navigate
 * into the dashboard, plus a row of quick stat badges summarising the LLM
 * configurations available in this search space.
 */
export function CompletionStep({ searchSpaceId }: CompletionStepProps) {
  const router = useRouter();
  const { llmConfigs } = useLLMConfigs(searchSpaceId);
  const { globalConfigs } = useGlobalLLMConfigs();
  // The hook call is kept (stable hook order; it also loads preference state),
  // but its result is not rendered by this step, so nothing is destructured.
  useLLMPreferences(searchSpaceId);

  // Combine global and user-specific configs for the stat badges below.
  const allConfigs = [...globalConfigs, ...llmConfigs];

  return (
    <div className="space-y-8">
      {/* Next Steps - What would you like to do? */}
      <motion.div
        initial={{ opacity: 0, y: 20 }}
        animate={{ opacity: 1, y: 0 }}
        transition={{ delay: 0.6 }}
        className="space-y-4"
      >
        <div className="text-center">
          <h3 className="text-xl font-semibold mb-2">What would you like to do next?</h3>
          <p className="text-muted-foreground">Choose an option to continue</p>
        </div>
        <div className="grid grid-cols-1 md:grid-cols-2 gap-6">
          {/* Add Sources Card */}
          <motion.div
            initial={{ opacity: 0, x: -20 }}
            animate={{ opacity: 1, x: 0 }}
            transition={{ delay: 0.7 }}
          >
            <Card className="h-full border-2 hover:border-primary/50 transition-all hover:shadow-lg cursor-pointer group">
              <CardHeader>
                <div className="w-12 h-12 bg-blue-100 dark:bg-blue-950 rounded-lg flex items-center justify-center mb-3 group-hover:scale-110 transition-transform">
                  <FileText className="w-6 h-6 text-blue-600 dark:text-blue-400" />
                </div>
                <CardTitle className="text-lg">Add Sources</CardTitle>
                <CardDescription>
                  Connect your data sources to start building your knowledge base
                </CardDescription>
              </CardHeader>
              <CardContent className="space-y-4">
                <div className="space-y-2 text-sm text-muted-foreground">
                  <div className="flex items-center gap-2">
                    <CheckCircle className="w-4 h-4 text-green-600" />
                    <span>Connect documents and files</span>
                  </div>
                  <div className="flex items-center gap-2">
                    <CheckCircle className="w-4 h-4 text-green-600" />
                    <span>Import from various sources</span>
                  </div>
                  <div className="flex items-center gap-2">
                    <CheckCircle className="w-4 h-4 text-green-600" />
                    <span>Build your knowledge base</span>
                  </div>
                </div>
                <Button
                  className="w-full group-hover:bg-primary/90"
                  onClick={() => router.push(`/dashboard/${searchSpaceId}/sources/add`)}
                >
                  Add Sources
                  <ArrowRight className="w-4 h-4 ml-2" />
                </Button>
              </CardContent>
            </Card>
          </motion.div>
          {/* Start Chatting Card */}
          <motion.div
            initial={{ opacity: 0, x: 20 }}
            animate={{ opacity: 1, x: 0 }}
            transition={{ delay: 0.8 }}
          >
            <Card className="h-full border-2 hover:border-primary/50 transition-all hover:shadow-lg cursor-pointer group">
              <CardHeader>
                <div className="w-12 h-12 bg-purple-100 dark:bg-purple-950 rounded-lg flex items-center justify-center mb-3 group-hover:scale-110 transition-transform">
                  <MessageSquare className="w-6 h-6 text-purple-600 dark:text-purple-400" />
                </div>
                <CardTitle className="text-lg">Start Chatting</CardTitle>
                <CardDescription>
                  Jump right into the AI researcher and start asking questions
                </CardDescription>
              </CardHeader>
              <CardContent className="space-y-4">
                <div className="space-y-2 text-sm text-muted-foreground">
                  <div className="flex items-center gap-2">
                    <CheckCircle className="w-4 h-4 text-green-600" />
                    <span>AI-powered conversations</span>
                  </div>
                  <div className="flex items-center gap-2">
                    <CheckCircle className="w-4 h-4 text-green-600" />
                    <span>Research and explore topics</span>
                  </div>
                  <div className="flex items-center gap-2">
                    <CheckCircle className="w-4 h-4 text-green-600" />
                    <span>Get instant insights</span>
                  </div>
                </div>
                <Button
                  className="w-full group-hover:bg-primary/90"
                  onClick={() => router.push(`/dashboard/${searchSpaceId}/researcher`)}
                >
                  Start Chatting
                  <ArrowRight className="w-4 h-4 ml-2" />
                </Button>
              </CardContent>
            </Card>
          </motion.div>
        </div>
        {/* Quick Stats */}
        <motion.div
          initial={{ opacity: 0, y: 10 }}
          animate={{ opacity: 1, y: 0 }}
          transition={{ delay: 0.9 }}
          className="flex flex-wrap justify-center gap-2 pt-4"
        >
          <Badge variant="secondary">
            {/* `!== 1` (not `> 1`) so zero configs reads "0 LLM providers". */}
            {allConfigs.length} LLM provider{allConfigs.length !== 1 ? "s" : ""} available
          </Badge>
          {globalConfigs.length > 0 && (
            <Badge variant="secondary"> {globalConfigs.length} Global config(s)</Badge>
          )}
          {llmConfigs.length > 0 && (
            <Badge variant="secondary"> {llmConfigs.length} Custom config(s)</Badge>
          )}
          <Badge variant="secondary"> All roles assigned</Badge>
          <Badge variant="secondary"> Ready to use</Badge>
        </motion.div>
      </motion.div>
    </div>
  );
}

View file

@ -0,0 +1,8 @@
// Barrel file: re-exports the onboarding UI components so consumers can
// import them from "@/components/onboard" instead of individual module paths.
export { OnboardActionCard } from "./onboard-action-card";
export { OnboardAdvancedSettings } from "./onboard-advanced-settings";
export { OnboardHeader } from "./onboard-header";
export { OnboardLLMSetup } from "./onboard-llm-setup";
export { OnboardLoading } from "./onboard-loading";
export { OnboardStats } from "./onboard-stats";
export { SetupLLMStep } from "./setup-llm-step";
export { SetupPromptStep } from "./setup-prompt-step";

View file

@ -0,0 +1,114 @@
"use client";
import { ArrowRight, CheckCircle, type LucideIcon } from "lucide-react";
import { motion } from "motion/react";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { cn } from "@/lib/utils";
/** Props for a single onboarding action card. */
interface OnboardActionCardProps {
  title: string;
  description: string;
  // Lucide icon component rendered in the themed header tile.
  icon: LucideIcon;
  // Bullet list rendered with check marks inside the card body.
  features: string[];
  buttonText: string;
  // Fired when the card (anywhere on it, not just the button) is clicked.
  onClick: () => void;
  // Visual theme; must be a key of the `colorSchemes` map below.
  colorScheme: "emerald" | "blue" | "violet";
  // Entrance-animation delay in seconds (forwarded to the motion transition).
  delay?: number;
}

// Tailwind class bundles per theme, keyed by the `colorScheme` prop.
const colorSchemes = {
  emerald: {
    iconBg: "bg-emerald-500/10 dark:bg-emerald-500/20",
    iconRing: "ring-emerald-500/20 dark:ring-emerald-500/30",
    iconColor: "text-emerald-600 dark:text-emerald-400",
    checkColor: "text-emerald-500",
    buttonBg: "bg-emerald-600 hover:bg-emerald-500",
    hoverBorder: "hover:border-emerald-500/50",
  },
  blue: {
    iconBg: "bg-blue-500/10 dark:bg-blue-500/20",
    iconRing: "ring-blue-500/20 dark:ring-blue-500/30",
    iconColor: "text-blue-600 dark:text-blue-400",
    checkColor: "text-blue-500",
    buttonBg: "bg-blue-600 hover:bg-blue-500",
    hoverBorder: "hover:border-blue-500/50",
  },
  violet: {
    iconBg: "bg-violet-500/10 dark:bg-violet-500/20",
    iconRing: "ring-violet-500/20 dark:ring-violet-500/30",
    iconColor: "text-violet-600 dark:text-violet-400",
    checkColor: "text-violet-500",
    buttonBg: "bg-violet-600 hover:bg-violet-500",
    hoverBorder: "hover:border-violet-500/50",
  },
};

/**
 * Clickable, animated card used on the onboarding screen to present one
 * "next step" action: themed icon, description, feature checklist, and a
 * call-to-action button. The whole card triggers `onClick`.
 */
export function OnboardActionCard({
  title,
  description,
  icon: Icon,
  features,
  buttonText,
  onClick,
  colorScheme,
  delay = 0,
}: OnboardActionCardProps) {
  // Resolve the Tailwind class bundle for the requested theme.
  const colors = colorSchemes[colorScheme];
  return (
    <motion.div
      initial={{ opacity: 0, y: 30 }}
      animate={{ opacity: 1, y: 0 }}
      transition={{ delay, type: "spring", stiffness: 200 }}
      whileHover={{ y: -6, transition: { duration: 0.2 } }}
    >
      <Card
        className={cn(
          "h-full cursor-pointer group relative overflow-hidden transition-all duration-300",
          "border bg-card hover:shadow-lg",
          colors.hoverBorder
        )}
        onClick={onClick}
      >
        <CardHeader className="relative pb-4">
          {/* Icon tile: scales on card hover and wiggles on direct hover. */}
          <motion.div
            className={cn(
              "w-14 h-14 rounded-2xl flex items-center justify-center mb-4 ring-1 transition-all duration-300",
              colors.iconBg,
              colors.iconRing,
              "group-hover:scale-110"
            )}
            whileHover={{ rotate: [0, -5, 5, 0] }}
            transition={{ duration: 0.5 }}
          >
            <Icon className={cn("w-7 h-7", colors.iconColor)} />
          </motion.div>
          <CardTitle className="text-xl">{title}</CardTitle>
          <CardDescription>{description}</CardDescription>
        </CardHeader>
        <CardContent className="relative space-y-4">
          <div className="space-y-2.5 text-sm text-muted-foreground">
            {features.map((feature, index) => (
              <div key={index} className="flex items-center gap-2.5">
                <CheckCircle className={cn("w-4 h-4", colors.checkColor)} />
                <span>{feature}</span>
              </div>
            ))}
          </div>
          <Button
            className={cn(
              "w-full text-white border-0 transition-all duration-300",
              colors.buttonBg
            )}
          >
            {buttonText}
            <ArrowRight className="w-4 h-4 ml-2 group-hover:translate-x-1 transition-transform" />
          </Button>
        </CardContent>
      </Card>
    </motion.div>
  );
}

View file

@ -0,0 +1,144 @@
"use client";
import { ChevronDown, MessageSquare, Settings2 } from "lucide-react";
import { AnimatePresence, motion } from "motion/react";
import { SetupLLMStep } from "@/components/onboard/setup-llm-step";
import { SetupPromptStep } from "@/components/onboard/setup-prompt-step";
import { Card, CardContent } from "@/components/ui/card";
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from "@/components/ui/collapsible";
import { cn } from "@/lib/utils";
/** Props for the collapsible "advanced settings" section of onboarding. */
interface OnboardAdvancedSettingsProps {
  searchSpaceId: number;
  // Controlled open state for the LLM configuration panel.
  showLLMSettings: boolean;
  setShowLLMSettings: (show: boolean) => void;
  // Controlled open state for the prompt/response settings panel.
  showPromptSettings: boolean;
  setShowPromptSettings: (show: boolean) => void;
  // Callbacks forwarded to SetupLLMStep so the parent can refresh its data.
  onConfigCreated: () => void;
  onConfigDeleted: () => void;
  onPreferencesUpdated: () => Promise<void>;
}

/**
 * Two controlled collapsible panels for optional onboarding configuration:
 * LLM model/role setup (SetupLLMStep) and AI response/prompt settings
 * (SetupPromptStep). Open state is owned by the parent via props.
 */
export function OnboardAdvancedSettings({
  searchSpaceId,
  showLLMSettings,
  setShowLLMSettings,
  showPromptSettings,
  setShowPromptSettings,
  onConfigCreated,
  onConfigDeleted,
  onPreferencesUpdated,
}: OnboardAdvancedSettingsProps) {
  return (
    <motion.div
      initial={{ opacity: 0, y: 20 }}
      animate={{ opacity: 1, y: 0 }}
      transition={{ delay: 1 }}
      className="space-y-4"
    >
      {/* LLM Configuration */}
      <Collapsible open={showLLMSettings} onOpenChange={setShowLLMSettings}>
        <CollapsibleTrigger asChild>
          <Card className="hover:bg-muted/50 transition-colors cursor-pointer">
            <CardContent className="py-4">
              <div className="flex items-center justify-between">
                <div className="flex items-center gap-3">
                  <div className="p-2 rounded-xl bg-fuchsia-500/10 dark:bg-fuchsia-500/20 border border-fuchsia-500/20">
                    <Settings2 className="w-5 h-5 text-fuchsia-600 dark:text-fuchsia-400" />
                  </div>
                  <div>
                    <h3 className="font-semibold">LLM Configuration</h3>
                    <p className="text-sm text-muted-foreground">
                      Customize AI models and role assignments
                    </p>
                  </div>
                </div>
                {/* Chevron rotates 180° while the panel is open. */}
                <motion.div
                  animate={{ rotate: showLLMSettings ? 180 : 0 }}
                  transition={{ duration: 0.2 }}
                >
                  <ChevronDown className="w-5 h-5 text-muted-foreground" />
                </motion.div>
              </div>
            </CardContent>
          </Card>
        </CollapsibleTrigger>
        <CollapsibleContent>
          {/* AnimatePresence + the same open flag gives a height-animated reveal. */}
          <AnimatePresence>
            {showLLMSettings && (
              <motion.div
                initial={{ opacity: 0, height: 0 }}
                animate={{ opacity: 1, height: "auto" }}
                exit={{ opacity: 0, height: 0 }}
                transition={{ duration: 0.3 }}
              >
                <Card className="mt-2">
                  <CardContent className="pt-6">
                    <SetupLLMStep
                      searchSpaceId={searchSpaceId}
                      onConfigCreated={onConfigCreated}
                      onConfigDeleted={onConfigDeleted}
                      onPreferencesUpdated={onPreferencesUpdated}
                    />
                  </CardContent>
                </Card>
              </motion.div>
            )}
          </AnimatePresence>
        </CollapsibleContent>
      </Collapsible>
      {/* Prompt Configuration */}
      <Collapsible open={showPromptSettings} onOpenChange={setShowPromptSettings}>
        <CollapsibleTrigger asChild>
          <Card className="hover:bg-muted/50 transition-colors cursor-pointer">
            <CardContent className="py-4">
              <div className="flex items-center justify-between">
                <div className="flex items-center gap-3">
                  <div className="p-2 rounded-xl bg-cyan-500/10 dark:bg-cyan-500/20 border border-cyan-500/20">
                    <MessageSquare className="w-5 h-5 text-cyan-600 dark:text-cyan-400" />
                  </div>
                  <div>
                    <h3 className="font-semibold">AI Response Settings</h3>
                    <p className="text-sm text-muted-foreground">
                      Configure citations and custom instructions (Optional)
                    </p>
                  </div>
                </div>
                <motion.div
                  animate={{ rotate: showPromptSettings ? 180 : 0 }}
                  transition={{ duration: 0.2 }}
                >
                  <ChevronDown className="w-5 h-5 text-muted-foreground" />
                </motion.div>
              </div>
            </CardContent>
          </Card>
        </CollapsibleTrigger>
        <CollapsibleContent>
          <AnimatePresence>
            {showPromptSettings && (
              <motion.div
                initial={{ opacity: 0, height: 0 }}
                animate={{ opacity: 1, height: "auto" }}
                exit={{ opacity: 0, height: 0 }}
                transition={{ duration: 0.3 }}
              >
                <Card className="mt-2">
                  <CardContent className="pt-6">
                    {/* Closing the panel doubles as the "done" action. */}
                    <SetupPromptStep
                      searchSpaceId={searchSpaceId}
                      onComplete={() => setShowPromptSettings(false)}
                    />
                  </CardContent>
                </Card>
              </motion.div>
            )}
          </AnimatePresence>
        </CollapsibleContent>
      </Collapsible>
    </motion.div>
  );
}

View file

@ -0,0 +1,56 @@
"use client";
import { CheckCircle } from "lucide-react";
import { motion } from "motion/react";
import { Logo } from "@/components/Logo";
import { Badge } from "@/components/ui/badge";
/** Props for the onboarding page header. */
interface OnboardHeaderProps {
  title: string;
  subtitle: string;
  // When true, shows the "AI Configuration Complete" badge under the title.
  isReady?: boolean;
}

/**
 * Animated header for the onboarding screen: logo, title, subtitle, and an
 * optional "configuration complete" badge when `isReady` is set.
 */
export function OnboardHeader({ title, subtitle, isReady }: OnboardHeaderProps) {
  return (
    <motion.div
      initial={{ opacity: 0, y: -20 }}
      animate={{ opacity: 1, y: 0 }}
      transition={{ duration: 0.5, delay: 0.1 }}
      className="text-center mb-10"
    >
      {/* Logo pops in with a spring scale animation. */}
      <motion.div
        initial={{ scale: 0 }}
        animate={{ scale: 1 }}
        transition={{ type: "spring", stiffness: 200, delay: 0.2 }}
        className="inline-flex items-center justify-center mb-6"
      >
        <Logo className="w-20 h-20 rounded-2xl shadow-lg" />
      </motion.div>
      <motion.div
        initial={{ opacity: 0, y: 10 }}
        animate={{ opacity: 1, y: 0 }}
        transition={{ delay: 0.3 }}
        className="space-y-2"
      >
        <h1 className="text-4xl md:text-5xl font-bold text-foreground">{title}</h1>
        <p className="text-muted-foreground text-lg md:text-xl max-w-2xl mx-auto">{subtitle}</p>
      </motion.div>
      {isReady && (
        <motion.div
          initial={{ opacity: 0, scale: 0.8 }}
          animate={{ opacity: 1, scale: 1 }}
          transition={{ delay: 0.4, type: "spring" }}
          className="mt-4"
        >
          <Badge className="px-4 py-2 text-sm bg-emerald-500/10 border-emerald-500/30 text-emerald-600 dark:text-emerald-400">
            <CheckCircle className="w-4 h-4 mr-2" />
            AI Configuration Complete
          </Badge>
        </motion.div>
      )}
    </motion.div>
  );
}

View file

@ -0,0 +1,93 @@
"use client";
import { Bot } from "lucide-react";
import { motion } from "motion/react";
import { Logo } from "@/components/Logo";
import { SetupLLMStep } from "@/components/onboard/setup-llm-step";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
/** Props for the full-screen LLM setup view. */
interface OnboardLLMSetupProps {
  searchSpaceId: number;
  // Page heading shown above the setup card.
  title: string;
  // Heading and description shown inside the setup card itself.
  configTitle: string;
  configDescription: string;
  // Callbacks forwarded to SetupLLMStep so the parent can refresh its data.
  onConfigCreated: () => void;
  onConfigDeleted: () => void;
  onPreferencesUpdated: () => Promise<void>;
}

/**
 * Full-screen onboarding view that wraps SetupLLMStep in a centered,
 * animated card with a logo header. Shown when the user still needs to
 * configure an AI model.
 */
export function OnboardLLMSetup({
  searchSpaceId,
  title,
  configTitle,
  configDescription,
  onConfigCreated,
  onConfigDeleted,
  onPreferencesUpdated,
}: OnboardLLMSetupProps) {
  return (
    <div className="min-h-screen bg-background flex items-center justify-center p-4">
      <motion.div
        initial={{ opacity: 0, y: 20 }}
        animate={{ opacity: 1, y: 0 }}
        transition={{ duration: 0.5 }}
        className="w-full max-w-4xl"
      >
        {/* Header */}
        <div className="text-center mb-8">
          <motion.div
            initial={{ scale: 0 }}
            animate={{ scale: 1 }}
            transition={{ type: "spring", stiffness: 200, delay: 0.1 }}
            className="inline-flex items-center justify-center mb-6"
          >
            <Logo className="w-16 h-16 rounded-2xl shadow-lg" />
          </motion.div>
          <motion.h1
            initial={{ opacity: 0, y: 10 }}
            animate={{ opacity: 1, y: 0 }}
            transition={{ delay: 0.2 }}
            className="text-4xl font-bold text-foreground mb-3"
          >
            {title}
          </motion.h1>
          <motion.p
            initial={{ opacity: 0, y: 10 }}
            animate={{ opacity: 1, y: 0 }}
            transition={{ delay: 0.3 }}
            className="text-muted-foreground text-lg"
          >
            Configure your AI model to get started
          </motion.p>
        </div>
        {/* LLM Setup Card */}
        <motion.div
          initial={{ opacity: 0, y: 20 }}
          animate={{ opacity: 1, y: 0 }}
          transition={{ delay: 0.4 }}
        >
          <Card className="shadow-lg">
            <CardHeader className="text-center border-b pb-6">
              <div className="flex items-center justify-center gap-3 mb-2">
                <div className="p-2 rounded-xl bg-primary/10 border border-primary/20">
                  <Bot className="w-6 h-6 text-primary" />
                </div>
                <CardTitle className="text-2xl">{configTitle}</CardTitle>
              </div>
              <CardDescription>{configDescription}</CardDescription>
            </CardHeader>
            <CardContent className="pt-6">
              <SetupLLMStep
                searchSpaceId={searchSpaceId}
                onConfigCreated={onConfigCreated}
                onConfigDeleted={onConfigDeleted}
                onPreferencesUpdated={onPreferencesUpdated}
              />
            </CardContent>
          </Card>
        </motion.div>
      </motion.div>
    </div>
  );
}

View file

@ -0,0 +1,47 @@
"use client";
import { Wand2 } from "lucide-react";
import { motion } from "motion/react";
/** Props for the onboarding loading screen. */
interface OnboardLoadingProps {
  title: string;
  subtitle: string;
}

/**
 * Full-screen loading state for onboarding: a continuously spinning wand
 * icon, title/subtitle text, and three pulsing dots staggered by 0.2s each.
 */
export function OnboardLoading({ title, subtitle }: OnboardLoadingProps) {
  return (
    <div className="min-h-screen bg-background flex items-center justify-center p-4">
      <motion.div
        initial={{ opacity: 0, scale: 0.9 }}
        animate={{ opacity: 1, scale: 1 }}
        transition={{ duration: 0.5 }}
        className="text-center"
      >
        <div className="relative mb-8 flex justify-center">
          {/* Infinite 360° rotation, one revolution every 2 seconds. */}
          <motion.div
            animate={{ rotate: 360 }}
            transition={{ duration: 2, repeat: Infinity, ease: "linear" }}
          >
            <Wand2 className="w-16 h-16 text-primary" />
          </motion.div>
        </div>
        <h2 className="text-2xl font-bold text-foreground mb-2">{title}</h2>
        <p className="text-muted-foreground">{subtitle}</p>
        <div className="mt-6 flex justify-center gap-1.5">
          {[0, 1, 2].map((i) => (
            <motion.div
              key={i}
              className="w-2 h-2 rounded-full bg-primary"
              animate={{ scale: [1, 1.5, 1], opacity: [0.5, 1, 0.5] }}
              transition={{
                duration: 1,
                repeat: Infinity,
                delay: i * 0.2,
              }}
            />
          ))}
        </div>
      </motion.div>
    </div>
  );
}

View file

@ -0,0 +1,38 @@
"use client";
import { Bot, Brain, Sparkles } from "lucide-react";
import { motion } from "motion/react";
import { Badge } from "@/components/ui/badge";
/** Props for the onboarding stats badge row. */
interface OnboardStatsProps {
  // Number of globally-shared LLM configurations.
  globalConfigsCount: number;
  // Number of LLM configurations created by this user.
  userConfigsCount: number;
}

/**
 * Row of summary badges on the onboarding screen: global model count and
 * custom config count (each hidden when zero), plus a static "All Roles
 * Assigned" badge.
 */
export function OnboardStats({ globalConfigsCount, userConfigsCount }: OnboardStatsProps) {
  return (
    <motion.div
      initial={{ opacity: 0, y: 20 }}
      animate={{ opacity: 1, y: 0 }}
      transition={{ delay: 0.5 }}
      className="flex flex-wrap justify-center gap-3 mb-10"
    >
      {globalConfigsCount > 0 && (
        <Badge variant="secondary" className="px-3 py-1.5">
          <Sparkles className="w-3 h-3 mr-1.5 text-violet-500" />
          {globalConfigsCount} Global Model{globalConfigsCount > 1 ? "s" : ""}
        </Badge>
      )}
      {userConfigsCount > 0 && (
        <Badge variant="secondary" className="px-3 py-1.5">
          <Bot className="w-3 h-3 mr-1.5 text-blue-500" />
          {userConfigsCount} Custom Config{userConfigsCount > 1 ? "s" : ""}
        </Badge>
      )}
      <Badge variant="secondary" className="px-3 py-1.5">
        <Brain className="w-3 h-3 mr-1.5 text-fuchsia-500" />
        All Roles Assigned
      </Badge>
    </motion.div>
  );
}

View file

@ -13,6 +13,7 @@ import { Switch } from "@/components/ui/switch";
import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { Textarea } from "@/components/ui/textarea";
import { type CommunityPrompt, useCommunityPrompts } from "@/hooks/use-community-prompts";
import { authenticatedFetch } from "@/lib/auth-utils";
interface SetupPromptStepProps {
searchSpaceId: number;
@ -74,14 +75,11 @@ export function SetupPromptStep({ searchSpaceId, onComplete }: SetupPromptStepPr
// Only send update if there's something to update
if (Object.keys(payload).length > 0) {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}`,
{
method: "PUT",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify(payload),
}
);

View file

@ -16,7 +16,8 @@ const demoPlans = [
"Podcasts support with local TTS providers.",
"Connects with 15+ external sources.",
"Cross-Browser Extension for dynamic webpages including authenticated content",
"Upcoming: Mergeable MindMaps",
"Role-based access permissions",
"Collaboration and multiplayer features",
"Upcoming: Note Management",
],
description: "Open source version with powerful features",
@ -32,9 +33,10 @@ const demoPlans = [
features: [
"Everything in Community",
"Priority Support",
"Role-based access permissions",
"Collaboration and multiplayer features",
"Advanced security features",
"Audit logs and compliance",
"SSO, OIDC & SAML",
"SLA guarantee",
],
description: "For large organizations with specific needs",
buttonText: "Contact Sales",

View file

@ -36,19 +36,19 @@ import { cn } from "@/lib/utils";
// Define the form schema with Zod
const searchSpaceFormSchema = z.object({
name: z.string().min(3, "Name is required"),
description: z.string().min(10, "Description is required"),
name: z.string().min(3, "Name must be at least 3 characters"),
description: z.string().optional(),
});
// Define the type for the form values
type SearchSpaceFormValues = z.infer<typeof searchSpaceFormSchema>;
interface SearchSpaceFormProps {
onSubmit?: (data: { name: string; description: string }) => void;
onSubmit?: (data: { name: string; description?: string }) => void;
onDelete?: () => void;
className?: string;
isEditing?: boolean;
initialData?: { name: string; description: string };
initialData?: { name: string; description?: string };
}
export function SearchSpaceForm({
@ -229,7 +229,9 @@ export function SearchSpaceForm({
name="description"
render={({ field }) => (
<FormItem>
<FormLabel>Description</FormLabel>
<FormLabel>
Description <span className="text-muted-foreground font-normal">(optional)</span>
</FormLabel>
<FormControl>
<Input placeholder="Enter search space description" {...field} />
</FormControl>

View file

@ -413,19 +413,6 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
</div>
</CardHeader>
<CardContent className="space-y-4">
<div className="space-y-2">
<div className="text-sm text-muted-foreground">
<strong>Use cases:</strong> {role.examples}
</div>
<div className="flex flex-wrap gap-1">
{role.characteristics.map((char, idx) => (
<Badge key={idx} variant="outline" className="text-xs">
{char}
</Badge>
))}
</div>
</div>
<div className="space-y-2">
<Label className="text-sm font-medium">Assign LLM Configuration:</Label>
<Select

View file

@ -25,6 +25,7 @@ import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { Textarea } from "@/components/ui/textarea";
import { type CommunityPrompt, useCommunityPrompts } from "@/hooks/use-community-prompts";
import { useSearchSpace } from "@/hooks/use-search-space";
import { authenticatedFetch } from "@/lib/auth-utils";
interface PromptConfigManagerProps {
searchSpaceId: number;
@ -78,14 +79,11 @@ export function PromptConfigManager({ searchSpaceId }: PromptConfigManagerProps)
// Only send request if we have something to update
if (Object.keys(payload).length > 0) {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}`,
{
method: "PUT",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify(payload),
}
);

View file

@ -17,6 +17,7 @@ import {
SquareTerminal,
Trash2,
Undo2,
Users,
} from "lucide-react";
import Image from "next/image";
import Link from "next/link";
@ -54,6 +55,7 @@ export const iconMap: Record<string, LucideIcon> = {
Trash2,
Podcast,
FileText,
Users,
};
const defaultData = {

View file

@ -43,6 +43,7 @@ export function NavMain({ items }: { items: NavItem[] }) {
Podcasts: "podcasts",
Logs: "logs",
Platform: "platform",
Team: "team",
};
const key = titleMap[title];

View file

@ -14,6 +14,7 @@ import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Progress } from "@/components/ui/progress";
import { Separator } from "@/components/ui/separator";
import { getAuthHeaders } from "@/lib/auth-utils";
import { GridPattern } from "./GridPattern";
interface DocumentUploadTabProps {
@ -168,9 +169,7 @@ export function DocumentUploadTab({ searchSpaceId }: DocumentUploadTabProps) {
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/fileupload`,
{
method: "POST",
headers: {
Authorization: `Bearer ${window.localStorage.getItem("surfsense_bearer_token")}`,
},
headers: getAuthHeaders(),
body: formData,
}
);

View file

@ -19,6 +19,7 @@ import {
CardTitle,
} from "@/components/ui/card";
import { Label } from "@/components/ui/label";
import { authenticatedFetch } from "@/lib/auth-utils";
const youtubeRegex =
/^(https:\/\/)?(www\.)?(youtube\.com\/watch\?v=|youtu\.be\/)([a-zA-Z0-9_-]{11})$/;
@ -66,14 +67,11 @@ export function YouTubeTab({ searchSpaceId }: YouTubeTabProps) {
const videoUrls = videoTags.map((tag) => tag.text);
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
document_type: "YOUTUBE_VIDEO",
content: videoUrls,

View file

@ -1,5 +1,6 @@
export * from "./use-document-by-chunk";
export * from "./use-logs";
export * from "./use-rbac";
export * from "./use-search-source-connectors";
export * from "./use-search-space";
export * from "./use-user";

View file

@ -1,5 +1,6 @@
import { useCallback, useEffect, useState } from "react";
import { toast } from "sonner";
import { getBearerToken } from "@/lib/auth-utils";
interface UseApiKeyReturn {
apiKey: string | null;
@ -17,7 +18,7 @@ export function useApiKey(): UseApiKeyReturn {
// Load API key from localStorage
const loadApiKey = () => {
try {
const token = localStorage.getItem("surfsense_bearer_token");
const token = getBearerToken();
setApiKey(token);
} catch (error) {
console.error("Error loading API key:", error);
@ -32,17 +33,58 @@ export function useApiKey(): UseApiKeyReturn {
return () => clearTimeout(timer);
}, []);
const fallbackCopyTextToClipboard = (text: string) => {
const textArea = document.createElement("textarea");
textArea.value = text;
// Avoid scrolling to bottom
textArea.style.top = "0";
textArea.style.left = "0";
textArea.style.position = "fixed";
textArea.style.opacity = "0";
document.body.appendChild(textArea);
textArea.focus();
textArea.select();
try {
const successful = document.execCommand("copy");
document.body.removeChild(textArea);
if (successful) {
setCopied(true);
toast.success("API key copied to clipboard");
setTimeout(() => {
setCopied(false);
}, 2000);
} else {
toast.error("Failed to copy API key");
}
} catch (err) {
console.error("Fallback: Oops, unable to copy", err);
document.body.removeChild(textArea);
toast.error("Failed to copy API key");
}
};
const copyToClipboard = useCallback(async () => {
if (!apiKey) return;
try {
await navigator.clipboard.writeText(apiKey);
setCopied(true);
toast.success("API key copied to clipboard");
if (navigator.clipboard && window.isSecureContext) {
// Use Clipboard API if available and in secure context
await navigator.clipboard.writeText(apiKey);
setCopied(true);
toast.success("API key copied to clipboard");
setTimeout(() => {
setCopied(false);
}, 2000);
setTimeout(() => {
setCopied(false);
}, 2000);
} else {
// Fallback for non-secure contexts or browsers without clipboard API
fallbackCopyTextToClipboard(apiKey);
}
} catch (err) {
console.error("Failed to copy:", err);
toast.error("Failed to copy API key");

View file

@ -3,6 +3,7 @@ import { useCallback, useEffect, useState } from "react";
import type { ChatDetails } from "@/app/dashboard/[search_space_id]/chats/chats-client";
import type { ResearchMode } from "@/components/chat";
import type { Document } from "@/hooks/use-documents";
import { getBearerToken } from "@/lib/auth-utils";
interface UseChatStateProps {
search_space_id: string;
@ -22,7 +23,7 @@ export function useChatState({ chat_id }: UseChatStateProps) {
const [topK, setTopK] = useState<number>(5);
useEffect(() => {
const bearerToken = localStorage.getItem("surfsense_bearer_token");
const bearerToken = getBearerToken();
setToken(bearerToken);
}, []);

View file

@ -15,6 +15,7 @@ import {
type SearchSourceConnector,
useSearchSourceConnectors,
} from "@/hooks/use-search-source-connectors";
import { authenticatedFetch } from "@/lib/auth-utils";
const normalizeListInput = (value: unknown): string[] => {
if (Array.isArray(value)) {
@ -184,16 +185,11 @@ export function useConnectorEditPage(connectorId: number, searchSpaceId: string)
setIsFetchingRepos(true);
setFetchedRepos(null);
try {
const token = localStorage.getItem("surfsense_bearer_token");
if (!token) throw new Error("No auth token");
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/github/repositories`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ github_pat: values.github_pat }),
}
);

View file

@ -1,3 +1,5 @@
import { authenticatedFetch } from "@/lib/auth-utils";
// Types for connector API
export interface ConnectorConfig {
[key: string]: string;
@ -32,14 +34,11 @@ export const getConnectorTypeDisplay = (type: string): string => {
export const ConnectorService = {
// Create a new connector
async createConnector(data: CreateConnectorRequest): Promise<Connector> {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify(data),
}
);
@ -54,13 +53,9 @@ export const ConnectorService = {
// Get all connectors
async getConnectors(skip = 0, limit = 100): Promise<Connector[]> {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors?skip=${skip}&limit=${limit}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
}
{ method: "GET" }
);
if (!response.ok) {
@ -73,13 +68,9 @@ export const ConnectorService = {
// Get a specific connector
async getConnector(connectorId: number): Promise<Connector> {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
}
{ method: "GET" }
);
if (!response.ok) {
@ -92,14 +83,11 @@ export const ConnectorService = {
// Update a connector
async updateConnector(connectorId: number, data: CreateConnectorRequest): Promise<Connector> {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}`,
{
method: "PUT",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify(data),
}
);
@ -114,14 +102,9 @@ export const ConnectorService = {
// Delete a connector
async deleteConnector(connectorId: number): Promise<void> {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}`,
{
method: "DELETE",
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
}
{ method: "DELETE" }
);
if (!response.ok) {

View file

@ -1,6 +1,7 @@
"use client";
import { useCallback, useState } from "react";
import { toast } from "sonner";
import { authenticatedFetch } from "@/lib/auth-utils";
export interface Chunk {
id: number;
@ -49,13 +50,10 @@ export function useDocumentByChunk() {
setError(null);
setDocument(null);
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/by-chunk/${chunkId}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
"Content-Type": "application/json",
},
headers: { "Content-Type": "application/json" },
method: "GET",
}
);

View file

@ -1,4 +1,5 @@
import { useCallback, useEffect, useState } from "react";
import { authenticatedFetch } from "@/lib/auth-utils";
export interface DocumentTypeCount {
type: string;
@ -23,11 +24,6 @@ export const useDocumentTypes = (searchSpaceId?: number, lazy: boolean = false)
try {
setIsLoading(true);
setError(null);
const token = localStorage.getItem("surfsense_bearer_token");
if (!token) {
throw new Error("No authentication token found");
}
// Build URL with optional search_space_id query parameter
const url = new URL(
@ -37,12 +33,9 @@ export const useDocumentTypes = (searchSpaceId?: number, lazy: boolean = false)
url.searchParams.append("search_space_id", spaceId.toString());
}
const response = await fetch(url.toString(), {
const response = await authenticatedFetch(url.toString(), {
method: "GET",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
headers: { "Content-Type": "application/json" },
});
if (!response.ok) {

View file

@ -1,6 +1,7 @@
"use client";
import { useCallback, useEffect, useState } from "react";
import { toast } from "sonner";
import { authenticatedFetch } from "@/lib/auth-utils";
import { normalizeListResponse } from "@/lib/pagination";
export interface Document {
@ -78,14 +79,9 @@ export function useDocuments(searchSpaceId: number, options?: UseDocumentsOption
params.append("document_types", effectiveDocumentTypes.join(","));
}
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents?${params.toString()}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "GET",
}
{ method: "GET" }
);
if (!response.ok) {
@ -159,14 +155,9 @@ export function useDocuments(searchSpaceId: number, options?: UseDocumentsOption
params.append("document_types", effectiveDocumentTypes.join(","));
}
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/search?${params.toString()}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "GET",
}
{ method: "GET" }
);
if (!response.ok) {
@ -193,14 +184,9 @@ export function useDocuments(searchSpaceId: number, options?: UseDocumentsOption
const deleteDocument = useCallback(
async (documentId: number) => {
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "DELETE",
}
{ method: "DELETE" }
);
if (!response.ok) {
@ -228,14 +214,9 @@ export function useDocuments(searchSpaceId: number, options?: UseDocumentsOption
search_space_id: searchSpaceId.toString(),
});
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/type-counts?${params.toString()}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "GET",
}
{ method: "GET" }
);
if (!response.ok) {

View file

@ -1,6 +1,7 @@
"use client";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import { authenticatedFetch } from "@/lib/auth-utils";
export interface LLMConfig {
id: number;
@ -61,14 +62,9 @@ export function useLLMConfigs(searchSpaceId: number | null) {
try {
setLoading(true);
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/llm-configs?search_space_id=${searchSpaceId}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "GET",
}
{ method: "GET" }
);
if (!response.ok) {
@ -92,14 +88,11 @@ export function useLLMConfigs(searchSpaceId: number | null) {
const createLLMConfig = async (config: CreateLLMConfig): Promise<LLMConfig | null> => {
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/llm-configs`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify(config),
}
);
@ -122,14 +115,9 @@ export function useLLMConfigs(searchSpaceId: number | null) {
const deleteLLMConfig = async (id: number): Promise<boolean> => {
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/llm-configs/${id}`,
{
method: "DELETE",
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
}
{ method: "DELETE" }
);
if (!response.ok) {
@ -151,14 +139,11 @@ export function useLLMConfigs(searchSpaceId: number | null) {
config: UpdateLLMConfig
): Promise<LLMConfig | null> => {
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/llm-configs/${id}`,
{
method: "PUT",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify(config),
}
);
@ -203,14 +188,9 @@ export function useLLMPreferences(searchSpaceId: number | null) {
try {
setLoading(true);
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/llm-preferences`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "GET",
}
{ method: "GET" }
);
if (!response.ok) {
@ -239,14 +219,11 @@ export function useLLMPreferences(searchSpaceId: number | null) {
}
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/llm-preferences`,
{
method: "PUT",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify(newPreferences),
}
);
@ -293,14 +270,9 @@ export function useGlobalLLMConfigs() {
const fetchGlobalConfigs = async () => {
try {
setLoading(true);
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/global-llm-configs`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "GET",
}
{ method: "GET" }
);
if (!response.ok) {

View file

@ -1,6 +1,7 @@
"use client";
import { useCallback, useEffect, useMemo, useState } from "react";
import { toast } from "sonner";
import { authenticatedFetch } from "@/lib/auth-utils";
export type LogLevel = "DEBUG" | "INFO" | "WARNING" | "ERROR" | "CRITICAL";
export type LogStatus = "IN_PROGRESS" | "SUCCESS" | "FAILED";
@ -95,14 +96,9 @@ export function useLogs(searchSpaceId?: number, filters: LogFilters = {}) {
if (options.skip !== undefined) params.append("skip", options.skip.toString());
if (options.limit !== undefined) params.append("limit", options.limit.toString());
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs?${params}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "GET",
}
{ method: "GET" }
);
if (!response.ok) {
@ -147,14 +143,14 @@ export function useLogs(searchSpaceId?: number, filters: LogFilters = {}) {
// Function to create a new log
const createLog = useCallback(async (logData: Omit<Log, "id" | "created_at">) => {
try {
const response = await fetch(`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs`, {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "POST",
body: JSON.stringify(logData),
});
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs`,
{
headers: { "Content-Type": "application/json" },
method: "POST",
body: JSON.stringify(logData),
}
);
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
@ -179,13 +175,10 @@ export function useLogs(searchSpaceId?: number, filters: LogFilters = {}) {
updateData: Partial<Omit<Log, "id" | "created_at" | "search_space_id">>
) => {
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs/${logId}`,
{
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
headers: { "Content-Type": "application/json" },
method: "PUT",
body: JSON.stringify(updateData),
}
@ -212,14 +205,9 @@ export function useLogs(searchSpaceId?: number, filters: LogFilters = {}) {
// Function to delete a log
const deleteLog = useCallback(async (logId: number) => {
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs/${logId}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "DELETE",
}
{ method: "DELETE" }
);
if (!response.ok) {
@ -240,14 +228,9 @@ export function useLogs(searchSpaceId?: number, filters: LogFilters = {}) {
// Function to get a single log
const getLog = useCallback(async (logId: number) => {
try {
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs/${logId}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "GET",
}
{ method: "GET" }
);
if (!response.ok) {
@ -287,14 +270,9 @@ export function useLogsSummary(searchSpaceId: number, hours: number = 24) {
try {
setLoading(true);
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs/search-space/${searchSpaceId}/summary?hours=${hours}`,
{
headers: {
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
method: "GET",
}
{ method: "GET" }
);
if (!response.ok) {

View file

@ -0,0 +1,687 @@
"use client";
import { useCallback, useEffect, useMemo, useState } from "react";
import { toast } from "sonner";
import { authenticatedFetch, getBearerToken, handleUnauthorized } from "@/lib/auth-utils";
// ============ Types ============
/** A named bundle of permissions scoped to one search space. */
export interface Role {
	id: number;
	name: string;
	description: string | null;
	/** Permission value strings granted by this role. */
	permissions: string[];
	// NOTE(review): presumably auto-assigned to new members when true — confirm with backend.
	is_default: boolean;
	// NOTE(review): presumably a built-in, non-editable role — confirm with backend.
	is_system_role: boolean;
	search_space_id: number;
	/** Server-provided timestamp string. */
	created_at: string;
}

/** A user's membership record within a search space. */
export interface Member {
	id: number;
	user_id: string;
	search_space_id: number;
	/** Null when the member has no explicit role assigned. */
	role_id: number | null;
	/** True for the search space owner. */
	is_owner: boolean;
	joined_at: string;
	created_at: string;
	/** Resolved role object, or null when no role is assigned. */
	role: Role | null;
	/** Null when the member's email is not available. */
	user_email: string | null;
}

/** An invite link/code granting entry to a search space. */
export interface Invite {
	id: number;
	invite_code: string;
	search_space_id: number;
	/** Role granted on acceptance; null when none is attached. */
	role_id: number | null;
	created_by_id: string | null;
	/** Null when the invite never expires. */
	expires_at: string | null;
	/** Null when the invite has no usage cap. */
	max_uses: number | null;
	uses_count: number;
	is_active: boolean;
	name: string | null;
	created_at: string;
	role: Role | null;
}

/** Payload for creating an invite (POST .../invites). */
export interface InviteCreate {
	name?: string;
	role_id?: number;
	expires_at?: string;
	max_uses?: number;
}

/** Partial payload for updating an invite (PUT .../invites/{id}). */
export interface InviteUpdate {
	name?: string;
	role_id?: number;
	expires_at?: string;
	max_uses?: number;
	is_active?: boolean;
}

/** Payload for creating a role (POST .../roles). */
export interface RoleCreate {
	name: string;
	description?: string;
	permissions: string[];
	is_default?: boolean;
}

/** Partial payload for updating a role (PUT .../roles/{id}). */
export interface RoleUpdate {
	name?: string;
	description?: string;
	permissions?: string[];
	is_default?: boolean;
}

/** A single permission as described by the /permissions endpoint. */
export interface PermissionInfo {
	value: string;
	/** Human-readable name. */
	name: string;
	/** Grouping key used by usePermissions to bucket permissions. */
	category: string;
}

/** The current user's access summary for one search space. */
export interface UserAccess {
	search_space_id: number;
	search_space_name: string;
	is_owner: boolean;
	role_name: string | null;
	/** Permission strings; a literal "*" entry means full access. */
	permissions: string[];
}

/** Public information about an invite code (no auth required to fetch). */
export interface InviteInfo {
	search_space_name: string;
	role_name: string | null;
	is_valid: boolean;
	message: string | null;
}
// ============ Members Hook ============
/**
 * React hook managing the member list of a search space.
 *
 * Fetches members on mount (and whenever `searchSpaceId` changes) and exposes
 * helpers to change a member's role, remove a member, or leave the space as
 * the current user. All authenticated requests go through `authenticatedFetch`.
 *
 * Fixes vs. original: `catch` clauses use `unknown` (strict-mode idiom) and a
 * falsy `searchSpaceId` no longer leaves `loading` stuck at `true` forever.
 *
 * @param searchSpaceId - ID of the search space; a falsy value skips fetching.
 * @returns `{ members, loading, error, fetchMembers, updateMemberRole, removeMember, leaveSearchSpace }`
 */
export function useMembers(searchSpaceId: number) {
	const [members, setMembers] = useState<Member[]>([]);
	const [loading, setLoading] = useState(true);
	const [error, setError] = useState<string | null>(null);

	const fetchMembers = useCallback(async () => {
		if (!searchSpaceId) {
			// Nothing to fetch, but resolve the initial loading state so the
			// UI doesn't spin forever on a falsy id.
			setLoading(false);
			return;
		}
		try {
			setLoading(true);
			const response = await authenticatedFetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members`,
				{ method: "GET" }
			);
			if (!response.ok) {
				const errorData = await response.json().catch(() => ({}));
				throw new Error(errorData.detail || "Failed to fetch members");
			}
			const data = await response.json();
			setMembers(data);
			setError(null);
			return data;
		} catch (err: unknown) {
			// `unknown` forces explicit narrowing before reading `.message`.
			const message = err instanceof Error ? err.message : "";
			setError(message || "Failed to fetch members");
			console.error("Error fetching members:", err);
		} finally {
			setLoading(false);
		}
	}, [searchSpaceId]);

	useEffect(() => {
		fetchMembers();
	}, [fetchMembers]);

	// PUT the new role id (null clears the role) and patch local state.
	const updateMemberRole = useCallback(
		async (membershipId: number, roleId: number | null) => {
			try {
				const response = await authenticatedFetch(
					`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members/${membershipId}`,
					{
						headers: { "Content-Type": "application/json" },
						method: "PUT",
						body: JSON.stringify({ role_id: roleId }),
					}
				);
				if (!response.ok) {
					const errorData = await response.json().catch(() => ({}));
					throw new Error(errorData.detail || "Failed to update member role");
				}
				const updatedMember = await response.json();
				setMembers((prev) => prev.map((m) => (m.id === membershipId ? updatedMember : m)));
				toast.success("Member role updated successfully");
				return updatedMember;
			} catch (err: unknown) {
				const message = err instanceof Error ? err.message : "";
				toast.error(message || "Failed to update member role");
				throw err;
			}
		},
		[searchSpaceId]
	);

	// DELETE a membership and drop it from local state; returns success flag.
	const removeMember = useCallback(
		async (membershipId: number) => {
			try {
				const response = await authenticatedFetch(
					`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members/${membershipId}`,
					{ method: "DELETE" }
				);
				if (!response.ok) {
					const errorData = await response.json().catch(() => ({}));
					throw new Error(errorData.detail || "Failed to remove member");
				}
				setMembers((prev) => prev.filter((m) => m.id !== membershipId));
				toast.success("Member removed successfully");
				return true;
			} catch (err: unknown) {
				const message = err instanceof Error ? err.message : "";
				toast.error(message || "Failed to remove member");
				return false;
			}
		},
		[searchSpaceId]
	);

	// Remove the current user's own membership ("me"); returns success flag.
	const leaveSearchSpace = useCallback(async () => {
		try {
			const response = await authenticatedFetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members/me`,
				{ method: "DELETE" }
			);
			if (!response.ok) {
				const errorData = await response.json().catch(() => ({}));
				throw new Error(errorData.detail || "Failed to leave search space");
			}
			toast.success("Successfully left the search space");
			return true;
		} catch (err: unknown) {
			const message = err instanceof Error ? err.message : "";
			toast.error(message || "Failed to leave search space");
			return false;
		}
	}, [searchSpaceId]);

	return {
		members,
		loading,
		error,
		fetchMembers,
		updateMemberRole,
		removeMember,
		leaveSearchSpace,
	};
}
// ============ Roles Hook ============
/**
 * React hook managing the roles defined in a search space.
 *
 * Fetches roles on mount (and whenever `searchSpaceId` changes) and exposes
 * CRUD helpers. All requests go through `authenticatedFetch`.
 *
 * Fixes vs. original: `catch` clauses use `unknown` (strict-mode idiom) and a
 * falsy `searchSpaceId` no longer leaves `loading` stuck at `true` forever.
 *
 * @param searchSpaceId - ID of the search space; a falsy value skips fetching.
 * @returns `{ roles, loading, error, fetchRoles, createRole, updateRole, deleteRole }`
 */
export function useRoles(searchSpaceId: number) {
	const [roles, setRoles] = useState<Role[]>([]);
	const [loading, setLoading] = useState(true);
	const [error, setError] = useState<string | null>(null);

	const fetchRoles = useCallback(async () => {
		if (!searchSpaceId) {
			// Resolve the initial loading state even when there is nothing to fetch.
			setLoading(false);
			return;
		}
		try {
			setLoading(true);
			const response = await authenticatedFetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles`,
				{ method: "GET" }
			);
			if (!response.ok) {
				const errorData = await response.json().catch(() => ({}));
				throw new Error(errorData.detail || "Failed to fetch roles");
			}
			const data = await response.json();
			setRoles(data);
			setError(null);
			return data;
		} catch (err: unknown) {
			// `unknown` forces explicit narrowing before reading `.message`.
			const message = err instanceof Error ? err.message : "";
			setError(message || "Failed to fetch roles");
			console.error("Error fetching roles:", err);
		} finally {
			setLoading(false);
		}
	}, [searchSpaceId]);

	useEffect(() => {
		fetchRoles();
	}, [fetchRoles]);

	// POST a new role and append it to local state.
	const createRole = useCallback(
		async (roleData: RoleCreate) => {
			try {
				const response = await authenticatedFetch(
					`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles`,
					{
						headers: { "Content-Type": "application/json" },
						method: "POST",
						body: JSON.stringify(roleData),
					}
				);
				if (!response.ok) {
					const errorData = await response.json().catch(() => ({}));
					throw new Error(errorData.detail || "Failed to create role");
				}
				const newRole = await response.json();
				setRoles((prev) => [...prev, newRole]);
				toast.success("Role created successfully");
				return newRole;
			} catch (err: unknown) {
				const message = err instanceof Error ? err.message : "";
				toast.error(message || "Failed to create role");
				throw err;
			}
		},
		[searchSpaceId]
	);

	// PUT a partial role update and patch local state.
	const updateRole = useCallback(
		async (roleId: number, roleData: RoleUpdate) => {
			try {
				const response = await authenticatedFetch(
					`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles/${roleId}`,
					{
						headers: { "Content-Type": "application/json" },
						method: "PUT",
						body: JSON.stringify(roleData),
					}
				);
				if (!response.ok) {
					const errorData = await response.json().catch(() => ({}));
					throw new Error(errorData.detail || "Failed to update role");
				}
				const updatedRole = await response.json();
				setRoles((prev) => prev.map((r) => (r.id === roleId ? updatedRole : r)));
				toast.success("Role updated successfully");
				return updatedRole;
			} catch (err: unknown) {
				const message = err instanceof Error ? err.message : "";
				toast.error(message || "Failed to update role");
				throw err;
			}
		},
		[searchSpaceId]
	);

	// DELETE a role and drop it from local state; returns success flag.
	const deleteRole = useCallback(
		async (roleId: number) => {
			try {
				const response = await authenticatedFetch(
					`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles/${roleId}`,
					{ method: "DELETE" }
				);
				if (!response.ok) {
					const errorData = await response.json().catch(() => ({}));
					throw new Error(errorData.detail || "Failed to delete role");
				}
				setRoles((prev) => prev.filter((r) => r.id !== roleId));
				toast.success("Role deleted successfully");
				return true;
			} catch (err: unknown) {
				const message = err instanceof Error ? err.message : "";
				toast.error(message || "Failed to delete role");
				return false;
			}
		},
		[searchSpaceId]
	);

	return {
		roles,
		loading,
		error,
		fetchRoles,
		createRole,
		updateRole,
		deleteRole,
	};
}
// ============ Invites Hook ============
/**
 * React hook managing the invites of a search space.
 *
 * Fetches invites on mount (and whenever `searchSpaceId` changes) and exposes
 * create/update/revoke helpers. All requests go through `authenticatedFetch`.
 *
 * Fixes vs. original: `catch` clauses use `unknown` (strict-mode idiom) and a
 * falsy `searchSpaceId` no longer leaves `loading` stuck at `true` forever.
 *
 * @param searchSpaceId - ID of the search space; a falsy value skips fetching.
 * @returns `{ invites, loading, error, fetchInvites, createInvite, updateInvite, revokeInvite }`
 */
export function useInvites(searchSpaceId: number) {
	const [invites, setInvites] = useState<Invite[]>([]);
	const [loading, setLoading] = useState(true);
	const [error, setError] = useState<string | null>(null);

	const fetchInvites = useCallback(async () => {
		if (!searchSpaceId) {
			// Resolve the initial loading state even when there is nothing to fetch.
			setLoading(false);
			return;
		}
		try {
			setLoading(true);
			const response = await authenticatedFetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites`,
				{ method: "GET" }
			);
			if (!response.ok) {
				const errorData = await response.json().catch(() => ({}));
				throw new Error(errorData.detail || "Failed to fetch invites");
			}
			const data = await response.json();
			setInvites(data);
			setError(null);
			return data;
		} catch (err: unknown) {
			// `unknown` forces explicit narrowing before reading `.message`.
			const message = err instanceof Error ? err.message : "";
			setError(message || "Failed to fetch invites");
			console.error("Error fetching invites:", err);
		} finally {
			setLoading(false);
		}
	}, [searchSpaceId]);

	useEffect(() => {
		fetchInvites();
	}, [fetchInvites]);

	// POST a new invite and append it to local state.
	const createInvite = useCallback(
		async (inviteData: InviteCreate) => {
			try {
				const response = await authenticatedFetch(
					`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites`,
					{
						headers: { "Content-Type": "application/json" },
						method: "POST",
						body: JSON.stringify(inviteData),
					}
				);
				if (!response.ok) {
					const errorData = await response.json().catch(() => ({}));
					throw new Error(errorData.detail || "Failed to create invite");
				}
				const newInvite = await response.json();
				setInvites((prev) => [...prev, newInvite]);
				toast.success("Invite created successfully");
				return newInvite;
			} catch (err: unknown) {
				const message = err instanceof Error ? err.message : "";
				toast.error(message || "Failed to create invite");
				throw err;
			}
		},
		[searchSpaceId]
	);

	// PUT a partial invite update and patch local state.
	const updateInvite = useCallback(
		async (inviteId: number, inviteData: InviteUpdate) => {
			try {
				const response = await authenticatedFetch(
					`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites/${inviteId}`,
					{
						headers: { "Content-Type": "application/json" },
						method: "PUT",
						body: JSON.stringify(inviteData),
					}
				);
				if (!response.ok) {
					const errorData = await response.json().catch(() => ({}));
					throw new Error(errorData.detail || "Failed to update invite");
				}
				const updatedInvite = await response.json();
				setInvites((prev) => prev.map((i) => (i.id === inviteId ? updatedInvite : i)));
				toast.success("Invite updated successfully");
				return updatedInvite;
			} catch (err: unknown) {
				const message = err instanceof Error ? err.message : "";
				toast.error(message || "Failed to update invite");
				throw err;
			}
		},
		[searchSpaceId]
	);

	// DELETE an invite and drop it from local state; returns success flag.
	const revokeInvite = useCallback(
		async (inviteId: number) => {
			try {
				const response = await authenticatedFetch(
					`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites/${inviteId}`,
					{ method: "DELETE" }
				);
				if (!response.ok) {
					const errorData = await response.json().catch(() => ({}));
					throw new Error(errorData.detail || "Failed to revoke invite");
				}
				setInvites((prev) => prev.filter((i) => i.id !== inviteId));
				toast.success("Invite revoked successfully");
				return true;
			} catch (err: unknown) {
				const message = err instanceof Error ? err.message : "";
				toast.error(message || "Failed to revoke invite");
				return false;
			}
		},
		[searchSpaceId]
	);

	return {
		invites,
		loading,
		error,
		fetchInvites,
		createInvite,
		updateInvite,
		revokeInvite,
	};
}
// ============ Permissions Hook ============
/**
 * React hook fetching the global catalog of available permissions.
 *
 * Also exposes `groupedPermissions`, the same list bucketed by the
 * `category` field (useful for rendering permission pickers).
 *
 * Fix vs. original: `catch` uses `unknown` (strict-mode idiom).
 *
 * @returns `{ permissions, groupedPermissions, loading, error, fetchPermissions }`
 */
export function usePermissions() {
	const [permissions, setPermissions] = useState<PermissionInfo[]>([]);
	const [loading, setLoading] = useState(true);
	const [error, setError] = useState<string | null>(null);

	const fetchPermissions = useCallback(async () => {
		try {
			setLoading(true);
			const response = await authenticatedFetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/permissions`,
				{ method: "GET" }
			);
			if (!response.ok) {
				const errorData = await response.json().catch(() => ({}));
				throw new Error(errorData.detail || "Failed to fetch permissions");
			}
			// The endpoint wraps the list in a `permissions` field.
			const data = await response.json();
			setPermissions(data.permissions);
			setError(null);
			return data.permissions;
		} catch (err: unknown) {
			// `unknown` forces explicit narrowing before reading `.message`.
			const message = err instanceof Error ? err.message : "";
			setError(message || "Failed to fetch permissions");
			console.error("Error fetching permissions:", err);
		} finally {
			setLoading(false);
		}
	}, []);

	useEffect(() => {
		fetchPermissions();
	}, [fetchPermissions]);

	// Bucket the flat permission list by category; recomputed only when the
	// list itself changes.
	const groupedPermissions = useMemo(() => {
		const groups: Record<string, PermissionInfo[]> = {};
		for (const perm of permissions) {
			if (!groups[perm.category]) {
				groups[perm.category] = [];
			}
			groups[perm.category].push(perm);
		}
		return groups;
	}, [permissions]);

	return {
		permissions,
		groupedPermissions,
		loading,
		error,
		fetchPermissions,
	};
}
// ============ User Access Hook ============
/**
 * React hook fetching the current user's access summary for a search space
 * and exposing permission-check helpers.
 *
 * A literal "*" entry in `access.permissions` means full access and makes
 * every permission check pass.
 *
 * Fixes vs. original: `catch` uses `unknown` (strict-mode idiom) and a falsy
 * `searchSpaceId` no longer leaves `loading` stuck at `true` forever.
 *
 * @param searchSpaceId - ID of the search space; a falsy value skips fetching.
 * @returns `{ access, loading, error, fetchAccess, hasPermission, hasAnyPermission }`
 */
export function useUserAccess(searchSpaceId: number) {
	const [access, setAccess] = useState<UserAccess | null>(null);
	const [loading, setLoading] = useState(true);
	const [error, setError] = useState<string | null>(null);

	const fetchAccess = useCallback(async () => {
		if (!searchSpaceId) {
			// Resolve the initial loading state even when there is nothing to fetch.
			setLoading(false);
			return;
		}
		try {
			setLoading(true);
			const response = await authenticatedFetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/my-access`,
				{ method: "GET" }
			);
			if (!response.ok) {
				const errorData = await response.json().catch(() => ({}));
				throw new Error(errorData.detail || "Failed to fetch access info");
			}
			const data = await response.json();
			setAccess(data);
			setError(null);
			return data;
		} catch (err: unknown) {
			// `unknown` forces explicit narrowing before reading `.message`.
			const message = err instanceof Error ? err.message : "";
			setError(message || "Failed to fetch access info");
			console.error("Error fetching access:", err);
		} finally {
			setLoading(false);
		}
	}, [searchSpaceId]);

	useEffect(() => {
		fetchAccess();
	}, [fetchAccess]);

	// True when the user holds the given permission (or full "*" access).
	// Returns false while access info has not loaded yet.
	const hasPermission = useCallback(
		(permission: string) => {
			if (!access) return false;
			if (access.permissions.includes("*")) return true;
			return access.permissions.includes(permission);
		},
		[access]
	);

	// True when the user holds at least one of the given permissions
	// (or full "*" access). Returns false while access info has not loaded.
	const hasAnyPermission = useCallback(
		(permissions: string[]) => {
			if (!access) return false;
			if (access.permissions.includes("*")) return true;
			return permissions.some((p) => access.permissions.includes(p));
		},
		[access]
	);

	return {
		access,
		loading,
		error,
		fetchAccess,
		hasPermission,
		hasAnyPermission,
	};
}
// ============ Invite Info Hook (Public) ============
/**
 * React hook for the public invite flow.
 *
 * Fetches public information about an invite code with a plain `fetch`
 * (the info endpoint requires no authentication) and exposes `acceptInvite`,
 * which joins the space through the authenticated accept endpoint.
 *
 * Fix vs. original: `catch` clauses use `unknown` (strict-mode idiom).
 *
 * @param inviteCode - The invite code from the URL; null skips fetching.
 * @returns `{ inviteInfo, loading, error, fetchInviteInfo, acceptInvite }`
 */
export function useInviteInfo(inviteCode: string | null) {
	const [inviteInfo, setInviteInfo] = useState<InviteInfo | null>(null);
	const [loading, setLoading] = useState(true);
	const [error, setError] = useState<string | null>(null);

	const fetchInviteInfo = useCallback(async () => {
		if (!inviteCode) {
			setLoading(false);
			return;
		}
		try {
			setLoading(true);
			// Public endpoint — intentionally uses plain fetch, not authenticatedFetch.
			const response = await fetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/invites/${inviteCode}/info`,
				{
					method: "GET",
				}
			);
			if (!response.ok) {
				const errorData = await response.json().catch(() => ({}));
				throw new Error(errorData.detail || "Failed to fetch invite info");
			}
			const data = await response.json();
			setInviteInfo(data);
			setError(null);
			return data;
		} catch (err: unknown) {
			// `unknown` forces explicit narrowing before reading `.message`.
			const message = err instanceof Error ? err.message : "";
			setError(message || "Failed to fetch invite info");
			console.error("Error fetching invite info:", err);
		} finally {
			setLoading(false);
		}
	}, [inviteCode]);

	useEffect(() => {
		fetchInviteInfo();
	}, [fetchInviteInfo]);

	// Accept the invite as the logged-in user; resolves with the server
	// payload on success, null when no code is set, and rethrows on failure.
	const acceptInvite = useCallback(async () => {
		if (!inviteCode) {
			toast.error("No invite code provided");
			return null;
		}
		try {
			const response = await authenticatedFetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/invites/accept`,
				{
					headers: { "Content-Type": "application/json" },
					method: "POST",
					body: JSON.stringify({ invite_code: inviteCode }),
				}
			);
			if (!response.ok) {
				const errorData = await response.json().catch(() => ({}));
				throw new Error(errorData.detail || "Failed to accept invite");
			}
			const data = await response.json();
			toast.success(data.message || "Successfully joined the search space");
			return data;
		} catch (err: unknown) {
			const message = err instanceof Error ? err.message : "";
			toast.error(message || "Failed to accept invite");
			throw err;
		}
	}, [inviteCode]);

	return {
		inviteInfo,
		loading,
		error,
		fetchInviteInfo,
		acceptInvite,
	};
}

View file

@ -1,4 +1,5 @@
import { useCallback, useEffect, useState } from "react";
import { authenticatedFetch, getBearerToken, handleUnauthorized } from "@/lib/auth-utils";
export interface SearchSourceConnector {
id: number;
@ -66,11 +67,6 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
try {
setIsLoading(true);
setError(null);
const token = localStorage.getItem("surfsense_bearer_token");
if (!token) {
throw new Error("No authentication token found");
}
// Build URL with optional search_space_id query parameter
const url = new URL(
@ -80,12 +76,9 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
url.searchParams.append("search_space_id", spaceId.toString());
}
const response = await fetch(url.toString(), {
const response = await authenticatedFetch(url.toString(), {
method: "GET",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
headers: { "Content-Type": "application/json" },
});
if (!response.ok) {
@ -176,24 +169,15 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
spaceId: number
) => {
try {
const token = localStorage.getItem("surfsense_bearer_token");
if (!token) {
throw new Error("No authentication token found");
}
// Add search_space_id as a query parameter
const url = new URL(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors`
);
url.searchParams.append("search_space_id", spaceId.toString());
const response = await fetch(url.toString(), {
const response = await authenticatedFetch(url.toString(), {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify(connectorData),
});
@ -222,20 +206,11 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
>
) => {
try {
const token = localStorage.getItem("surfsense_bearer_token");
if (!token) {
throw new Error("No authentication token found");
}
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}`,
{
method: "PUT",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
headers: { "Content-Type": "application/json" },
body: JSON.stringify(connectorData),
}
);
@ -262,20 +237,11 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
*/
const deleteConnector = async (connectorId: number) => {
try {
const token = localStorage.getItem("surfsense_bearer_token");
if (!token) {
throw new Error("No authentication token found");
}
const response = await fetch(
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}`,
{
method: "DELETE",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
headers: { "Content-Type": "application/json" },
}
);
@ -302,12 +268,6 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
endDate?: string
) => {
try {
const token = localStorage.getItem("surfsense_bearer_token");
if (!token) {
throw new Error("No authentication token found");
}
// Build query parameters
const params = new URLSearchParams({
search_space_id: searchSpaceId.toString(),
@ -319,16 +279,13 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
params.append("end_date", endDate);
}
const response = await fetch(
const response = await authenticatedFetch(
`${
process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL
}/api/v1/search-source-connectors/${connectorId}/index?${params.toString()}`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
headers: { "Content-Type": "application/json" },
}
);

Some files were not shown because too many files have changed in this diff Show more