From e68286f22ea1a57652626db3bb59134c10e6d2eb Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 23 Nov 2025 15:23:31 +0530
Subject: [PATCH 01/36] introduced blocknote editor
---
.../38_add_blocknote_fields_to_documents.py | 41 +
surfsense_backend/app/db.py | 13 +-
surfsense_backend/app/routes/__init__.py | 2 +
surfsense_backend/app/routes/editor_routes.py | 161 ++
.../extension_processor.py | 12 +
.../document_processors/file_processors.py | 30 +
.../document_processors/markdown_processor.py | 10 +
.../tasks/document_processors/url_crawler.py | 12 +
.../document_processors/youtube_processor.py | 12 +
.../app/utils/blocknote_converter.py | 113 ++
.../app/api/convert-to-blocknote/route.ts | 43 +
.../app/api/convert-to-markdown/route.ts | 31 +
.../components/DocumentsTableShell.tsx | 2 +
.../(manage)/components/RowActions.tsx | 17 +-
.../editor/[documentId]/page.tsx | 209 +++
surfsense_web/components/BlockNoteEditor.tsx | 53 +
.../components/DynamicBlockNoteEditor.tsx | 9 +
.../components/dashboard-breadcrumb.tsx | 62 +-
surfsense_web/messages/en.json | 1 +
surfsense_web/messages/zh.json | 1 +
surfsense_web/next.config.ts | 18 +
surfsense_web/package.json | 4 +
surfsense_web/pnpm-lock.yaml | 1316 ++++++++++++++++-
23 files changed, 2158 insertions(+), 14 deletions(-)
create mode 100644 surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
create mode 100644 surfsense_backend/app/routes/editor_routes.py
create mode 100644 surfsense_backend/app/utils/blocknote_converter.py
create mode 100644 surfsense_web/app/api/convert-to-blocknote/route.ts
create mode 100644 surfsense_web/app/api/convert-to-markdown/route.ts
create mode 100644 surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
create mode 100644 surfsense_web/components/BlockNoteEditor.tsx
create mode 100644 surfsense_web/components/DynamicBlockNoteEditor.tsx
diff --git a/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py b/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
new file mode 100644
index 000000000..d882f020b
--- /dev/null
+++ b/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
@@ -0,0 +1,41 @@
+"""38_add_blocknote_fields_to_documents
+
+Revision ID: 38
+Revises: 37
+
+"""
+from collections.abc import Sequence
+
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = '38'
+down_revision: str | None = '37'
+branch_labels: str | Sequence[str] | None = None
+depends_on: str | Sequence[str] | None = None
+
+
+def upgrade() -> None:
+ """Upgrade schema - Add BlockNote fields only."""
+
+ op.add_column(
+ 'documents',
+ sa.Column('blocknote_document', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+ )
+ op.add_column(
+ 'documents',
+ sa.Column('content_needs_reindexing', sa.Boolean(), nullable=False, server_default=sa.false()),
+ )
+ op.add_column(
+ 'documents',
+ sa.Column('last_edited_at', sa.TIMESTAMP(timezone=True), nullable=True)
+ )
+
+def downgrade() -> None:
+ """Downgrade schema - Remove BlockNote fields."""
+ op.drop_column('documents', 'last_edited_at')
+ op.drop_column('documents', 'content_needs_reindexing')
+ op.drop_column('documents', 'blocknote_document')
\ No newline at end of file
diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py
index 4ad31b508..38505e931 100644
--- a/surfsense_backend/app/db.py
+++ b/surfsense_backend/app/db.py
@@ -20,7 +20,7 @@ from sqlalchemy import (
UniqueConstraint,
text,
)
-from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, declared_attr, relationship
@@ -178,6 +178,17 @@ class Document(BaseModel, TimestampMixin):
content_hash = Column(String, nullable=False, index=True, unique=True)
unique_identifier_hash = Column(String, nullable=True, index=True, unique=True)
embedding = Column(Vector(config.embedding_model_instance.dimension))
+
+ # BlockNote live editing state (NULL when never edited)
+ blocknote_document = Column(JSONB, nullable=True)
+
+ # blocknote background reindex flag
+ content_needs_reindexing = Column(
+ Boolean, nullable=False, default=False, server_default=text("false")
+ )
+
+ # Track when blocknote document was last edited
+ last_edited_at = Column(TIMESTAMP(timezone=True), nullable=True)
search_space_id = Column(
Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py
index 1c7e3505f..3edcf8cf8 100644
--- a/surfsense_backend/app/routes/__init__.py
+++ b/surfsense_backend/app/routes/__init__.py
@@ -5,6 +5,7 @@ from .airtable_add_connector_route import (
)
from .chats_routes import router as chats_router
from .documents_routes import router as documents_router
+from .editor_routes import router as editor_router
from .google_calendar_add_connector_route import (
router as google_calendar_add_connector_router,
)
@@ -21,6 +22,7 @@ from .search_spaces_routes import router as search_spaces_router
router = APIRouter()
router.include_router(search_spaces_router)
+router.include_router(editor_router)
router.include_router(documents_router)
router.include_router(podcasts_router)
router.include_router(chats_router)
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
new file mode 100644
index 000000000..f248b37b1
--- /dev/null
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -0,0 +1,161 @@
+"""
+Editor routes for BlockNote document editing.
+"""
+from datetime import UTC, datetime
+from typing import Any
+
+from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.db import Document, SearchSpace, User, get_async_session
+from app.users import current_active_user
+from app.utils.blocknote_converter import (
+ convert_blocknote_to_markdown,
+ convert_markdown_to_blocknote,
+)
+
+router = APIRouter()
+
+
+@router.get("/documents/{document_id}/editor-content")
+async def get_editor_content(
+ document_id: int,
+ session: AsyncSession = Depends(get_async_session),
+ user: User = Depends(current_active_user),
+):
+ """
+ Get document content for editing.
+
+ Returns the BlockNote JSON document. If blocknote_document is NULL
+ (the document predates the editor), a 400 error is raised instead,
+ because the stored `content` is only a summary and cannot be converted back.
+ """
+ result = await session.execute(
+ select(Document)
+ .join(SearchSpace)
+ .filter(Document.id == document_id, SearchSpace.user_id == user.id)
+ )
+ document = result.scalars().first()  # .first(): the SearchSpace join can yield duplicate rows; scalar_one_or_none() would raise
+
+ if not document:
+ raise HTTPException(status_code=404, detail="Document not found")
+
+ # If blocknote_document exists, return it
+ if document.blocknote_document:
+ return {
+ "document_id": document.id,
+ "title": document.title,
+ "blocknote_document": document.blocknote_document,
+ "last_edited_at": document.last_edited_at.isoformat() if document.last_edited_at else None,
+ }
+
+ # For old documents without blocknote_document, return error
+ # (Can't convert summary back to full document)
+ raise HTTPException(
+ status_code=400,
+ detail="This document was uploaded before editing was enabled. Please re-upload to enable editing."
+ )
+
+
+@router.put("/documents/{document_id}/blocknote-content")
+async def update_blocknote_content(
+ document_id: int,
+ data: dict[str, Any],
+ session: AsyncSession = Depends(get_async_session),
+ user: User = Depends(current_active_user),
+):
+ """
+ Auto-save BlockNote document during editing.
+ Only updates blocknote_document field, not content.
+ """
+ result = await session.execute(
+ select(Document)
+ .join(SearchSpace)
+ .filter(Document.id == document_id, SearchSpace.user_id == user.id)
+ )
+ document = result.scalars().first()  # .first(): the SearchSpace join can yield duplicate rows; scalar_one_or_none() would raise
+
+ if not document:
+ raise HTTPException(status_code=404, detail="Document not found")
+
+ blocknote_document = data.get("blocknote_document")
+ if not blocknote_document:
+ raise HTTPException(status_code=400, detail="blocknote_document is required")
+
+ # Update only blocknote_document and last_edited_at
+ document.blocknote_document = blocknote_document
+ document.last_edited_at = datetime.now(UTC)
+
+ await session.commit()
+ await session.refresh(document)
+
+ return {"status": "saved", "last_edited_at": document.last_edited_at.isoformat()}
+
+
+# did not implement reindexing (for now)
+# @router.post("/documents/{document_id}/finalize-edit")
+# async def finalize_edit(
+# document_id: int,
+# session: AsyncSession = Depends(get_async_session),
+# user: User = Depends(current_active_user),
+# ):
+# """
+# Finalize document editing: convert BlockNote to markdown,
+# update content (summary), and trigger reindexing.
+# """
+# result = await session.execute(
+# select(Document)
+# .join(SearchSpace)
+# .filter(Document.id == document_id, SearchSpace.user_id == user.id)
+# )
+# document = result.scalars().first()
+
+# if not document:
+# raise HTTPException(status_code=404, detail="Document not found")
+
+# if not document.blocknote_document:
+# raise HTTPException(
+# status_code=400,
+# detail="Document has no BlockNote content to finalize"
+# )
+
+# # 1. Convert BlockNote JSON → Markdown
+# full_markdown = await convert_blocknote_to_markdown(document.blocknote_document)
+
+# if not full_markdown:
+# raise HTTPException(
+# status_code=500,
+# detail="Failed to convert BlockNote document to markdown"
+# )
+
+# # 2. Generate new summary from full markdown
+# from app.services.llm_service import get_user_long_context_llm
+# from app.utils.document_converters import generate_document_summary
+
+# user_llm = await get_user_long_context_llm(session, str(user.id), document.search_space_id)
+# if not user_llm:
+# raise HTTPException(
+# status_code=500,
+# detail="No LLM configured for summary generation"
+# )
+
+# document_metadata = document.document_metadata or {}
+# summary_content, summary_embedding = await generate_document_summary(
+# full_markdown, user_llm, document_metadata
+# )
+
+# # 3. Update document fields
+# document.content = summary_content
+# document.embedding = summary_embedding
+# document.content_needs_reindexing = True # Trigger chunk regeneration
+# document.last_edited_at = datetime.now(UTC)
+
+# await session.commit()
+
+# return {
+# "status": "finalized",
+# "message": "Document saved. Summary and chunks will be regenerated in the background.",
+# "content_needs_reindexing": True,
+# }
+
\ No newline at end of file
diff --git a/surfsense_backend/app/tasks/document_processors/extension_processor.py b/surfsense_backend/app/tasks/document_processors/extension_processor.py
index 663093375..640775372 100644
--- a/surfsense_backend/app/tasks/document_processors/extension_processor.py
+++ b/surfsense_backend/app/tasks/document_processors/extension_processor.py
@@ -144,6 +144,16 @@ async def add_extension_received_document(
# Process chunks
chunks = await create_document_chunks(content.pageContent)
+
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert markdown to BlockNote JSON — NOTE(review): converts combined_document_string while chunks use content.pageContent; confirm this divergence is intended
+ blocknote_json = await convert_markdown_to_blocknote(combined_document_string)
+ if not blocknote_json:
+ logging.warning(
+ f"Failed to convert extension document '{content.metadata.VisitedWebPageTitle}' "
+ f"to BlockNote JSON, document will not be editable"
+ )
# Update or create document
if existing_document:
@@ -154,6 +164,7 @@ async def add_extension_received_document(
existing_document.embedding = summary_embedding
existing_document.document_metadata = content.metadata.model_dump()
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@@ -170,6 +181,7 @@ async def add_extension_received_document(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
)
session.add(document)
diff --git a/surfsense_backend/app/tasks/document_processors/file_processors.py b/surfsense_backend/app/tasks/document_processors/file_processors.py
index 859f6a25d..c4b75b0bd 100644
--- a/surfsense_backend/app/tasks/document_processors/file_processors.py
+++ b/surfsense_backend/app/tasks/document_processors/file_processors.py
@@ -99,6 +99,14 @@ async def add_received_file_document_using_unstructured(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
+
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert markdown to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
+ if not blocknote_json:
+ logging.warning(f"Failed to convert {file_name} to BlockNote JSON, document will not be editable")
+
# Update or create document
if existing_document:
@@ -112,6 +120,7 @@ async def add_received_file_document_using_unstructured(
"ETL_SERVICE": "UNSTRUCTURED",
}
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@@ -131,6 +140,7 @@ async def add_received_file_document_using_unstructured(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
)
session.add(document)
@@ -213,6 +223,14 @@ async def add_received_file_document_using_llamacloud(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
+
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert markdown to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
+ if not blocknote_json:
+ logging.warning(f"Failed to convert {file_name} to BlockNote JSON, document will not be editable")
+
# Update or create document
if existing_document:
@@ -226,6 +244,7 @@ async def add_received_file_document_using_llamacloud(
"ETL_SERVICE": "LLAMACLOUD",
}
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@@ -245,6 +264,7 @@ async def add_received_file_document_using_llamacloud(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
)
session.add(document)
@@ -352,6 +372,14 @@ async def add_received_file_document_using_docling(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
+
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert markdown to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
+ if not blocknote_json:
+ logging.warning(f"Failed to convert {file_name} to BlockNote JSON, document will not be editable")
+
# Update or create document
if existing_document:
@@ -365,6 +393,7 @@ async def add_received_file_document_using_docling(
"ETL_SERVICE": "DOCLING",
}
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@@ -384,6 +413,7 @@ async def add_received_file_document_using_docling(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
)
session.add(document)
diff --git a/surfsense_backend/app/tasks/document_processors/markdown_processor.py b/surfsense_backend/app/tasks/document_processors/markdown_processor.py
index 76215ed51..b63d2bdf2 100644
--- a/surfsense_backend/app/tasks/document_processors/markdown_processor.py
+++ b/surfsense_backend/app/tasks/document_processors/markdown_processor.py
@@ -109,6 +109,14 @@ async def add_received_markdown_file_document(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
+
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
+ if not blocknote_json:
+ logging.warning(f"Failed to convert {file_name} to BlockNote JSON, document will not be editable")
+
# Update or create document
if existing_document:
@@ -121,6 +129,7 @@ async def add_received_markdown_file_document(
"FILE_NAME": file_name,
}
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@@ -139,6 +148,7 @@ async def add_received_markdown_file_document(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
)
session.add(document)
diff --git a/surfsense_backend/app/tasks/document_processors/url_crawler.py b/surfsense_backend/app/tasks/document_processors/url_crawler.py
index ce2764897..d392031ea 100644
--- a/surfsense_backend/app/tasks/document_processors/url_crawler.py
+++ b/surfsense_backend/app/tasks/document_processors/url_crawler.py
@@ -247,6 +247,16 @@ async def add_crawled_url_document(
f"Processing content chunks for URL: {url}",
{"stage": "chunk_processing"},
)
+
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert markdown to BlockNote JSON — NOTE(review): converts combined_document_string while chunks use content_in_markdown; confirm this divergence is intended
+ blocknote_json = await convert_markdown_to_blocknote(combined_document_string)
+ if not blocknote_json:
+ logging.warning(
+ f"Failed to convert crawled URL '{url}' to BlockNote JSON, "
+ "document will not be editable"
+ )
chunks = await create_document_chunks(content_in_markdown)
@@ -267,6 +277,7 @@ async def add_crawled_url_document(
existing_document.embedding = summary_embedding
existing_document.document_metadata = url_crawled[0].metadata
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
document = existing_document
else:
@@ -289,6 +300,7 @@ async def add_crawled_url_document(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
)
session.add(document)
diff --git a/surfsense_backend/app/tasks/document_processors/youtube_processor.py b/surfsense_backend/app/tasks/document_processors/youtube_processor.py
index c7d396974..e479ec773 100644
--- a/surfsense_backend/app/tasks/document_processors/youtube_processor.py
+++ b/surfsense_backend/app/tasks/document_processors/youtube_processor.py
@@ -290,6 +290,16 @@ async def add_youtube_video_document(
f"Processing content chunks for video: {video_data.get('title', 'YouTube Video')}",
{"stage": "chunk_processing"},
)
+
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert transcript to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(combined_document_string)
+ if not blocknote_json:
+ logging.warning(
+ f"Failed to convert YouTube video '{video_id}' to BlockNote JSON, "
+ "document will not be editable"
+ )
chunks = await create_document_chunks(combined_document_string)
@@ -314,6 +324,7 @@ async def add_youtube_video_document(
"thumbnail": video_data.get("thumbnail_url", ""),
}
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@@ -342,6 +353,7 @@ async def add_youtube_video_document(
search_space_id=search_space_id,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
)
session.add(document)
diff --git a/surfsense_backend/app/utils/blocknote_converter.py b/surfsense_backend/app/utils/blocknote_converter.py
new file mode 100644
index 000000000..ce5cbf64b
--- /dev/null
+++ b/surfsense_backend/app/utils/blocknote_converter.py
@@ -0,0 +1,113 @@
+import logging
+from typing import Any
+
+import httpx
+
+from app.config import config
+
+logger = logging.getLogger(__name__)
+
+
+async def convert_markdown_to_blocknote(markdown: str) -> dict[str, Any] | None:
+ """
+ Convert markdown to BlockNote JSON via Next.js API.
+
+ Args:
+ markdown: Markdown string to convert
+
+ Returns:
+ BlockNote document as dict, or None if conversion fails
+ """
+ if not markdown or not markdown.strip():
+ logger.warning("Empty markdown provided for conversion")
+ return None
+
+ if not markdown or len(markdown) < 10:
+ logger.warning("Markdown became too short after sanitization")
+ # Return a minimal placeholder BlockNote document — NOTE(review): the `not markdown` clause above is unreachable (emptiness already handled), and no sanitization actually occurs before this length check
+ return [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "text": "Document content could not be converted for editing.",
+ "styles": {}
+ }
+ ],
+ "children": []
+ }
+ ]
+
+ async with httpx.AsyncClient() as client:
+ try:
+ response = await client.post(
+ f"{config.NEXT_FRONTEND_URL}/api/convert-to-blocknote",
+ json={"markdown": markdown},
+ timeout=30.0,
+ )
+ response.raise_for_status()
+ data = response.json()
+ blocknote_document = data.get("blocknote_document")
+
+ if blocknote_document:
+ logger.info(f"Successfully converted markdown to BlockNote (original: {len(markdown)} chars, sanitized: {len(markdown)} chars)")
+ return blocknote_document
+ else:
+ logger.warning("Next.js API returned empty blocknote_document")
+ return None
+
+ except httpx.TimeoutException:
+ logger.error("Timeout converting markdown to BlockNote after 30s")
+ return None
+ except httpx.HTTPStatusError as e:
+ logger.error(f"HTTP error converting markdown to BlockNote: {e.response.status_code} - {e.response.text}")
+ # Log first 1000 chars of problematic markdown for debugging
+ logger.debug(f"Problematic markdown sample: {markdown[:1000]}")
+ return None
+ except Exception as e:
+ logger.error(f"Failed to convert markdown to BlockNote: {e}", exc_info=True)
+ return None
+
+async def convert_blocknote_to_markdown(blocknote_document: dict[str, Any] | list[dict[str, Any]]) -> str | None:
+ """
+ Convert BlockNote JSON to markdown via Next.js API.
+
+ Args:
+ blocknote_document: BlockNote document as dict or list of blocks
+
+ Returns:
+ Markdown string, or None if conversion fails
+ """
+ if not blocknote_document:
+ logger.warning("Empty BlockNote document provided for conversion")
+ return None
+
+ async with httpx.AsyncClient() as client:
+ try:
+ response = await client.post(
+ f"{config.NEXT_FRONTEND_URL}/api/convert-to-markdown",
+ json={"blocknote_document": blocknote_document},
+ timeout=30.0,
+ )
+ response.raise_for_status()
+ data = response.json()
+ markdown = data.get("markdown")
+
+ if markdown:
+ logger.info(f"Successfully converted BlockNote to markdown ({len(markdown)} chars)")
+ return markdown
+ else:
+ logger.warning("Next.js API returned empty markdown")
+ return None
+
+ except httpx.TimeoutException:
+ logger.error("Timeout converting BlockNote to markdown after 30s")
+ return None
+ except httpx.HTTPStatusError as e:
+ logger.error(f"HTTP error converting BlockNote to markdown: {e.response.status_code} - {e.response.text}")
+ return None
+ except Exception as e:
+ logger.error(f"Failed to convert BlockNote to markdown: {e}", exc_info=True)
+ return None
+
\ No newline at end of file
diff --git a/surfsense_web/app/api/convert-to-blocknote/route.ts b/surfsense_web/app/api/convert-to-blocknote/route.ts
new file mode 100644
index 000000000..2cb15fb31
--- /dev/null
+++ b/surfsense_web/app/api/convert-to-blocknote/route.ts
@@ -0,0 +1,43 @@
+import { ServerBlockNoteEditor } from "@blocknote/server-util";
+import { type NextRequest, NextResponse } from "next/server";
+
+export async function POST(request: NextRequest) {
+ try {
+ const { markdown } = await request.json();
+
+ if (!markdown || typeof markdown !== "string") {
+ return NextResponse.json(
+ { error: "Markdown string is required" },
+ { status: 400 }
+ );
+ }
+
+ // Log raw markdown input before conversion
+ // console.log(`\n${"=".repeat(80)}`);
+ // console.log("RAW MARKDOWN INPUT (BEFORE CONVERSION):");
+ // console.log("=".repeat(80));
+ // console.log(markdown);
+ // console.log(`${"=".repeat(80)}\n`);
+
+ // Create server-side editor instance
+ const editor = ServerBlockNoteEditor.create();
+
+ // Convert markdown directly to BlockNote blocks
+ const blocks = await editor.tryParseMarkdownToBlocks(markdown);
+
+ if (!blocks || blocks.length === 0) {
+ throw new Error("Markdown parsing returned no blocks");
+ }
+
+ return NextResponse.json({ blocknote_document: blocks });
+ } catch (error: any) {
+ console.error("Failed to convert markdown to BlockNote:", error);
+ return NextResponse.json(
+ {
+ error: "Failed to convert markdown to BlockNote blocks",
+ details: error.message
+ },
+ { status: 500 }
+ );
+ }
+}
diff --git a/surfsense_web/app/api/convert-to-markdown/route.ts b/surfsense_web/app/api/convert-to-markdown/route.ts
new file mode 100644
index 000000000..76faf45f3
--- /dev/null
+++ b/surfsense_web/app/api/convert-to-markdown/route.ts
@@ -0,0 +1,31 @@
+import { ServerBlockNoteEditor } from "@blocknote/server-util";
+import { type NextRequest, NextResponse } from "next/server";
+
+export async function POST(request: NextRequest) {
+ try {
+ const { blocknote_document } = await request.json();
+
+ if (!blocknote_document || !Array.isArray(blocknote_document)) {
+ return NextResponse.json(
+ { error: "BlockNote document array is required" },
+ { status: 400 }
+ );
+ }
+
+ // Create server-side editor instance
+ const editor = ServerBlockNoteEditor.create();
+
+ // Convert BlockNote blocks to markdown
+ const markdown = await editor.blocksToMarkdownLossy(blocknote_document);
+
+ return NextResponse.json({
+ markdown
+ });
+ } catch (error) {
+ console.error("Failed to convert BlockNote to markdown:", error);
+ return NextResponse.json(
+ { error: "Failed to convert BlockNote blocks to markdown" },
+ { status: 500 }
+ );
+ }
+}
diff --git a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/DocumentsTableShell.tsx b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/DocumentsTableShell.tsx
index 20f2be15f..0483940e0 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/DocumentsTableShell.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/DocumentsTableShell.tsx
@@ -309,6 +309,7 @@ export function DocumentsTableShell({
refreshDocuments={async () => {
await onRefresh();
}}
+ searchSpaceId={searchSpaceId as string}
/>
@@ -340,6 +341,7 @@ export function DocumentsTableShell({
refreshDocuments={async () => {
await onRefresh();
}}
+ searchSpaceId={searchSpaceId as string}
/>
diff --git a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
index bd1e182d9..5967e62ca 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
@@ -1,6 +1,7 @@
"use client";
-import { MoreHorizontal } from "lucide-react";
+import { MoreHorizontal, Pencil, FileText, Trash2 } from "lucide-react";
+import { useRouter } from "next/navigation";
import { useState } from "react";
import { toast } from "sonner";
import { JsonMetadataViewer } from "@/components/json-metadata-viewer";
@@ -28,13 +29,16 @@ export function RowActions({
document,
deleteDocument,
refreshDocuments,
+ searchSpaceId,
}: {
document: Document;
deleteDocument: (id: number) => Promise
;
refreshDocuments: () => Promise;
+ searchSpaceId: string;
}) {
const [isOpen, setIsOpen] = useState(false);
const [isDeleting, setIsDeleting] = useState(false);
+ const router = useRouter();
const handleDelete = async () => {
setIsDeleting(true);
@@ -52,6 +56,10 @@ export function RowActions({
}
};
+ const handleEdit = () => {
+ router.push(`/dashboard/${searchSpaceId}/editor/${document.id}`);
+ };
+
return (
@@ -62,11 +70,17 @@ export function RowActions({
+
+
+ Edit Document
+
+
e.preventDefault()}>
+
View Metadata
}
@@ -81,6 +95,7 @@ export function RowActions({
setIsOpen(true);
}}
>
+
Delete
diff --git a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
new file mode 100644
index 000000000..fb5709608
--- /dev/null
+++ b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
@@ -0,0 +1,209 @@
+"use client";
+
+import { useParams, useRouter } from "next/navigation";
+import { useEffect, useState } from "react";
+import { BlockNoteEditor } from "@/components/DynamicBlockNoteEditor";
+
+interface EditorContent {
+ document_id: number;
+ title: string;
+ blocknote_document: any;
+ last_edited_at: string | null;
+}
+
+export default function EditorPage() {
+ const params = useParams();
+ const router = useRouter();
+ const documentId = params.documentId as string;
+
+ const [document, setDocument] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [saving, setSaving] = useState(false);
+ const [editorContent, setEditorContent] = useState(null);
+ const [error, setError] = useState(null);
+
+ // Get auth token
+ const token = typeof window !== "undefined"
+ ? localStorage.getItem("surfsense_bearer_token")
+ : null;
+
+ // Fetch document content - DIRECT CALL TO FASTAPI
+ useEffect(() => {
+ async function fetchDocument() {
+ if (!token) {
+ console.error("No auth token found");
+ setError("Please login to access the editor");
+ setLoading(false);
+ return;
+ }
+
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/editor-content`,
+ {
+ headers: {
+ Authorization: `Bearer ${token}`,
+ },
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({ detail: "Failed to fetch document" }));
+ throw new Error(errorData.detail || "Failed to fetch document");
+ }
+
+ const data = await response.json();
+
+ // Check if blocknote_document exists
+ if (!data.blocknote_document) {
+ setError("This document does not have BlockNote content. Please re-upload the document to enable editing.");
+ setLoading(false);
+ return;
+ }
+
+ setDocument(data);
+ setEditorContent(data.blocknote_document);
+ setError(null);
+ } catch (error) {
+ console.error("Error fetching document:", error);
+ setError(error instanceof Error ? error.message : "Failed to fetch document. Please try again.");
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ if (documentId && token) {
+ fetchDocument();
+ }
+ }, [documentId, token]);
+
+ // Auto-save every 30 seconds - DIRECT CALL TO FASTAPI
+ useEffect(() => {
+ if (!editorContent || !token) return;
+
+ const interval = setInterval(async () => {
+ try {
+ await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/blocknote-content`,
+ {
+ method: "PUT",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${token}`,
+ },
+ body: JSON.stringify({ blocknote_document: editorContent }),
+ }
+ );
+ console.log("Auto-saved");
+ } catch (error) {
+ console.error("Auto-save failed:", error);
+ }
+ }, 30000); // 30 seconds
+
+ return () => clearInterval(interval);
+ }, [editorContent, documentId, token]);
+
+ // Save and exit - DIRECT CALL TO FASTAPI
+ const handleSave = async () => {
+ if (!token) {
+ alert("Please login to save");
+ return;
+ }
+
+ if (!editorContent) {
+ alert("No content to save");
+ return;
+ }
+
+ setSaving(true);
+ try {
+ // Save blocknote_document to database (without finalizing/reindexing)
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/blocknote-content`,
+ {
+ method: "PUT",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${token}`,
+ },
+ body: JSON.stringify({ blocknote_document: editorContent }),
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({ detail: "Failed to save document" }));
+ throw new Error(errorData.detail || "Failed to save document");
+ }
+
+ // Redirect back to documents list
+ router.push(`/dashboard/${params.search_space_id}/documents`);
+ } catch (error) {
+ console.error("Error saving document:", error);
+ alert(error instanceof Error ? error.message : "Failed to save document. Please try again.");
+ } finally {
+ setSaving(false);
+ }
+ };
+
+ if (loading) {
+ return Loading editor...
;
+ }
+
+ if (error) {
+ return (
+ //
+
+
+
Error
+
{error}
+
router.back()}
+ className="px-4 py-2 bg-red-600 text-white rounded hover:bg-red-700"
+ >
+ Go Back
+
+
+
+ );
+ }
+
+ if (!document) {
+ return
Document not found
;
+ }
+
+ return (
+ //
+
+ {/* Toolbar */}
+
+
{document.title}
+
+ router.back()}
+ className="px-4 py-2 border rounded"
+ >
+ Cancel
+
+
+ {saving ? "Saving..." : "Save & Exit"}
+
+
+
+
+ {/* Editor - Now using dynamic import */}
+
+
+
+
+ );
+}
diff --git a/surfsense_web/components/BlockNoteEditor.tsx b/surfsense_web/components/BlockNoteEditor.tsx
new file mode 100644
index 000000000..e43d70fc7
--- /dev/null
+++ b/surfsense_web/components/BlockNoteEditor.tsx
@@ -0,0 +1,53 @@
+"use client";
+
+import { useEffect, useRef } from "react";
+import "@blocknote/core/fonts/inter.css";
+import "@blocknote/mantine/style.css";
+import { useCreateBlockNote } from "@blocknote/react";
+import { BlockNoteView } from "@blocknote/mantine";
+
+interface BlockNoteEditorProps {
+ initialContent?: any;
+ onChange?: (content: any) => void;
+}
+
+export default function BlockNoteEditor({
+ initialContent,
+ onChange,
+}: BlockNoteEditorProps) {
+ // Track the initial content to prevent re-initialization
+ const initialContentRef = useRef
(null);
+ const isInitializedRef = useRef(false);
+
+ // Creates a new editor instance - only use initialContent on first render
+ const editor = useCreateBlockNote({
+ initialContent: initialContentRef.current === null ? (initialContent || undefined) : undefined,
+ });
+
+ // Store initial content on first render only
+ useEffect(() => {
+ if (initialContent && initialContentRef.current === null) {
+ initialContentRef.current = initialContent;
+ isInitializedRef.current = true;
+ }
+ }, [initialContent]);
+
+ // Call onChange when document changes (but don't update from props)
+ useEffect(() => {
+ if (!onChange || !editor || !isInitializedRef.current) return;
+
+ const handleChange = () => {
+ onChange(editor.document);
+ };
+
+ // Subscribe to document changes
+ const unsubscribe = editor.onChange(handleChange);
+
+ return () => {
+ unsubscribe();
+ };
+ }, [editor, onChange]);
+
+ // Renders the editor instance
+ return ;
+}
diff --git a/surfsense_web/components/DynamicBlockNoteEditor.tsx b/surfsense_web/components/DynamicBlockNoteEditor.tsx
new file mode 100644
index 000000000..ceb678548
--- /dev/null
+++ b/surfsense_web/components/DynamicBlockNoteEditor.tsx
@@ -0,0 +1,9 @@
+"use client";
+
+import dynamic from "next/dynamic";
+
+// Dynamically import BlockNote editor with SSR disabled
+export const BlockNoteEditor = dynamic(
+ () => import("./BlockNoteEditor"),
+ { ssr: false }
+);
diff --git a/surfsense_web/components/dashboard-breadcrumb.tsx b/surfsense_web/components/dashboard-breadcrumb.tsx
index 0324ee1b6..d44c2a756 100644
--- a/surfsense_web/components/dashboard-breadcrumb.tsx
+++ b/surfsense_web/components/dashboard-breadcrumb.tsx
@@ -3,7 +3,7 @@
import { useAtomValue } from "jotai";
import { usePathname } from "next/navigation";
import { useTranslations } from "next-intl";
-import React, { useEffect } from "react";
+import React, { useEffect, useState } from "react";
import { activeChatAtom } from "@/atoms/chats/chat-query.atoms";
import {
Breadcrumb,
@@ -34,6 +34,42 @@ export function DashboardBreadcrumb() {
autoFetch: !!searchSpaceId,
});
+ // State to store document title for editor breadcrumb
+ const [documentTitle, setDocumentTitle] = useState(null);
+
+ // Fetch document title when on editor page
+ useEffect(() => {
+ if (segments[2] === "editor" && segments[3] && searchSpaceId) {
+ const documentId = segments[3];
+ const token = typeof window !== "undefined"
+ ? localStorage.getItem("surfsense_bearer_token")
+ : null;
+
+ if (token) {
+ fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/editor-content`,
+ {
+ headers: {
+ Authorization: `Bearer ${token}`,
+ },
+ }
+ )
+ .then((res) => res.json())
+ .then((data) => {
+ if (data.title) {
+ setDocumentTitle(data.title);
+ }
+ })
+ .catch(() => {
+ // If fetch fails, just use the document ID
+ setDocumentTitle(null);
+ });
+ }
+ } else {
+ setDocumentTitle(null);
+ }
+ }, [segments, searchSpaceId]);
+
// Parse the pathname to create breadcrumb items
const generateBreadcrumbs = (path: string): BreadcrumbItemInterface[] => {
const segments = path.split("/").filter(Boolean);
@@ -66,6 +102,7 @@ export function DashboardBreadcrumb() {
logs: t("logs"),
chats: t("chats"),
settings: t("settings"),
+ editor: t("editor"),
};
sectionLabel = sectionLabels[section] || sectionLabel;
@@ -73,7 +110,21 @@ export function DashboardBreadcrumb() {
// Handle sub-sections
if (segments[3]) {
const subSection = segments[3];
- let subSectionLabel = subSection.charAt(0).toUpperCase() + subSection.slice(1);
+
+ // Handle editor sub-sections (document ID)
+ if (section === "editor") {
+ const documentLabel = documentTitle || subSection;
+ breadcrumbs.push({
+ label: t("documents"),
+ href: `/dashboard/${segments[1]}/documents`,
+ });
+ breadcrumbs.push({
+ label: sectionLabel,
+ href: `/dashboard/${segments[1]}/documents`,
+ });
+ breadcrumbs.push({ label: documentLabel });
+ return breadcrumbs;
+ }
// Handle sources sub-sections
if (section === "sources") {
@@ -81,7 +132,7 @@ export function DashboardBreadcrumb() {
add: "Add Sources",
};
- const sourceLabel = sourceLabels[subSection] || subSectionLabel;
+ const sourceLabel = sourceLabels[subSection] || subSection;
breadcrumbs.push({
label: "Sources",
href: `/dashboard/${segments[1]}/sources`,
@@ -98,7 +149,7 @@ export function DashboardBreadcrumb() {
webpage: t("add_webpages"),
};
- const documentLabel = documentLabels[subSection] || subSectionLabel;
+ const documentLabel = documentLabels[subSection] || subSection;
breadcrumbs.push({
label: t("documents"),
href: `/dashboard/${segments[1]}/documents`,
@@ -158,7 +209,7 @@ export function DashboardBreadcrumb() {
manage: t("manage_connectors"),
};
- const connectorLabel = connectorLabels[subSection] || subSectionLabel;
+ const connectorLabel = connectorLabels[subSection] || subSection;
breadcrumbs.push({
label: t("connectors"),
href: `/dashboard/${segments[1]}/connectors`,
@@ -168,6 +219,7 @@ export function DashboardBreadcrumb() {
}
// Handle other sub-sections
+ let subSectionLabel = subSection.charAt(0).toUpperCase() + subSection.slice(1);
const subSectionLabels: Record = {
upload: t("upload_documents"),
youtube: t("add_youtube"),
diff --git a/surfsense_web/messages/en.json b/surfsense_web/messages/en.json
index ee1bea40b..b9c25f35a 100644
--- a/surfsense_web/messages/en.json
+++ b/surfsense_web/messages/en.json
@@ -615,6 +615,7 @@
"documents": "Documents",
"connectors": "Connectors",
"podcasts": "Podcasts",
+ "editor": "Editor",
"logs": "Logs",
"chats": "Chats",
"settings": "Settings",
diff --git a/surfsense_web/messages/zh.json b/surfsense_web/messages/zh.json
index 11880a2e8..fe36c4134 100644
--- a/surfsense_web/messages/zh.json
+++ b/surfsense_web/messages/zh.json
@@ -615,6 +615,7 @@
"documents": "文档",
"connectors": "连接器",
"podcasts": "播客",
+ "editor": "编辑器",
"logs": "日志",
"chats": "聊天",
"settings": "设置",
diff --git a/surfsense_web/next.config.ts b/surfsense_web/next.config.ts
index aca3e2d37..0777d022b 100644
--- a/surfsense_web/next.config.ts
+++ b/surfsense_web/next.config.ts
@@ -7,6 +7,8 @@ const withNextIntl = createNextIntlPlugin("./i18n/request.ts");
const nextConfig: NextConfig = {
output: "standalone",
+ // Disable StrictMode for BlockNote compatibility with React 19/Next 15
+ reactStrictMode: false,
typescript: {
ignoreBuildErrors: true,
},
@@ -21,6 +23,22 @@ const nextConfig: NextConfig = {
},
],
},
+ // Mark BlockNote server packages as external
+ serverExternalPackages: [
+ '@blocknote/server-util',
+ ],
+
+ // Configure webpack to handle blocknote packages
+ webpack: (config, { isServer }) => {
+ if (isServer) {
+ // Don't bundle these packages on the server
+ config.externals = [
+ ...(config.externals || []),
+ '@blocknote/server-util',
+ ];
+ }
+ return config;
+ },
};
// Wrap the config with MDX and next-intl plugins
diff --git a/surfsense_web/package.json b/surfsense_web/package.json
index a9de1d069..7f97111b9 100644
--- a/surfsense_web/package.json
+++ b/surfsense_web/package.json
@@ -22,6 +22,10 @@
},
"dependencies": {
"@ai-sdk/react": "^1.2.12",
+ "@blocknote/core": "^0.42.3",
+ "@blocknote/mantine": "^0.42.3",
+ "@blocknote/react": "^0.42.3",
+ "@blocknote/server-util": "^0.42.3",
"@hookform/resolvers": "^4.1.3",
"@llamaindex/chat-ui": "^0.5.17",
"@next/third-parties": "^15.5.6",
diff --git a/surfsense_web/pnpm-lock.yaml b/surfsense_web/pnpm-lock.yaml
index bee0c6c56..285099816 100644
--- a/surfsense_web/pnpm-lock.yaml
+++ b/surfsense_web/pnpm-lock.yaml
@@ -11,6 +11,18 @@ importers:
'@ai-sdk/react':
specifier: ^1.2.12
version: 1.2.12(react@19.1.0)(zod@3.25.76)
+ '@blocknote/core':
+ specifier: ^0.42.3
+ version: 0.42.3(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(highlight.js@11.11.1)
+ '@blocknote/mantine':
+ specifier: ^0.42.3
+ version: 0.42.3(@floating-ui/dom@1.7.4)(@mantine/core@8.3.9(@mantine/hooks@8.3.9(react@19.1.0))(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(@mantine/hooks@8.3.9(react@19.1.0))(@mantine/utils@6.0.22(react@19.1.0))(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(highlight.js@11.11.1)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ '@blocknote/react':
+ specifier: ^0.42.3
+ version: 0.42.3(@floating-ui/dom@1.7.4)(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(highlight.js@11.11.1)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ '@blocknote/server-util':
+ specifier: ^0.42.3
+ version: 0.42.3(@floating-ui/dom@1.7.4)(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(highlight.js@11.11.1)(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-view@1.41.3)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
'@hookform/resolvers':
specifier: ^4.1.3
version: 4.1.3(react-hook-form@7.61.1(react@19.1.0))
@@ -136,7 +148,7 @@ importers:
version: 15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
fumadocs-mdx:
specifier: ^11.7.1
- version: 11.7.1(acorn@8.14.0)(fumadocs-core@15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)
+ version: 11.7.1(acorn@8.15.0)(fumadocs-core@15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)
fumadocs-ui:
specifier: ^15.6.6
version: 15.6.6(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(tailwindcss@4.1.11)
@@ -310,6 +322,9 @@ packages:
resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==}
engines: {node: '>=6.0.0'}
+ '@asamuzakjp/css-color@3.2.0':
+ resolution: {integrity: sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw==}
+
'@babel/runtime@7.26.9':
resolution: {integrity: sha512-aA63XwOkcl4xxQa3HjPMqOP6LiK0ZDv3mUPYEFXkpHbaFjtGggE1A61FjFzJnB+p7/oy2gA8E+rcBNl/zC1tMg==}
engines: {node: '>=6.9.0'}
@@ -367,6 +382,35 @@ packages:
cpu: [x64]
os: [win32]
+ '@blocknote/core@0.42.3':
+ resolution: {integrity: sha512-wtZki6Gok5Ac9Ek6QTQztcDymstEQgVCisJwiUZTWXh8CD4UKfnIxM7C9+6eEnZMmQ8GNTvRf1HXFl+E4N78VA==}
+ peerDependencies:
+ '@hocuspocus/provider': ^2.15.2
+ peerDependenciesMeta:
+ '@hocuspocus/provider':
+ optional: true
+
+ '@blocknote/mantine@0.42.3':
+ resolution: {integrity: sha512-xzLweZG1KfFoOp/aSHTXE10IrfEHnhDlP0C2Qt2eNO2IHHa7l8XZJpIGhCoVMsn0yylm91OSynNfTO7JkZZi8w==}
+ peerDependencies:
+ '@mantine/core': ^8.3.4
+ '@mantine/hooks': ^8.3.4
+ '@mantine/utils': ^6.0.22
+ react: ^18.0 || ^19.0 || >= 19.0.0-rc
+ react-dom: ^18.0 || ^19.0 || >= 19.0.0-rc
+
+ '@blocknote/react@0.42.3':
+ resolution: {integrity: sha512-YnrQ1uyezDbaxYcFstWOJ2r8BMxqwwEc7QAhrEjCMEyBAiOxSCPnrM4/GE2mOgCS0Xa9wIp2LDoPQP2Syv+2EA==}
+ peerDependencies:
+ react: ^18.0 || ^19.0 || >= 19.0.0-rc
+ react-dom: ^18.0 || ^19.0 || >= 19.0.0-rc
+
+ '@blocknote/server-util@0.42.3':
+ resolution: {integrity: sha512-M+jtKeC2aHOYBp6GQ0YR19iv0/0f1HElrrnKwlaSPbwR6bw6tg+yb3yQkaJJioLTpd2X2Z/RwcEvxSJGnlZ81w==}
+ peerDependencies:
+ react: ^18.0 || ^19.0 || >= 19.0.0-rc
+ react-dom: ^18.0 || ^19.0 || >= 19.0.0-rc
+
'@codemirror/autocomplete@6.18.6':
resolution: {integrity: sha512-PHHBXFomUs5DF+9tCOM/UoW6XQ4R44lLNNhRaW9PKPTU0D7lIjRg3ElxaJnTwsl/oHiR93WSXDBrekhoUGCPtg==}
@@ -472,6 +516,34 @@ packages:
react: ^16.8.0 || ^17 || ^18 || ^19
react-dom: ^16.8.0 || ^17 || ^18 || ^19
+ '@csstools/color-helpers@5.1.0':
+ resolution: {integrity: sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==}
+ engines: {node: '>=18'}
+
+ '@csstools/css-calc@2.1.4':
+ resolution: {integrity: sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==}
+ engines: {node: '>=18'}
+ peerDependencies:
+ '@csstools/css-parser-algorithms': ^3.0.5
+ '@csstools/css-tokenizer': ^3.0.4
+
+ '@csstools/css-color-parser@3.1.0':
+ resolution: {integrity: sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==}
+ engines: {node: '>=18'}
+ peerDependencies:
+ '@csstools/css-parser-algorithms': ^3.0.5
+ '@csstools/css-tokenizer': ^3.0.4
+
+ '@csstools/css-parser-algorithms@3.0.5':
+ resolution: {integrity: sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==}
+ engines: {node: '>=18'}
+ peerDependencies:
+ '@csstools/css-tokenizer': ^3.0.4
+
+ '@csstools/css-tokenizer@3.0.4':
+ resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==}
+ engines: {node: '>=18'}
+
'@date-fns/tz@1.2.0':
resolution: {integrity: sha512-LBrd7MiJZ9McsOgxqWX7AaxrDjcFVjWH/tIKJd7pnR7McaslGYOP1QmmiBXdJH/H/yLCT+rcQ7FaPBUxRGUtrg==}
@@ -481,6 +553,9 @@ packages:
'@emnapi/runtime@1.7.0':
resolution: {integrity: sha512-oAYoQnCYaQZKVS53Fq23ceWMRxq5EhQsE0x0RdQ55jT7wagMu5k+fS39v1fiSLrtrLQlXwVINenqhLMtTrV/1Q==}
+ '@emoji-mart/data@1.2.1':
+ resolution: {integrity: sha512-no2pQMWiBy6gpBEiqGeU77/bFejDqUTRY7KX+0+iur13op3bqUsXdnwoZs6Xb1zbv0gAj5VvS1PWoUUckSr5Dw==}
+
'@esbuild-kit/core-utils@3.3.2':
resolution: {integrity: sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ==}
deprecated: 'Merged into tsx: https://tsx.is'
@@ -821,12 +896,18 @@ packages:
'@floating-ui/core@1.7.2':
resolution: {integrity: sha512-wNB5ooIKHQc+Kui96jE/n69rHFWAVoxn5CAzL1Xdd8FG03cgY3MLO+GF9U3W737fYDSgPWA6MReKhBQBop6Pcw==}
+ '@floating-ui/core@1.7.3':
+ resolution: {integrity: sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==}
+
'@floating-ui/dom@1.6.13':
resolution: {integrity: sha512-umqzocjDgNRGTuO7Q8CU32dkHkECqI8ZdMZ5Swb6QAM0t5rnlrN3lGo1hdpscRd3WS8T6DKYK4ephgIH9iRh3w==}
'@floating-ui/dom@1.7.2':
resolution: {integrity: sha512-7cfaOQuCS27HD7DX+6ib2OrnW+b4ZBwDNnCcT0uTyidcmyWb03FnQqJybDBoCnpdxwBSfA94UAYlRCt7mV+TbA==}
+ '@floating-ui/dom@1.7.4':
+ resolution: {integrity: sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==}
+
'@floating-ui/react-dom@2.1.2':
resolution: {integrity: sha512-06okr5cgPzMNBy+Ycse2A6udMi4bqwW/zgBF/rwjcNqWkyr82Mcg8b0vjX8OJpZFy/FKjJmw6wV7t44kK6kW7A==}
peerDependencies:
@@ -839,12 +920,24 @@ packages:
react: '>=16.8.0'
react-dom: '>=16.8.0'
+ '@floating-ui/react-dom@2.1.6':
+ resolution: {integrity: sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw==}
+ peerDependencies:
+ react: '>=16.8.0'
+ react-dom: '>=16.8.0'
+
'@floating-ui/react@0.27.13':
resolution: {integrity: sha512-Qmj6t9TjgWAvbygNEu1hj4dbHI9CY0ziCMIJrmYoDIn9TUAH5lRmiIeZmRd4c6QEZkzdoH7jNnoNyoY1AIESiA==}
peerDependencies:
react: '>=17.0.0'
react-dom: '>=17.0.0'
+ '@floating-ui/react@0.27.16':
+ resolution: {integrity: sha512-9O8N4SeG2z++TSM8QA/KTeKFBVCNEz/AGS7gWPJf6KFRzmRWixFRnCnkPHRDwSVZW6QPDO6uT0P2SpWNKCc9/g==}
+ peerDependencies:
+ react: '>=17.0.0'
+ react-dom: '>=17.0.0'
+
'@floating-ui/utils@0.2.10':
resolution: {integrity: sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==}
@@ -1196,6 +1289,23 @@ packages:
'@types/react':
optional: true
+ '@mantine/core@8.3.9':
+ resolution: {integrity: sha512-ivj0Crn5N521cI2eWZBsBGckg0ZYRqfOJz5vbbvYmfj65bp0EdsyqZuOxXzIcn2aUScQhskfvzyhV5XIUv81PQ==}
+ peerDependencies:
+ '@mantine/hooks': 8.3.9
+ react: ^18.x || ^19.x
+ react-dom: ^18.x || ^19.x
+
+ '@mantine/hooks@8.3.9':
+ resolution: {integrity: sha512-Dfz7W0+K1cq4Gb1WFQCZn8tsMXkLH6MV409wZR/ToqsxdNDUMJ/xxbfnwEXWEZjXNJd1wDETHgc+cZG2lTe3Xw==}
+ peerDependencies:
+ react: ^18.x || ^19.x
+
+ '@mantine/utils@6.0.22':
+ resolution: {integrity: sha512-RSKlNZvxhMCkOFZ6slbYvZYbWjHUM+PxDQnupIOxIdsTZQQjx/BFfrfJ7kQFOP+g7MtpOds8weAetEs5obwMOQ==}
+ peerDependencies:
+ react: '>=16.8.0'
+
'@marijn/find-cluster-break@1.0.2':
resolution: {integrity: sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==}
@@ -2236,6 +2346,9 @@ packages:
peerDependencies:
react: '>=16.8'
+ '@remirror/core-constants@3.0.0':
+ resolution: {integrity: sha512-42aWfPrimMfDKDi4YegyS7x+/0tlzaqwPQCULLanv3DMIlu96KTJR0fM5isWX2UViOqlGnX6YFgqWepcX+XMNg==}
+
'@rtsao/scc@1.1.0':
resolution: {integrity: sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==}
@@ -2263,6 +2376,9 @@ packages:
'@shikijs/transformers@3.8.1':
resolution: {integrity: sha512-nmTyFfBrhJk6HJi118jes0wuWdfKXeVUq1Nq+hm8h6wbk1KUfvtg+LY/uDfxZD2VDItHO3QoINIs3NtoKBmgxw==}
+ '@shikijs/types@3.13.0':
+ resolution: {integrity: sha512-oM9P+NCFri/mmQ8LoFGVfVyemm5Hi27330zuOBp0annwJdKH1kOLndw3zCtAVDehPLg9fKqoEx3Ht/wNZxolfw==}
+
'@shikijs/types@3.8.1':
resolution: {integrity: sha512-5C39Q8/8r1I26suLh+5TPk1DTrbY/kn3IdWA5HdizR0FhlhD05zx5nKCqhzSfDHH3p4S0ZefxWd77DLV+8FhGg==}
@@ -2410,6 +2526,100 @@ packages:
resolution: {integrity: sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg==}
engines: {node: '>=12'}
+ '@tiptap/core@3.11.0':
+ resolution: {integrity: sha512-kmS7ZVpHm1EMnW1Wmft9H5ZLM7E0G0NGBx+aGEHGDcNxZBXD2ZUa76CuWjIhOGpwsPbELp684ZdpF2JWoNi4Dg==}
+ peerDependencies:
+ '@tiptap/pm': ^3.11.0
+
+ '@tiptap/extension-bold@3.11.0':
+ resolution: {integrity: sha512-V/c3XYO09Le9GlBGq1MK4c97Fffi0GADQTbZ+LFoi65nUrAwutn5wYnXBcEyWQI6RmFWVDJTieamqtc4j9teyw==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+
+ '@tiptap/extension-bubble-menu@3.11.0':
+ resolution: {integrity: sha512-P3j9lQ+EZ5Zg/isJzLpCPX7bp7WUBmz8GPs/HPlyMyN2su8LqXntITBZr8IP1JNBlB/wR83k/W0XqdC57mG7cA==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+ '@tiptap/pm': ^3.11.0
+
+ '@tiptap/extension-code@3.11.0':
+ resolution: {integrity: sha512-5OpR5O4bveHe1KG9CJsto86NgkuerYq3OLY78vzh9uFCLdv7xgXA2aZYJfRMhbZ7hKsR7hHg1etBJUCk+TKsMg==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+
+ '@tiptap/extension-floating-menu@3.11.0':
+ resolution: {integrity: sha512-nEHdWZHEJYX1II1oJQ4aeZ8O/Kss4BRbYFXQFGIvPelCfCYEATpUJh3aq3767ARSq40bOWyu+Dcd4SCW0We6Sw==}
+ peerDependencies:
+ '@floating-ui/dom': ^1.0.0
+ '@tiptap/core': ^3.11.0
+ '@tiptap/pm': ^3.11.0
+
+ '@tiptap/extension-gapcursor@3.11.0':
+ resolution: {integrity: sha512-lXGEZiYX7k/pEFr8BgDE91vqjLTwuf+qhHLTgIpfhbt562nShLPIDj9Vzu3xrR4fwUAMiUNiLyaeInb8j3I4kg==}
+ peerDependencies:
+ '@tiptap/extensions': ^3.11.0
+
+ '@tiptap/extension-history@3.11.0':
+ resolution: {integrity: sha512-Q/kuNDCoeH2dZ2P+OqEKnRW047SkrngNq+vSrwQlAKO8osO/eAS7aLzn1NELzE5jLvzOKqUda43bSTKsBeTh+w==}
+ peerDependencies:
+ '@tiptap/extensions': ^3.11.0
+
+ '@tiptap/extension-horizontal-rule@3.11.0':
+ resolution: {integrity: sha512-FugFHZG+oiMBV6k42hn9NOA4wRNc2b9UeEIMR+XwEMpWJInV4VwSwDvu8JClgkDo8z7FEnker9e51DZ00CLWqg==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+ '@tiptap/pm': ^3.11.0
+
+ '@tiptap/extension-italic@3.11.0':
+ resolution: {integrity: sha512-WP6wL2b//8bLVdeUCWOpYA7nUStvrAMMD0nRn0F9CEW+l7vH6El2PZFhHmJ9uqXo5MnyugBpARiwgxfoAlef5w==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+
+ '@tiptap/extension-link@3.11.0':
+ resolution: {integrity: sha512-RoUkGqowVMKLE76KktNOGhzNMyKtwrSDRqeYCe1ODPuOMZvDGexOE8cIuA4A1ODkgN6ji9qE/9Sf8uhpZdH39Q==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+ '@tiptap/pm': ^3.11.0
+
+ '@tiptap/extension-paragraph@3.11.0':
+ resolution: {integrity: sha512-hxgjZOXOqstRTWv+QjWJjK23rD5qzIV9ePlhX3imLeq/MgX0aU9VBDaG5SGKbSjaBNQnpLw6+sABJi3CDP6Z5A==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+
+ '@tiptap/extension-strike@3.11.0':
+ resolution: {integrity: sha512-XVP/WMYLrqLBfUsGPu2H9MrOUZLhGUaxtZ3hSRffDi/lsw53x/coZ9eO0FxOB9R7z2ksHWmticIs+0YnKt9LNQ==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+
+ '@tiptap/extension-text@3.11.0':
+ resolution: {integrity: sha512-ELAYm2BuChzZOqDG9B0k3W6zqM4pwNvXkam28KgHGiT2y7Ni68Rb+NXp16uVR+5zR6hkqnQ/BmJSKzAW59MXpA==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+
+ '@tiptap/extension-underline@3.11.0':
+ resolution: {integrity: sha512-D3PsS/84RlQKFjd5eerMIUioC0mNh4yy1RRV/WbXx6ugu+6T+0hT42gNk9Ap8pDsVQZCk0SHfDyBEUFC2KOwKw==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+
+ '@tiptap/extensions@3.11.0':
+ resolution: {integrity: sha512-g43beA73ZMLezez1st9LEwYrRHZ0FLzlsSlOZKk7sdmtHLmuqWHf4oyb0XAHol1HZIdGv104rYaGNgmQXr1ecQ==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+ '@tiptap/pm': ^3.11.0
+
+ '@tiptap/pm@3.11.0':
+ resolution: {integrity: sha512-plCQDLCZIOc92cizB8NNhBRN0szvYR3cx9i5IXo6v9Xsgcun8KHNcJkesc2AyeqdIs0BtOJZaqQ9adHThz8UDw==}
+
+ '@tiptap/react@3.11.0':
+ resolution: {integrity: sha512-SDGei/2DjwmhzsxIQNr6dkB6NxLgXZjQ6hF36NfDm4937r5NLrWrNk5tCsoDQiKZ0DHEzuJ6yZM5C7I7LZLB6w==}
+ peerDependencies:
+ '@tiptap/core': ^3.11.0
+ '@tiptap/pm': ^3.11.0
+ '@types/react': ^17.0.0 || ^18.0.0 || ^19.0.0
+ '@types/react-dom': ^17.0.0 || ^18.0.0 || ^19.0.0
+ react: ^17.0.0 || ^18.0.0 || ^19.0.0
+ react-dom: ^17.0.0 || ^18.0.0 || ^19.0.0
+
'@types/canvas-confetti@1.9.0':
resolution: {integrity: sha512-aBGj/dULrimR1XDZLtG9JwxX1b4HPRF6CX9Yfwh3NvstZEm1ZL7RBnel4keCPSqs1ANRu1u2Aoz9R+VmtjYuTg==}
@@ -2440,12 +2650,21 @@ packages:
'@types/katex@0.16.7':
resolution: {integrity: sha512-HMwFiRujE5PjrgwHQ25+bsLJgowjGjm5Z8FVSf0N6PwgJrwxH0QxzHYDcKsTfV3wva0vzrpqMTJS2jXPr5BMEQ==}
+ '@types/linkify-it@5.0.0':
+ resolution: {integrity: sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==}
+
+ '@types/markdown-it@14.1.2':
+ resolution: {integrity: sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==}
+
'@types/mdast@3.0.15':
resolution: {integrity: sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==}
'@types/mdast@4.0.4':
resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==}
+ '@types/mdurl@2.0.0':
+ resolution: {integrity: sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==}
+
'@types/mdx@2.0.13':
resolution: {integrity: sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==}
@@ -2478,6 +2697,9 @@ packages:
'@types/unist@3.0.3':
resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==}
+ '@types/use-sync-external-store@0.0.6':
+ resolution: {integrity: sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==}
+
'@typescript-eslint/eslint-plugin@8.25.0':
resolution: {integrity: sha512-VM7bpzAe7JO/BFf40pIT1lJqS/z1F8OaSsUB3rpFJucQA4cOSuH2RVVVkFULN+En0Djgr29/jb4EQnedUo95KA==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -2580,6 +2802,10 @@ packages:
engines: {node: '>=0.4.0'}
hasBin: true
+ agent-base@7.1.4:
+ resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==}
+ engines: {node: '>= 14'}
+
ai@4.3.19:
resolution: {integrity: sha512-dIE2bfNpqHN3r6IINp9znguYdhIOheKW2LDigAMrgt/upT3B8eBGPSCblENvaZGoq+hxaN9fSMzjWpbqloP+7Q==}
engines: {node: '>=18'}
@@ -2661,6 +2887,9 @@ packages:
resolution: {integrity: sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==}
engines: {node: '>= 0.4'}
+ asynckit@0.4.0:
+ resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
+
attr-accept@2.2.5:
resolution: {integrity: sha512-0bDNnY/u6pPwHDMoF0FieU354oBi0a8rD9FcsLwzcGWbc8KS8KPIi7y+s13OlVY+gMWc/9xEMUgNE6Qm8ZllYQ==}
engines: {node: '>=4'}
@@ -2829,6 +3058,10 @@ packages:
color-name@1.1.4:
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
+ combined-stream@1.0.8:
+ resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
+ engines: {node: '>= 0.8'}
+
comma-separated-tokens@1.0.8:
resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==}
@@ -2868,6 +3101,10 @@ packages:
engines: {node: '>=4'}
hasBin: true
+ cssstyle@4.6.0:
+ resolution: {integrity: sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg==}
+ engines: {node: '>=18'}
+
csstype@3.1.3:
resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==}
@@ -2878,6 +3115,10 @@ packages:
damerau-levenshtein@1.0.8:
resolution: {integrity: sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==}
+ data-urls@5.0.0:
+ resolution: {integrity: sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==}
+ engines: {node: '>=18'}
+
data-view-buffer@1.0.2:
resolution: {integrity: sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==}
engines: {node: '>= 0.4'}
@@ -2938,6 +3179,10 @@ packages:
resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==}
engines: {node: '>= 0.4'}
+ delayed-stream@1.0.0:
+ resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
+ engines: {node: '>=0.4.0'}
+
dequal@2.0.3:
resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==}
engines: {node: '>=6'}
@@ -3090,6 +3335,9 @@ packages:
react: ^18.0.0
react-dom: ^18.0.0
+ emoji-mart@5.6.0:
+ resolution: {integrity: sha512-eJp3QRe79pjwa+duv+n7+5YsNhRcMl812EcFVwrnRvYKoNPoQb5qxU8DG6Bgwji0akHdp6D4Ln6tYLG58MFSow==}
+
emoji-regex@9.2.2:
resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==}
@@ -3430,6 +3678,10 @@ packages:
resolution: {integrity: sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==}
engines: {node: '>= 0.4'}
+ form-data@4.0.5:
+ resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==}
+ engines: {node: '>= 6'}
+
format@0.2.2:
resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==}
engines: {node: '>=0.4.x'}
@@ -3614,6 +3866,12 @@ packages:
resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
engines: {node: '>= 0.4'}
+ hast-util-embedded@3.0.0:
+ resolution: {integrity: sha512-naH8sld4Pe2ep03qqULEtvYr7EjrLK2QHY8KJR6RJkTUjPGObe1vnx585uzem2hGra+s1q08DZZpfgDVYRbaXA==}
+
+ hast-util-format@1.1.0:
+ resolution: {integrity: sha512-yY1UDz6bC9rDvCWHpx12aIBGRG7krurX0p0Fm6pT547LwDIZZiNr8a+IHDogorAdreULSEzP82Nlv5SZkHZcjA==}
+
hast-util-from-dom@5.0.1:
resolution: {integrity: sha512-N+LqofjR2zuzTjCPzyDUdSshy4Ma6li7p/c3pA78uTwzFgENbgbUrm2ugwsOdcjI1muO+o6Dgzp9p8WHtn/39Q==}
@@ -3626,15 +3884,27 @@ packages:
hast-util-from-parse5@8.0.3:
resolution: {integrity: sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==}
+ hast-util-has-property@3.0.0:
+ resolution: {integrity: sha512-MNilsvEKLFpV604hwfhVStK0usFY/QmM5zX16bo7EjnAEGofr5YyI37kzopBlZJkHD4t887i+q/C8/tr5Q94cA==}
+
+ hast-util-is-body-ok-link@3.0.1:
+ resolution: {integrity: sha512-0qpnzOBLztXHbHQenVB8uNuxTnm/QBFUOmdOSsEn7GnBtyY07+ENTWVFBAnXd/zEgd9/SUG3lRY7hSIBWRgGpQ==}
+
hast-util-is-element@3.0.0:
resolution: {integrity: sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g==}
+ hast-util-minify-whitespace@1.0.1:
+ resolution: {integrity: sha512-L96fPOVpnclQE0xzdWb/D12VT5FabA7SnZOUMtL1DbXmYiHJMXZvFkIZfiMmTCNJHUeO2K9UYNXoVyfz+QHuOw==}
+
hast-util-parse-selector@2.2.5:
resolution: {integrity: sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==}
hast-util-parse-selector@4.0.0:
resolution: {integrity: sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==}
+ hast-util-phrasing@3.0.1:
+ resolution: {integrity: sha512-6h60VfI3uBQUxHqTyMymMZnEbNl1XmEGtOxxKYL7stY2o601COo62AWAYBQR9lZbYXYSBoxag8UpPRXK+9fqSQ==}
+
hast-util-raw@9.1.0:
resolution: {integrity: sha512-Y8/SBAHkZGoNkpzqqfCldijcuUKh7/su31kEBp67cFY09Wy0mTRgtsLYsiIxMJxlu0f6AA5SUTbDR8K0rxnbUw==}
@@ -3650,6 +3920,9 @@ packages:
hast-util-to-jsx-runtime@2.3.6:
resolution: {integrity: sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==}
+ hast-util-to-mdast@10.1.2:
+ resolution: {integrity: sha512-FiCRI7NmOvM4y+f5w32jPRzcxDIz+PUqDwEqn1A+1q2cdp3B8Gx7aVrXORdOKjMNDQsD1ogOr896+0jJHW1EFQ==}
+
hast-util-to-parse5@8.0.0:
resolution: {integrity: sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw==}
@@ -3681,12 +3954,31 @@ packages:
highlightjs-vue@1.0.0:
resolution: {integrity: sha512-PDEfEF102G23vHmPhLyPboFCD+BkMGu+GuJe2d9/eH4FsCwvgBpnc9n0pGE+ffKdph38s6foEZiEjdgHdzp+IA==}
+ html-encoding-sniffer@4.0.0:
+ resolution: {integrity: sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==}
+ engines: {node: '>=18'}
+
html-url-attributes@3.0.1:
resolution: {integrity: sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==}
html-void-elements@3.0.0:
resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==}
+ html-whitespace-sensitive-tag-names@3.0.1:
+ resolution: {integrity: sha512-q+310vW8zmymYHALr1da4HyXUQ0zgiIwIicEfotYPWGN0OJVEN/58IJ3A4GBYcEq3LGAZqKb+ugvP0GNB9CEAA==}
+
+ http-proxy-agent@7.0.2:
+ resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==}
+ engines: {node: '>= 14'}
+
+ https-proxy-agent@7.0.6:
+ resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==}
+ engines: {node: '>= 14'}
+
+ iconv-lite@0.6.3:
+ resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
+ engines: {node: '>=0.10.0'}
+
ieee754@1.2.1:
resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==}
@@ -3824,6 +4116,9 @@ packages:
resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==}
engines: {node: '>=12'}
+ is-potential-custom-element-name@1.0.1:
+ resolution: {integrity: sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==}
+
is-regex@1.2.1:
resolution: {integrity: sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==}
engines: {node: '>= 0.4'}
@@ -3915,6 +4210,15 @@ packages:
resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==}
hasBin: true
+ jsdom@25.0.1:
+ resolution: {integrity: sha512-8i7LzZj7BF8uplX+ZyOlIz86V6TAsSs+np6m1kpW9u0JWi4z/1t+FzcK1aek+ybTnAC4KhBL4uXCNT0wcUIeCw==}
+ engines: {node: '>=18'}
+ peerDependencies:
+ canvas: ^2.11.2
+ peerDependenciesMeta:
+ canvas:
+ optional: true
+
json-buffer@3.0.1:
resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==}
@@ -4034,6 +4338,12 @@ packages:
resolution: {integrity: sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==}
engines: {node: '>= 12.0.0'}
+ linkify-it@5.0.0:
+ resolution: {integrity: sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==}
+
+ linkifyjs@4.3.2:
+ resolution: {integrity: sha512-NT1CJtq3hHIreOianA8aSXn6Cw0JzYOuDQbOrSPe7gqFnCpKP++MQe3ODgO3oh2GJFORkAAdqredOa60z63GbA==}
+
locate-path@6.0.0:
resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==}
engines: {node: '>=10'}
@@ -4069,6 +4379,9 @@ packages:
lowlight@1.20.0:
resolution: {integrity: sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==}
+ lru-cache@10.4.3:
+ resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==}
+
lru-cache@11.1.0:
resolution: {integrity: sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==}
engines: {node: 20 || >=22}
@@ -4100,6 +4413,10 @@ packages:
resolution: {integrity: sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==}
engines: {node: '>=16'}
+ markdown-it@14.1.0:
+ resolution: {integrity: sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==}
+ hasBin: true
+
markdown-table@3.0.4:
resolution: {integrity: sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==}
@@ -4203,6 +4520,9 @@ packages:
mdast-util-to-string@4.0.0:
resolution: {integrity: sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==}
+ mdurl@2.0.0:
+ resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==}
+
memoize-one@5.2.1:
resolution: {integrity: sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==}
@@ -4420,10 +4740,18 @@ packages:
resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
engines: {node: '>=8.6'}
+ mime-db@1.52.0:
+ resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
+ engines: {node: '>= 0.6'}
+
mime-db@1.54.0:
resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==}
engines: {node: '>= 0.6'}
+ mime-types@2.1.35:
+ resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
+ engines: {node: '>= 0.6'}
+
mimic-response@3.1.0:
resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==}
engines: {node: '>=10'}
@@ -4559,6 +4887,9 @@ packages:
number-flow@0.5.8:
resolution: {integrity: sha512-FPr1DumWyGi5Nucoug14bC6xEz70A1TnhgSHhKyfqjgji2SOTz+iLJxKtv37N5JyJbteGYCm6NQ9p1O4KZ7iiA==}
+ nwsapi@2.2.22:
+ resolution: {integrity: sha512-ujSMe1OWVn55euT1ihwCI1ZcAaAU3nxUiDwfDQldc51ZXaB9m2AyOn6/jh1BLe2t/G8xd6uKG1UBF2aZJeg2SQ==}
+
object-assign@4.1.1:
resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==}
engines: {node: '>=0.10.0'}
@@ -4604,6 +4935,9 @@ packages:
resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==}
engines: {node: '>= 0.8.0'}
+ orderedmap@2.1.1:
+ resolution: {integrity: sha512-TvAWxi0nDe1j/rtMcWcIj94+Ffe6n7zhow33h40SKxmsmozs6dz/e+EajymfoFcHd7sxNn8yHM8839uixMOV6g==}
+
outvariant@1.4.0:
resolution: {integrity: sha512-AlWY719RF02ujitly7Kk/0QlV+pXGFDHrHf9O2OKqyqgBieaPOIeuSkL8sRK6j2WK+/ZAURq2kZsY0d8JapUiw==}
@@ -4775,9 +5109,106 @@ packages:
property-information@7.1.0:
resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==}
+ prosemirror-changeset@2.3.1:
+ resolution: {integrity: sha512-j0kORIBm8ayJNl3zQvD1TTPHJX3g042et6y/KQhZhnPrruO8exkTgG8X+NRpj7kIyMMEx74Xb3DyMIBtO0IKkQ==}
+
+ prosemirror-collab@1.3.1:
+ resolution: {integrity: sha512-4SnynYR9TTYaQVXd/ieUvsVV4PDMBzrq2xPUWutHivDuOshZXqQ5rGbZM84HEaXKbLdItse7weMGOUdDVcLKEQ==}
+
+ prosemirror-commands@1.7.1:
+ resolution: {integrity: sha512-rT7qZnQtx5c0/y/KlYaGvtG411S97UaL6gdp6RIZ23DLHanMYLyfGBV5DtSnZdthQql7W+lEVbpSfwtO8T+L2w==}
+
+ prosemirror-dropcursor@1.8.2:
+ resolution: {integrity: sha512-CCk6Gyx9+Tt2sbYk5NK0nB1ukHi2ryaRgadV/LvyNuO3ena1payM2z6Cg0vO1ebK8cxbzo41ku2DE5Axj1Zuiw==}
+
+ prosemirror-gapcursor@1.4.0:
+ resolution: {integrity: sha512-z00qvurSdCEWUIulij/isHaqu4uLS8r/Fi61IbjdIPJEonQgggbJsLnstW7Lgdk4zQ68/yr6B6bf7sJXowIgdQ==}
+
+ prosemirror-highlight@0.13.0:
+ resolution: {integrity: sha512-GIC2VCTUnukNdsEGLQWWOVpYPl/7/KrVp4xs7XMB48/4rhUrHK8hp8TEog4Irmv+2kmjx24RLnaisGOCP6U8jw==}
+ peerDependencies:
+ '@shikijs/types': ^1.29.2 || ^2.0.0 || ^3.0.0
+ '@types/hast': ^3.0.0
+ highlight.js: ^11.9.0
+ lowlight: ^3.1.0
+ prosemirror-model: ^1.19.3
+ prosemirror-state: ^1.4.3
+ prosemirror-transform: ^1.8.0
+ prosemirror-view: ^1.32.4
+ refractor: ^5.0.0
+ sugar-high: ^0.6.1 || ^0.7.0 || ^0.8.0 || ^0.9.0
+ peerDependenciesMeta:
+ '@shikijs/types':
+ optional: true
+ '@types/hast':
+ optional: true
+ highlight.js:
+ optional: true
+ lowlight:
+ optional: true
+ prosemirror-model:
+ optional: true
+ prosemirror-state:
+ optional: true
+ prosemirror-transform:
+ optional: true
+ prosemirror-view:
+ optional: true
+ refractor:
+ optional: true
+ sugar-high:
+ optional: true
+
+ prosemirror-history@1.5.0:
+ resolution: {integrity: sha512-zlzTiH01eKA55UAf1MEjtssJeHnGxO0j4K4Dpx+gnmX9n+SHNlDqI2oO1Kv1iPN5B1dm5fsljCfqKF9nFL6HRg==}
+
+ prosemirror-inputrules@1.5.1:
+ resolution: {integrity: sha512-7wj4uMjKaXWAQ1CDgxNzNtR9AlsuwzHfdFH1ygEHA2KHF2DOEaXl1CJfNPAKCg9qNEh4rum975QLaCiQPyY6Fw==}
+
+ prosemirror-keymap@1.2.3:
+ resolution: {integrity: sha512-4HucRlpiLd1IPQQXNqeo81BGtkY8Ai5smHhKW9jjPKRc2wQIxksg7Hl1tTI2IfT2B/LgX6bfYvXxEpJl7aKYKw==}
+
+ prosemirror-markdown@1.13.2:
+ resolution: {integrity: sha512-FPD9rHPdA9fqzNmIIDhhnYQ6WgNoSWX9StUZ8LEKapaXU9i6XgykaHKhp6XMyXlOWetmaFgGDS/nu/w9/vUc5g==}
+
+ prosemirror-menu@1.2.5:
+ resolution: {integrity: sha512-qwXzynnpBIeg1D7BAtjOusR+81xCp53j7iWu/IargiRZqRjGIlQuu1f3jFi+ehrHhWMLoyOQTSRx/IWZJqOYtQ==}
+
+ prosemirror-model@1.25.4:
+ resolution: {integrity: sha512-PIM7E43PBxKce8OQeezAs9j4TP+5yDpZVbuurd1h5phUxEKIu+G2a+EUZzIC5nS1mJktDJWzbqS23n1tsAf5QA==}
+
+ prosemirror-schema-basic@1.2.4:
+ resolution: {integrity: sha512-ELxP4TlX3yr2v5rM7Sb70SqStq5NvI15c0j9j/gjsrO5vaw+fnnpovCLEGIcpeGfifkuqJwl4fon6b+KdrODYQ==}
+
+ prosemirror-schema-list@1.5.1:
+ resolution: {integrity: sha512-927lFx/uwyQaGwJxLWCZRkjXG0p48KpMj6ueoYiu4JX05GGuGcgzAy62dfiV8eFZftgyBUvLx76RsMe20fJl+Q==}
+
+ prosemirror-state@1.4.4:
+ resolution: {integrity: sha512-6jiYHH2CIGbCfnxdHbXZ12gySFY/fz/ulZE333G6bPqIZ4F+TXo9ifiR86nAHpWnfoNjOb3o5ESi7J8Uz1jXHw==}
+
+ prosemirror-tables@1.8.1:
+ resolution: {integrity: sha512-DAgDoUYHCcc6tOGpLVPSU1k84kCUWTWnfWX3UDy2Delv4ryH0KqTD6RBI6k4yi9j9I8gl3j8MkPpRD/vWPZbug==}
+
+ prosemirror-trailing-node@3.0.0:
+ resolution: {integrity: sha512-xiun5/3q0w5eRnGYfNlW1uU9W6x5MoFKWwq/0TIRgt09lv7Hcser2QYV8t4muXbEr+Fwo0geYn79Xs4GKywrRQ==}
+ peerDependencies:
+ prosemirror-model: ^1.22.1
+ prosemirror-state: ^1.4.2
+ prosemirror-view: ^1.33.8
+
+ prosemirror-transform@1.10.5:
+ resolution: {integrity: sha512-RPDQCxIDhIBb1o36xxwsaeAvivO8VLJcgBtzmOwQ64bMtsVFh5SSuJ6dWSxO1UsHTiTXPCgQm3PDJt7p6IOLbw==}
+
+ prosemirror-view@1.41.3:
+ resolution: {integrity: sha512-SqMiYMUQNNBP9kfPhLO8WXEk/fon47vc52FQsUiJzTBuyjKgEcoAwMyF04eQ4WZ2ArMn7+ReypYL60aKngbACQ==}
+
pump@3.0.3:
resolution: {integrity: sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==}
+ punycode.js@2.3.1:
+ resolution: {integrity: sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==}
+ engines: {node: '>=6'}
+
punycode@2.3.1:
resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==}
engines: {node: '>=6'}
@@ -4834,6 +5265,11 @@ packages:
peerDependencies:
react: ^16.8.0 || ^17 || ^18 || ^19
+ react-icons@5.5.0:
+ resolution: {integrity: sha512-MEFcXdkP3dLo8uumGI5xN3lDFNsRtrjbOEKDLD7yv76v4wpnEq2Lt2qeHaQOr34I/wPN3s3+N08WkQ+CW37Xiw==}
+ peerDependencies:
+ react: '*'
+
react-intersection-observer@9.5.1:
resolution: {integrity: sha512-YwcNF/4WsMAG1rLVDQHSbpdEW9vDaIl4QW88d+vqeXNUewFV4AJDQB14oHpAJ3rRCnKRmwu3nqfwwYe6wioNIg==}
peerDependencies:
@@ -4881,6 +5317,12 @@ packages:
react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
+ react-number-format@5.4.4:
+ resolution: {integrity: sha512-wOmoNZoOpvMminhifQYiYSTCLUDOiUbBunrMrMjA+dV52sY+vck1S4UhR6PkgnoCquvvMSeJjErXZ4qSaWCliA==}
+ peerDependencies:
+ react: ^0.14 || ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
+ react-dom: ^0.14 || ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
+
react-pdf@9.2.1:
resolution: {integrity: sha512-AJt0lAIkItWEZRA5d/mO+Om4nPCuTiQ0saA+qItO967DTjmGjnhmF+Bi2tL286mOTfBlF5CyLzJ35KTMaDoH+A==}
peerDependencies:
@@ -4958,6 +5400,12 @@ packages:
peerDependencies:
react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
+ react-textarea-autosize@8.5.9:
+ resolution: {integrity: sha512-U1DGlIQN5AwgjTyOEnI1oCcMuEr1pv1qOtklB2l4nyMGbHzWrI0eFsYK0zos2YWqAolJyG0IWJaqWmWj5ETh0A==}
+ engines: {node: '>=10'}
+ peerDependencies:
+ react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
+
react-window@1.8.9:
resolution: {integrity: sha512-+Eqx/fj1Aa5WnhRfj9dJg4VYATGwIUP2ItwItiJ6zboKWA6EX3lYDAXfGF2hyNqplEprhbtjbipiADEcwQ823Q==}
engines: {node: '>8.0.0'}
@@ -5017,18 +5465,33 @@ packages:
resolution: {integrity: sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==}
engines: {node: '>= 0.4'}
+ rehype-format@5.0.1:
+ resolution: {integrity: sha512-zvmVru9uB0josBVpr946OR8ui7nJEdzZobwLOOqHb/OOD88W0Vk2SqLwoVOj0fM6IPCCO6TaV9CvQvJMWwukFQ==}
+
rehype-katex@7.0.1:
resolution: {integrity: sha512-OiM2wrZ/wuhKkigASodFoo8wimG3H12LWQaH8qSPVJn9apWKFSH3YOCtbKpBorTVw/eI7cuT21XBbvwEswbIOA==}
+ rehype-minify-whitespace@6.0.2:
+ resolution: {integrity: sha512-Zk0pyQ06A3Lyxhe9vGtOtzz3Z0+qZ5+7icZ/PL/2x1SHPbKao5oB/g/rlc6BCTajqBb33JcOe71Ye1oFsuYbnw==}
+
+ rehype-parse@9.0.1:
+ resolution: {integrity: sha512-ksCzCD0Fgfh7trPDxr2rSylbwq9iYDkSn8TCDmEJ49ljEUBxDVCzCHv7QNzZOfODanX4+bWQ4WZqLCRWYLfhag==}
+
rehype-raw@7.0.0:
resolution: {integrity: sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==}
rehype-recma@1.0.0:
resolution: {integrity: sha512-lqA4rGUf1JmacCNWWZx0Wv1dHqMwxzsDWYMTowuplHF3xH0N/MmrZ/G3BDZnzAkRmxDadujCjaKM2hqYdCBOGw==}
+ rehype-remark@10.0.1:
+ resolution: {integrity: sha512-EmDndlb5NVwXGfUa4c9GPK+lXeItTilLhE6ADSaQuHr4JUlKw9MidzGzx4HpqZrNCt6vnHmEifXQiiA+CEnjYQ==}
+
rehype-sanitize@6.0.0:
resolution: {integrity: sha512-CsnhKNsyI8Tub6L4sm5ZFsme4puGfc6pYylvXo1AeqaGbjOYyzNv3qZPwvs0oMJ39eryyeOdmxwUIo94IpEhqg==}
+ rehype-stringify@10.0.1:
+ resolution: {integrity: sha512-k9ecfXHmIPuFVI61B9DeLPN0qFHfawM6RsuX48hoqlaKSF61RskNjSm1lI8PhBEM0MRdLxVVm4WmTqJQccH9mA==}
+
remark-code-import@1.2.0:
resolution: {integrity: sha512-fgwLruqlZbVOIhCJFjY+JDwPZhA4/eK3InJzN8Ox8UDdtudpG212JwtRj6la+lAzJU7JmSEyewZSukVZdknt3Q==}
engines: {node: '>= 12'}
@@ -5083,9 +5546,18 @@ packages:
resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==}
engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
+ rope-sequence@1.3.4:
+ resolution: {integrity: sha512-UT5EDe2cu2E/6O4igUr5PSFs23nvvukicWHx6GnOPlHAiiYbzNuCRQCuiUdHJQcqKalLKlrYJnjY0ySGsXNQXQ==}
+
rough-notation@0.5.1:
resolution: {integrity: sha512-ITHofTzm13cWFVfoGsh/4c/k2Mg8geKgBCwex71UZLnNuw403tCRjYPQ68jSAd37DMbZIePXPjDgY0XdZi9HPw==}
+ rrweb-cssom@0.7.1:
+ resolution: {integrity: sha512-TrEMa7JGdVm0UThDJSx7ddw5nVm3UJS9o9CCIZ72B1vSyEZoziDqBYP3XIoi/12lKrJR8rE3jeFHMok2F/Mnsg==}
+
+ rrweb-cssom@0.8.0:
+ resolution: {integrity: sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==}
+
run-parallel@1.2.0:
resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
@@ -5108,6 +5580,13 @@ packages:
resolution: {integrity: sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==}
engines: {node: '>= 0.4'}
+ safer-buffer@2.1.2:
+ resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
+
+ saxes@6.0.0:
+ resolution: {integrity: sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==}
+ engines: {node: '>=v12.22.7'}
+
scheduler@0.26.0:
resolution: {integrity: sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==}
@@ -5306,6 +5785,9 @@ packages:
peerDependencies:
react: ^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
+ symbol-tree@3.2.4:
+ resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==}
+
tabbable@6.2.0:
resolution: {integrity: sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==}
@@ -5362,6 +5844,13 @@ packages:
resolution: {integrity: sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==}
engines: {node: '>=12.0.0'}
+ tldts-core@6.1.86:
+ resolution: {integrity: sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==}
+
+ tldts@6.1.86:
+ resolution: {integrity: sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==}
+ hasBin: true
+
to-gatsby-remark-plugin@0.1.0:
resolution: {integrity: sha512-blmhJ/gIrytWnWLgPSRCkhCPeki6UBK2daa3k9mGahN7GjwHu8KrS7F70MvwlsG7IE794JLgwAdCbi4hU4faFQ==}
@@ -5372,12 +5861,23 @@ packages:
to-vfile@6.1.0:
resolution: {integrity: sha512-BxX8EkCxOAZe+D/ToHdDsJcVI4HqQfmw0tCkp31zf3dNP/XWIAjU4CmeuSwsSoOzOTqHPOL0KUzyZqJplkD0Qw==}
+ tough-cookie@5.1.2:
+ resolution: {integrity: sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==}
+ engines: {node: '>=16'}
+
tr46@0.0.3:
resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
+ tr46@5.1.1:
+ resolution: {integrity: sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==}
+ engines: {node: '>=18'}
+
trim-lines@3.0.1:
resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==}
+ trim-trailing-lines@2.1.0:
+ resolution: {integrity: sha512-5UR5Biq4VlVOtzqkm2AZlgvSlDJtME46uV0br0gENbwN4l5+mMKT4b9gJKqWtuL2zAIqajGJGuvbCbcAJUZqBg==}
+
trough@2.2.0:
resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==}
@@ -5408,6 +5908,10 @@ packages:
resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==}
engines: {node: '>= 0.8.0'}
+ type-fest@4.41.0:
+ resolution: {integrity: sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==}
+ engines: {node: '>=16'}
+
type@2.7.3:
resolution: {integrity: sha512-8j+1QmAbPvLZow5Qpi6NCaN8FB60p/6x8/vfNqOk/hC+HuvFZhL4+WfekuhQLiqFZXOgQdrs3B+XxEmCc6b3FQ==}
@@ -5436,6 +5940,9 @@ packages:
resolution: {integrity: sha512-z6PJ8Lml+v3ichVojCiB8toQJBuwR42ySM4ezjXIqXK3M0HczmKQ3LF4rhU55PfD99KEEXQG6yb7iOMyvYuHew==}
hasBin: true
+ uc.micro@2.1.0:
+ resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==}
+
unbox-primitive@1.1.0:
resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==}
engines: {node: '>= 0.4'}
@@ -5565,6 +6072,10 @@ packages:
util-deprecate@1.0.2:
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
+ uuid@8.3.2:
+ resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==}
+ hasBin: true
+
uvu@0.5.6:
resolution: {integrity: sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==}
engines: {node: '>=8'}
@@ -5600,6 +6111,10 @@ packages:
w3c-keyname@2.2.8:
resolution: {integrity: sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==}
+ w3c-xmlserializer@5.0.0:
+ resolution: {integrity: sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==}
+ engines: {node: '>=18'}
+
warning@4.0.3:
resolution: {integrity: sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==}
@@ -5609,6 +6124,22 @@ packages:
webidl-conversions@3.0.1:
resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
+ webidl-conversions@7.0.0:
+ resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==}
+ engines: {node: '>=12'}
+
+ whatwg-encoding@3.1.1:
+ resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==}
+ engines: {node: '>=18'}
+
+ whatwg-mimetype@4.0.0:
+ resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==}
+ engines: {node: '>=18'}
+
+ whatwg-url@14.2.0:
+ resolution: {integrity: sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==}
+ engines: {node: '>=18'}
+
whatwg-url@5.0.0:
resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==}
@@ -5640,10 +6171,45 @@ packages:
wrappy@1.0.2:
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
+ ws@8.18.3:
+ resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
+ engines: {node: '>=10.0.0'}
+ peerDependencies:
+ bufferutil: ^4.0.1
+ utf-8-validate: '>=5.0.2'
+ peerDependenciesMeta:
+ bufferutil:
+ optional: true
+ utf-8-validate:
+ optional: true
+
+ xml-name-validator@5.0.0:
+ resolution: {integrity: sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==}
+ engines: {node: '>=18'}
+
+ xmlchars@2.2.0:
+ resolution: {integrity: sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==}
+
xtend@4.0.2:
resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==}
engines: {node: '>=0.4'}
+ y-prosemirror@1.3.7:
+ resolution: {integrity: sha512-NpM99WSdD4Fx4if5xOMDpPtU3oAmTSjlzh5U4353ABbRHl1HtAFUx6HlebLZfyFxXN9jzKMDkVbcRjqOZVkYQg==}
+ engines: {node: '>=16.0.0', npm: '>=8.0.0'}
+ peerDependencies:
+ prosemirror-model: ^1.7.1
+ prosemirror-state: ^1.2.3
+ prosemirror-view: ^1.9.10
+ y-protocols: ^1.0.1
+ yjs: ^13.5.38
+
+ y-protocols@1.0.6:
+ resolution: {integrity: sha512-vHRF2L6iT3rwj1jub/K5tYcTT/mEYDUppgNPXwp8fmLpui9f7Yeq3OEtTLVF012j39QnV+KEQpNqoN7CWU7Y9Q==}
+ engines: {node: '>=16.0.0', npm: '>=8.0.0'}
+ peerDependencies:
+ yjs: ^13.0.0
+
yallist@5.0.0:
resolution: {integrity: sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==}
engines: {node: '>=18'}
@@ -5707,6 +6273,14 @@ snapshots:
'@jridgewell/gen-mapping': 0.3.8
'@jridgewell/trace-mapping': 0.3.25
+ '@asamuzakjp/css-color@3.2.0':
+ dependencies:
+ '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)
+ '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)
+ '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4)
+ '@csstools/css-tokenizer': 3.0.4
+ lru-cache: 10.4.3
+
'@babel/runtime@7.26.9':
dependencies:
regenerator-runtime: 0.14.1
@@ -5746,6 +6320,136 @@ snapshots:
'@biomejs/cli-win32-x64@2.1.2':
optional: true
+ '@blocknote/core@0.42.3(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(highlight.js@11.11.1)':
+ dependencies:
+ '@emoji-mart/data': 1.2.1
+ '@shikijs/types': 3.13.0
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+ '@tiptap/extension-bold': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))
+ '@tiptap/extension-code': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))
+ '@tiptap/extension-gapcursor': 3.11.0(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))
+ '@tiptap/extension-history': 3.11.0(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))
+ '@tiptap/extension-horizontal-rule': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)
+ '@tiptap/extension-italic': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))
+ '@tiptap/extension-link': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)
+ '@tiptap/extension-paragraph': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))
+ '@tiptap/extension-strike': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))
+ '@tiptap/extension-text': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))
+ '@tiptap/extension-underline': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))
+ '@tiptap/pm': 3.11.0
+ emoji-mart: 5.6.0
+ fast-deep-equal: 3.1.3
+ hast-util-from-dom: 5.0.1
+ prosemirror-dropcursor: 1.8.2
+ prosemirror-highlight: 0.13.0(@shikijs/types@3.13.0)(@types/hast@3.0.4)(highlight.js@11.11.1)(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-transform@1.10.5)(prosemirror-view@1.41.3)
+ prosemirror-model: 1.25.4
+ prosemirror-state: 1.4.4
+ prosemirror-tables: 1.8.1
+ prosemirror-transform: 1.10.5
+ prosemirror-view: 1.41.3
+ rehype-format: 5.0.1
+ rehype-parse: 9.0.1
+ rehype-remark: 10.0.1
+ rehype-stringify: 10.0.1
+ remark-gfm: 4.0.1
+ remark-parse: 11.0.0
+ remark-rehype: 11.1.2
+ remark-stringify: 11.0.0
+ unified: 11.0.5
+ unist-util-visit: 5.0.0
+ uuid: 8.3.2
+ y-prosemirror: 1.3.7(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-view@1.41.3)(y-protocols@1.0.6(yjs@13.6.27))(yjs@13.6.27)
+ y-protocols: 1.0.6(yjs@13.6.27)
+ yjs: 13.6.27
+ transitivePeerDependencies:
+ - '@tiptap/extensions'
+ - '@types/hast'
+ - highlight.js
+ - lowlight
+ - refractor
+ - sugar-high
+ - supports-color
+
+ '@blocknote/mantine@0.42.3(@floating-ui/dom@1.7.4)(@mantine/core@8.3.9(@mantine/hooks@8.3.9(react@19.1.0))(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(@mantine/hooks@8.3.9(react@19.1.0))(@mantine/utils@6.0.22(react@19.1.0))(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(highlight.js@11.11.1)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)':
+ dependencies:
+ '@blocknote/core': 0.42.3(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(highlight.js@11.11.1)
+ '@blocknote/react': 0.42.3(@floating-ui/dom@1.7.4)(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(highlight.js@11.11.1)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ '@mantine/core': 8.3.9(@mantine/hooks@8.3.9(react@19.1.0))(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ '@mantine/hooks': 8.3.9(react@19.1.0)
+ '@mantine/utils': 6.0.22(react@19.1.0)
+ react: 19.1.0
+ react-dom: 19.1.0(react@19.1.0)
+ react-icons: 5.5.0(react@19.1.0)
+ transitivePeerDependencies:
+ - '@floating-ui/dom'
+ - '@hocuspocus/provider'
+ - '@tiptap/extensions'
+ - '@types/hast'
+ - '@types/react'
+ - '@types/react-dom'
+ - highlight.js
+ - lowlight
+ - refractor
+ - sugar-high
+ - supports-color
+
+ '@blocknote/react@0.42.3(@floating-ui/dom@1.7.4)(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(highlight.js@11.11.1)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)':
+ dependencies:
+ '@blocknote/core': 0.42.3(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(highlight.js@11.11.1)
+ '@emoji-mart/data': 1.2.1
+ '@floating-ui/react': 0.27.16(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+ '@tiptap/pm': 3.11.0
+ '@tiptap/react': 3.11.0(@floating-ui/dom@1.7.4)(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ emoji-mart: 5.6.0
+ lodash.merge: 4.6.2
+ react: 19.1.0
+ react-dom: 19.1.0(react@19.1.0)
+ react-icons: 5.5.0(react@19.1.0)
+ transitivePeerDependencies:
+ - '@floating-ui/dom'
+ - '@hocuspocus/provider'
+ - '@tiptap/extensions'
+ - '@types/hast'
+ - '@types/react'
+ - '@types/react-dom'
+ - highlight.js
+ - lowlight
+ - refractor
+ - sugar-high
+ - supports-color
+
+ '@blocknote/server-util@0.42.3(@floating-ui/dom@1.7.4)(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(highlight.js@11.11.1)(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-view@1.41.3)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)':
+ dependencies:
+ '@blocknote/core': 0.42.3(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(highlight.js@11.11.1)
+ '@blocknote/react': 0.42.3(@floating-ui/dom@1.7.4)(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))(@types/hast@3.0.4)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(highlight.js@11.11.1)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+ '@tiptap/pm': 3.11.0
+ jsdom: 25.0.1
+ react: 19.1.0
+ react-dom: 19.1.0(react@19.1.0)
+ y-prosemirror: 1.3.7(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-view@1.41.3)(y-protocols@1.0.6(yjs@13.6.27))(yjs@13.6.27)
+ y-protocols: 1.0.6(yjs@13.6.27)
+ yjs: 13.6.27
+ transitivePeerDependencies:
+ - '@floating-ui/dom'
+ - '@hocuspocus/provider'
+ - '@tiptap/extensions'
+ - '@types/hast'
+ - '@types/react'
+ - '@types/react-dom'
+ - bufferutil
+ - canvas
+ - highlight.js
+ - lowlight
+ - prosemirror-model
+ - prosemirror-state
+ - prosemirror-view
+ - refractor
+ - sugar-high
+ - supports-color
+ - utf-8-validate
+
'@codemirror/autocomplete@6.18.6':
dependencies:
'@codemirror/language': 6.11.2
@@ -6038,6 +6742,26 @@ snapshots:
react-dom: 19.1.0(react@19.1.0)
react-is: 17.0.2
+ '@csstools/color-helpers@5.1.0': {}
+
+ '@csstools/css-calc@2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)':
+ dependencies:
+ '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4)
+ '@csstools/css-tokenizer': 3.0.4
+
+ '@csstools/css-color-parser@3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)':
+ dependencies:
+ '@csstools/color-helpers': 5.1.0
+ '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)
+ '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4)
+ '@csstools/css-tokenizer': 3.0.4
+
+ '@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4)':
+ dependencies:
+ '@csstools/css-tokenizer': 3.0.4
+
+ '@csstools/css-tokenizer@3.0.4': {}
+
'@date-fns/tz@1.2.0': {}
'@drizzle-team/brocli@0.10.2': {}
@@ -6047,6 +6771,8 @@ snapshots:
tslib: 2.8.1
optional: true
+ '@emoji-mart/data@1.2.1': {}
+
'@esbuild-kit/core-utils@3.3.2':
dependencies:
esbuild: 0.18.20
@@ -6253,6 +6979,10 @@ snapshots:
dependencies:
'@floating-ui/utils': 0.2.10
+ '@floating-ui/core@1.7.3':
+ dependencies:
+ '@floating-ui/utils': 0.2.10
+
'@floating-ui/dom@1.6.13':
dependencies:
'@floating-ui/core': 1.6.9
@@ -6263,6 +6993,11 @@ snapshots:
'@floating-ui/core': 1.7.2
'@floating-ui/utils': 0.2.10
+ '@floating-ui/dom@1.7.4':
+ dependencies:
+ '@floating-ui/core': 1.7.3
+ '@floating-ui/utils': 0.2.10
+
'@floating-ui/react-dom@2.1.2(react-dom@19.1.0(react@19.1.0))(react@19.1.0)':
dependencies:
'@floating-ui/dom': 1.6.13
@@ -6275,6 +7010,12 @@ snapshots:
react: 19.1.0
react-dom: 19.1.0(react@19.1.0)
+ '@floating-ui/react-dom@2.1.6(react-dom@19.1.0(react@19.1.0))(react@19.1.0)':
+ dependencies:
+ '@floating-ui/dom': 1.7.4
+ react: 19.1.0
+ react-dom: 19.1.0(react@19.1.0)
+
'@floating-ui/react@0.27.13(react-dom@19.1.0(react@19.1.0))(react@19.1.0)':
dependencies:
'@floating-ui/react-dom': 2.1.4(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
@@ -6283,6 +7024,14 @@ snapshots:
react-dom: 19.1.0(react@19.1.0)
tabbable: 6.2.0
+ '@floating-ui/react@0.27.16(react-dom@19.1.0(react@19.1.0))(react@19.1.0)':
+ dependencies:
+ '@floating-ui/react-dom': 2.1.6(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ '@floating-ui/utils': 0.2.10
+ react: 19.1.0
+ react-dom: 19.1.0(react@19.1.0)
+ tabbable: 6.2.0
+
'@floating-ui/utils@0.2.10': {}
'@floating-ui/utils@0.2.9': {}
@@ -6766,9 +7515,31 @@ snapshots:
optionalDependencies:
'@types/react': 19.1.8
+ '@mantine/core@8.3.9(@mantine/hooks@8.3.9(react@19.1.0))(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)':
+ dependencies:
+ '@floating-ui/react': 0.27.16(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ '@mantine/hooks': 8.3.9(react@19.1.0)
+ clsx: 2.1.1
+ react: 19.1.0
+ react-dom: 19.1.0(react@19.1.0)
+ react-number-format: 5.4.4(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ react-remove-scroll: 2.7.1(@types/react@19.1.8)(react@19.1.0)
+ react-textarea-autosize: 8.5.9(@types/react@19.1.8)(react@19.1.0)
+ type-fest: 4.41.0
+ transitivePeerDependencies:
+ - '@types/react'
+
+ '@mantine/hooks@8.3.9(react@19.1.0)':
+ dependencies:
+ react: 19.1.0
+
+ '@mantine/utils@6.0.22(react@19.1.0)':
+ dependencies:
+ react: 19.1.0
+
'@marijn/find-cluster-break@1.0.2': {}
- '@mdx-js/mdx@3.1.0(acorn@8.14.0)':
+ '@mdx-js/mdx@3.1.0(acorn@8.15.0)':
dependencies:
'@types/estree': 1.0.8
'@types/estree-jsx': 1.0.5
@@ -6782,7 +7553,7 @@ snapshots:
hast-util-to-jsx-runtime: 2.3.6
markdown-extensions: 2.0.0
recma-build-jsx: 1.0.0
- recma-jsx: 1.0.0(acorn@8.14.0)
+ recma-jsx: 1.0.0(acorn@8.15.0)
recma-stringify: 1.0.0
rehype-recma: 1.0.0
remark-mdx: 3.1.0
@@ -7915,6 +8686,8 @@ snapshots:
dependencies:
react: 19.1.0
+ '@remirror/core-constants@3.0.0': {}
+
'@rtsao/scc@1.1.0': {}
'@rushstack/eslint-patch@1.10.5': {}
@@ -7959,6 +8732,11 @@ snapshots:
'@shikijs/core': 3.8.1
'@shikijs/types': 3.8.1
+ '@shikijs/types@3.13.0':
+ dependencies:
+ '@shikijs/vscode-textmate': 10.0.2
+ '@types/hast': 3.0.4
+
'@shikijs/types@3.8.1':
dependencies:
'@shikijs/vscode-textmate': 10.0.2
@@ -8086,6 +8864,114 @@ snapshots:
'@tanstack/table-core@8.21.3': {}
+ '@tiptap/core@3.11.0(@tiptap/pm@3.11.0)':
+ dependencies:
+ '@tiptap/pm': 3.11.0
+
+ '@tiptap/extension-bold@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+
+ '@tiptap/extension-bubble-menu@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)':
+ dependencies:
+ '@floating-ui/dom': 1.7.4
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+ '@tiptap/pm': 3.11.0
+ optional: true
+
+ '@tiptap/extension-code@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+
+ '@tiptap/extension-floating-menu@3.11.0(@floating-ui/dom@1.7.4)(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)':
+ dependencies:
+ '@floating-ui/dom': 1.7.4
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+ '@tiptap/pm': 3.11.0
+ optional: true
+
+ '@tiptap/extension-gapcursor@3.11.0(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))':
+ dependencies:
+ '@tiptap/extensions': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)
+
+ '@tiptap/extension-history@3.11.0(@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0))':
+ dependencies:
+ '@tiptap/extensions': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)
+
+ '@tiptap/extension-horizontal-rule@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+ '@tiptap/pm': 3.11.0
+
+ '@tiptap/extension-italic@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+
+ '@tiptap/extension-link@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+ '@tiptap/pm': 3.11.0
+ linkifyjs: 4.3.2
+
+ '@tiptap/extension-paragraph@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+
+ '@tiptap/extension-strike@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+
+ '@tiptap/extension-text@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+
+ '@tiptap/extension-underline@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+
+ '@tiptap/extensions@3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+ '@tiptap/pm': 3.11.0
+
+ '@tiptap/pm@3.11.0':
+ dependencies:
+ prosemirror-changeset: 2.3.1
+ prosemirror-collab: 1.3.1
+ prosemirror-commands: 1.7.1
+ prosemirror-dropcursor: 1.8.2
+ prosemirror-gapcursor: 1.4.0
+ prosemirror-history: 1.5.0
+ prosemirror-inputrules: 1.5.1
+ prosemirror-keymap: 1.2.3
+ prosemirror-markdown: 1.13.2
+ prosemirror-menu: 1.2.5
+ prosemirror-model: 1.25.4
+ prosemirror-schema-basic: 1.2.4
+ prosemirror-schema-list: 1.5.1
+ prosemirror-state: 1.4.4
+ prosemirror-tables: 1.8.1
+ prosemirror-trailing-node: 3.0.0(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-view@1.41.3)
+ prosemirror-transform: 1.10.5
+ prosemirror-view: 1.41.3
+
+ '@tiptap/react@3.11.0(@floating-ui/dom@1.7.4)(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)':
+ dependencies:
+ '@tiptap/core': 3.11.0(@tiptap/pm@3.11.0)
+ '@tiptap/pm': 3.11.0
+ '@types/react': 19.1.8
+ '@types/react-dom': 19.1.6(@types/react@19.1.8)
+ '@types/use-sync-external-store': 0.0.6
+ fast-deep-equal: 3.1.3
+ react: 19.1.0
+ react-dom: 19.1.0(react@19.1.0)
+ use-sync-external-store: 1.5.0(react@19.1.0)
+ optionalDependencies:
+ '@tiptap/extension-bubble-menu': 3.11.0(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)
+ '@tiptap/extension-floating-menu': 3.11.0(@floating-ui/dom@1.7.4)(@tiptap/core@3.11.0(@tiptap/pm@3.11.0))(@tiptap/pm@3.11.0)
+ transitivePeerDependencies:
+ - '@floating-ui/dom'
+
'@types/canvas-confetti@1.9.0': {}
'@types/debug@4.1.12':
@@ -8114,6 +9000,13 @@ snapshots:
'@types/katex@0.16.7': {}
+ '@types/linkify-it@5.0.0': {}
+
+ '@types/markdown-it@14.1.2':
+ dependencies:
+ '@types/linkify-it': 5.0.0
+ '@types/mdurl': 2.0.0
+
'@types/mdast@3.0.15':
dependencies:
'@types/unist': 2.0.11
@@ -8122,6 +9015,8 @@ snapshots:
dependencies:
'@types/unist': 3.0.3
+ '@types/mdurl@2.0.0': {}
+
'@types/mdx@2.0.13': {}
'@types/ms@2.1.0': {}
@@ -8154,6 +9049,8 @@ snapshots:
'@types/unist@3.0.3': {}
+ '@types/use-sync-external-store@0.0.6': {}
+
'@typescript-eslint/eslint-plugin@8.25.0(@typescript-eslint/parser@8.25.0(eslint@9.32.0(jiti@2.4.2))(typescript@5.8.3))(eslint@9.32.0(jiti@2.4.2))(typescript@5.8.3)':
dependencies:
'@eslint-community/regexpp': 4.12.1
@@ -8291,6 +9188,8 @@ snapshots:
acorn@8.15.0: {}
+ agent-base@7.1.4: {}
+
ai@4.3.19(react@19.1.0)(zod@3.25.76):
dependencies:
'@ai-sdk/provider': 1.1.3
@@ -8398,6 +9297,8 @@ snapshots:
async-function@1.0.0: {}
+ asynckit@0.4.0: {}
+
attr-accept@2.2.5: {}
available-typed-arrays@1.0.7:
@@ -8567,6 +9468,10 @@ snapshots:
color-name@1.1.4: {}
+ combined-stream@1.0.8:
+ dependencies:
+ delayed-stream: 1.0.0
+
comma-separated-tokens@1.0.8: {}
comma-separated-tokens@2.0.3: {}
@@ -8599,6 +9504,11 @@ snapshots:
cssesc@3.0.0: {}
+ cssstyle@4.6.0:
+ dependencies:
+ '@asamuzakjp/css-color': 3.2.0
+ rrweb-cssom: 0.8.0
+
csstype@3.1.3: {}
d@1.0.2:
@@ -8608,6 +9518,11 @@ snapshots:
damerau-levenshtein@1.0.8: {}
+ data-urls@5.0.0:
+ dependencies:
+ whatwg-mimetype: 4.0.0
+ whatwg-url: 14.2.0
+
data-view-buffer@1.0.2:
dependencies:
call-bound: 1.0.3
@@ -8666,6 +9581,8 @@ snapshots:
has-property-descriptors: 1.0.2
object-keys: 1.1.1
+ delayed-stream@1.0.0: {}
+
dequal@2.0.3: {}
detect-libc@2.0.3: {}
@@ -8740,6 +9657,8 @@ snapshots:
- '@types/react'
- '@types/react-dom'
+ emoji-mart@5.6.0: {}
+
emoji-regex@9.2.2: {}
end-of-stream@1.4.5:
@@ -9314,6 +10233,14 @@ snapshots:
dependencies:
is-callable: 1.2.7
+ form-data@4.0.5:
+ dependencies:
+ asynckit: 0.4.0
+ combined-stream: 1.0.8
+ es-set-tostringtag: 2.1.0
+ hasown: 2.0.2
+ mime-types: 2.1.35
+
format@0.2.2: {}
framer-motion@12.23.22(react-dom@19.1.0(react@19.1.0))(react@19.1.0):
@@ -9358,9 +10285,9 @@ snapshots:
transitivePeerDependencies:
- supports-color
- fumadocs-mdx@11.7.1(acorn@8.14.0)(fumadocs-core@15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0):
+ fumadocs-mdx@11.7.1(acorn@8.15.0)(fumadocs-core@15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0):
dependencies:
- '@mdx-js/mdx': 3.1.0(acorn@8.14.0)
+ '@mdx-js/mdx': 3.1.0(acorn@8.15.0)
'@standard-schema/spec': 1.0.0
chokidar: 4.0.3
esbuild: 0.25.8
@@ -9510,6 +10437,21 @@ snapshots:
dependencies:
function-bind: 1.1.2
+ hast-util-embedded@3.0.0:
+ dependencies:
+ '@types/hast': 3.0.4
+ hast-util-is-element: 3.0.0
+
+ hast-util-format@1.1.0:
+ dependencies:
+ '@types/hast': 3.0.4
+ hast-util-embedded: 3.0.0
+ hast-util-minify-whitespace: 1.0.1
+ hast-util-phrasing: 3.0.1
+ hast-util-whitespace: 3.0.0
+ html-whitespace-sensitive-tag-names: 3.0.1
+ unist-util-visit-parents: 6.0.1
+
hast-util-from-dom@5.0.1:
dependencies:
'@types/hast': 3.0.4
@@ -9543,16 +10485,40 @@ snapshots:
vfile-location: 5.0.3
web-namespaces: 2.0.1
+ hast-util-has-property@3.0.0:
+ dependencies:
+ '@types/hast': 3.0.4
+
+ hast-util-is-body-ok-link@3.0.1:
+ dependencies:
+ '@types/hast': 3.0.4
+
hast-util-is-element@3.0.0:
dependencies:
'@types/hast': 3.0.4
+ hast-util-minify-whitespace@1.0.1:
+ dependencies:
+ '@types/hast': 3.0.4
+ hast-util-embedded: 3.0.0
+ hast-util-is-element: 3.0.0
+ hast-util-whitespace: 3.0.0
+ unist-util-is: 6.0.0
+
hast-util-parse-selector@2.2.5: {}
hast-util-parse-selector@4.0.0:
dependencies:
'@types/hast': 3.0.4
+ hast-util-phrasing@3.0.1:
+ dependencies:
+ '@types/hast': 3.0.4
+ hast-util-embedded: 3.0.0
+ hast-util-has-property: 3.0.0
+ hast-util-is-body-ok-link: 3.0.1
+ hast-util-is-element: 3.0.0
+
hast-util-raw@9.1.0:
dependencies:
'@types/hast': 3.0.4
@@ -9630,6 +10596,23 @@ snapshots:
transitivePeerDependencies:
- supports-color
+ hast-util-to-mdast@10.1.2:
+ dependencies:
+ '@types/hast': 3.0.4
+ '@types/mdast': 4.0.4
+ '@ungap/structured-clone': 1.3.0
+ hast-util-phrasing: 3.0.1
+ hast-util-to-html: 9.0.5
+ hast-util-to-text: 4.0.2
+ hast-util-whitespace: 3.0.0
+ mdast-util-phrasing: 4.1.0
+ mdast-util-to-hast: 13.2.0
+ mdast-util-to-string: 4.0.0
+ rehype-minify-whitespace: 6.0.2
+ trim-trailing-lines: 2.1.0
+ unist-util-position: 5.0.0
+ unist-util-visit: 5.0.0
+
hast-util-to-parse5@8.0.0:
dependencies:
'@types/hast': 3.0.4
@@ -9679,10 +10662,34 @@ snapshots:
highlightjs-vue@1.0.0: {}
+ html-encoding-sniffer@4.0.0:
+ dependencies:
+ whatwg-encoding: 3.1.1
+
html-url-attributes@3.0.1: {}
html-void-elements@3.0.0: {}
+ html-whitespace-sensitive-tag-names@3.0.1: {}
+
+ http-proxy-agent@7.0.2:
+ dependencies:
+ agent-base: 7.1.4
+ debug: 4.4.0
+ transitivePeerDependencies:
+ - supports-color
+
+ https-proxy-agent@7.0.6:
+ dependencies:
+ agent-base: 7.1.4
+ debug: 4.4.0
+ transitivePeerDependencies:
+ - supports-color
+
+ iconv-lite@0.6.3:
+ dependencies:
+ safer-buffer: 2.1.2
+
ieee754@1.2.1: {}
ignore@5.3.2: {}
@@ -9817,6 +10824,8 @@ snapshots:
is-plain-obj@4.1.0: {}
+ is-potential-custom-element-name@1.0.1: {}
+
is-regex@1.2.1:
dependencies:
call-bound: 1.0.3
@@ -9892,6 +10901,34 @@ snapshots:
dependencies:
argparse: 2.0.1
+ jsdom@25.0.1:
+ dependencies:
+ cssstyle: 4.6.0
+ data-urls: 5.0.0
+ decimal.js: 10.6.0
+ form-data: 4.0.5
+ html-encoding-sniffer: 4.0.0
+ http-proxy-agent: 7.0.2
+ https-proxy-agent: 7.0.6
+ is-potential-custom-element-name: 1.0.1
+ nwsapi: 2.2.22
+ parse5: 7.2.1
+ rrweb-cssom: 0.7.1
+ saxes: 6.0.0
+ symbol-tree: 3.2.4
+ tough-cookie: 5.1.2
+ w3c-xmlserializer: 5.0.0
+ webidl-conversions: 7.0.0
+ whatwg-encoding: 3.1.1
+ whatwg-mimetype: 4.0.0
+ whatwg-url: 14.2.0
+ ws: 8.18.3
+ xml-name-validator: 5.0.0
+ transitivePeerDependencies:
+ - bufferutil
+ - supports-color
+ - utf-8-validate
+
json-buffer@3.0.1: {}
json-schema-traverse@0.4.1: {}
@@ -9989,6 +11026,12 @@ snapshots:
lightningcss-win32-arm64-msvc: 1.30.1
lightningcss-win32-x64-msvc: 1.30.1
+ linkify-it@5.0.0:
+ dependencies:
+ uc.micro: 2.1.0
+
+ linkifyjs@4.3.2: {}
+
locate-path@6.0.0:
dependencies:
p-locate: 5.0.0
@@ -10018,6 +11061,8 @@ snapshots:
fault: 1.0.4
highlight.js: 10.7.3
+ lru-cache@10.4.3: {}
+
lru-cache@11.1.0: {}
lucide-react@0.453.0(react@19.1.0):
@@ -10040,6 +11085,15 @@ snapshots:
markdown-extensions@2.0.0: {}
+ markdown-it@14.1.0:
+ dependencies:
+ argparse: 2.0.1
+ entities: 4.5.0
+ linkify-it: 5.0.0
+ mdurl: 2.0.0
+ punycode.js: 2.3.1
+ uc.micro: 2.1.0
+
markdown-table@3.0.4: {}
math-intrinsics@1.1.0: {}
@@ -10343,6 +11397,8 @@ snapshots:
dependencies:
'@types/mdast': 4.0.4
+ mdurl@2.0.0: {}
+
memoize-one@5.2.1: {}
merge-refs@1.3.0(@types/react@19.1.8):
@@ -10838,8 +11894,14 @@ snapshots:
braces: 3.0.3
picomatch: 2.3.1
+ mime-db@1.52.0: {}
+
mime-db@1.54.0: {}
+ mime-types@2.1.35:
+ dependencies:
+ mime-db: 1.52.0
+
mimic-response@3.1.0:
optional: true
@@ -10950,6 +12012,8 @@ snapshots:
dependencies:
esm-env: 1.2.2
+ nwsapi@2.2.22: {}
+
object-assign@4.1.1: {}
object-inspect@1.13.4: {}
@@ -11013,6 +12077,8 @@ snapshots:
type-check: 0.4.0
word-wrap: 1.2.5
+ orderedmap@2.1.1: {}
+
outvariant@1.4.0: {}
own-keys@1.0.1:
@@ -11191,12 +12257,127 @@ snapshots:
property-information@7.1.0: {}
+ prosemirror-changeset@2.3.1:
+ dependencies:
+ prosemirror-transform: 1.10.5
+
+ prosemirror-collab@1.3.1:
+ dependencies:
+ prosemirror-state: 1.4.4
+
+ prosemirror-commands@1.7.1:
+ dependencies:
+ prosemirror-model: 1.25.4
+ prosemirror-state: 1.4.4
+ prosemirror-transform: 1.10.5
+
+ prosemirror-dropcursor@1.8.2:
+ dependencies:
+ prosemirror-state: 1.4.4
+ prosemirror-transform: 1.10.5
+ prosemirror-view: 1.41.3
+
+ prosemirror-gapcursor@1.4.0:
+ dependencies:
+ prosemirror-keymap: 1.2.3
+ prosemirror-model: 1.25.4
+ prosemirror-state: 1.4.4
+ prosemirror-view: 1.41.3
+
+ prosemirror-highlight@0.13.0(@shikijs/types@3.13.0)(@types/hast@3.0.4)(highlight.js@11.11.1)(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-transform@1.10.5)(prosemirror-view@1.41.3):
+ optionalDependencies:
+ '@shikijs/types': 3.13.0
+ '@types/hast': 3.0.4
+ highlight.js: 11.11.1
+ prosemirror-model: 1.25.4
+ prosemirror-state: 1.4.4
+ prosemirror-transform: 1.10.5
+ prosemirror-view: 1.41.3
+
+ prosemirror-history@1.5.0:
+ dependencies:
+ prosemirror-state: 1.4.4
+ prosemirror-transform: 1.10.5
+ prosemirror-view: 1.41.3
+ rope-sequence: 1.3.4
+
+ prosemirror-inputrules@1.5.1:
+ dependencies:
+ prosemirror-state: 1.4.4
+ prosemirror-transform: 1.10.5
+
+ prosemirror-keymap@1.2.3:
+ dependencies:
+ prosemirror-state: 1.4.4
+ w3c-keyname: 2.2.8
+
+ prosemirror-markdown@1.13.2:
+ dependencies:
+ '@types/markdown-it': 14.1.2
+ markdown-it: 14.1.0
+ prosemirror-model: 1.25.4
+
+ prosemirror-menu@1.2.5:
+ dependencies:
+ crelt: 1.0.6
+ prosemirror-commands: 1.7.1
+ prosemirror-history: 1.5.0
+ prosemirror-state: 1.4.4
+
+ prosemirror-model@1.25.4:
+ dependencies:
+ orderedmap: 2.1.1
+
+ prosemirror-schema-basic@1.2.4:
+ dependencies:
+ prosemirror-model: 1.25.4
+
+ prosemirror-schema-list@1.5.1:
+ dependencies:
+ prosemirror-model: 1.25.4
+ prosemirror-state: 1.4.4
+ prosemirror-transform: 1.10.5
+
+ prosemirror-state@1.4.4:
+ dependencies:
+ prosemirror-model: 1.25.4
+ prosemirror-transform: 1.10.5
+ prosemirror-view: 1.41.3
+
+ prosemirror-tables@1.8.1:
+ dependencies:
+ prosemirror-keymap: 1.2.3
+ prosemirror-model: 1.25.4
+ prosemirror-state: 1.4.4
+ prosemirror-transform: 1.10.5
+ prosemirror-view: 1.41.3
+
+ prosemirror-trailing-node@3.0.0(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-view@1.41.3):
+ dependencies:
+ '@remirror/core-constants': 3.0.0
+ escape-string-regexp: 4.0.0
+ prosemirror-model: 1.25.4
+ prosemirror-state: 1.4.4
+ prosemirror-view: 1.41.3
+
+ prosemirror-transform@1.10.5:
+ dependencies:
+ prosemirror-model: 1.25.4
+
+ prosemirror-view@1.41.3:
+ dependencies:
+ prosemirror-model: 1.25.4
+ prosemirror-state: 1.4.4
+ prosemirror-transform: 1.10.5
+
pump@3.0.3:
dependencies:
end-of-stream: 1.4.5
once: 1.4.0
optional: true
+ punycode.js@2.3.1: {}
+
punycode@2.3.1: {}
pure-color@1.3.0: {}
@@ -11257,6 +12438,10 @@ snapshots:
dependencies:
react: 19.1.0
+ react-icons@5.5.0(react@19.1.0):
+ dependencies:
+ react: 19.1.0
+
react-intersection-observer@9.5.1(react@19.1.0):
dependencies:
react: 19.1.0
@@ -11330,6 +12515,11 @@ snapshots:
react: 19.1.0
react-dom: 19.1.0(react@19.1.0)
+ react-number-format@5.4.4(react-dom@19.1.0(react@19.1.0))(react@19.1.0):
+ dependencies:
+ react: 19.1.0
+ react-dom: 19.1.0(react@19.1.0)
+
react-pdf@9.2.1(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0):
dependencies:
clsx: 2.1.1
@@ -11419,6 +12609,15 @@ snapshots:
transitivePeerDependencies:
- '@types/react'
+ react-textarea-autosize@8.5.9(@types/react@19.1.8)(react@19.1.0):
+ dependencies:
+ '@babel/runtime': 7.26.9
+ react: 19.1.0
+ use-composed-ref: 1.4.0(@types/react@19.1.8)(react@19.1.0)
+ use-latest: 1.3.0(@types/react@19.1.8)(react@19.1.0)
+ transitivePeerDependencies:
+ - '@types/react'
+
react-window@1.8.9(react-dom@19.1.0(react@19.1.0))(react@19.1.0):
dependencies:
'@babel/runtime': 7.26.9
@@ -11447,9 +12646,9 @@ snapshots:
estree-util-build-jsx: 3.0.1
vfile: 6.0.3
- recma-jsx@1.0.0(acorn@8.14.0):
+ recma-jsx@1.0.0(acorn@8.15.0):
dependencies:
- acorn-jsx: 5.3.2(acorn@8.14.0)
+ acorn-jsx: 5.3.2(acorn@8.15.0)
estree-util-to-js: 2.0.0
recma-parse: 1.0.0
recma-stringify: 1.0.0
@@ -11509,6 +12708,11 @@ snapshots:
gopd: 1.2.0
set-function-name: 2.0.2
+ rehype-format@5.0.1:
+ dependencies:
+ '@types/hast': 3.0.4
+ hast-util-format: 1.1.0
+
rehype-katex@7.0.1:
dependencies:
'@types/hast': 3.0.4
@@ -11519,6 +12723,17 @@ snapshots:
unist-util-visit-parents: 6.0.1
vfile: 6.0.3
+ rehype-minify-whitespace@6.0.2:
+ dependencies:
+ '@types/hast': 3.0.4
+ hast-util-minify-whitespace: 1.0.1
+
+ rehype-parse@9.0.1:
+ dependencies:
+ '@types/hast': 3.0.4
+ hast-util-from-html: 2.0.3
+ unified: 11.0.5
+
rehype-raw@7.0.0:
dependencies:
'@types/hast': 3.0.4
@@ -11533,11 +12748,25 @@ snapshots:
transitivePeerDependencies:
- supports-color
+ rehype-remark@10.0.1:
+ dependencies:
+ '@types/hast': 3.0.4
+ '@types/mdast': 4.0.4
+ hast-util-to-mdast: 10.1.2
+ unified: 11.0.5
+ vfile: 6.0.3
+
rehype-sanitize@6.0.0:
dependencies:
'@types/hast': 3.0.4
hast-util-sanitize: 5.0.2
+ rehype-stringify@10.0.1:
+ dependencies:
+ '@types/hast': 3.0.4
+ hast-util-to-html: 9.0.5
+ unified: 11.0.5
+
remark-code-import@1.2.0:
dependencies:
strip-indent: 4.0.0
@@ -11643,8 +12872,14 @@ snapshots:
reusify@1.1.0: {}
+ rope-sequence@1.3.4: {}
+
rough-notation@0.5.1: {}
+ rrweb-cssom@0.7.1: {}
+
+ rrweb-cssom@0.8.0: {}
+
run-parallel@1.2.0:
dependencies:
queue-microtask: 1.2.3
@@ -11675,6 +12910,12 @@ snapshots:
es-errors: 1.3.0
is-regex: 1.2.1
+ safer-buffer@2.1.2: {}
+
+ saxes@6.0.0:
+ dependencies:
+ xmlchars: 2.2.0
+
scheduler@0.26.0: {}
scroll-into-view-if-needed@3.1.0:
@@ -11936,6 +13177,8 @@ snapshots:
react: 19.1.0
use-sync-external-store: 1.4.0(react@19.1.0)
+ symbol-tree@3.2.4: {}
+
tabbable@6.2.0: {}
tailwind-merge@2.6.0: {}
@@ -11996,6 +13239,12 @@ snapshots:
fdir: 6.4.6(picomatch@4.0.3)
picomatch: 4.0.3
+ tldts-core@6.1.86: {}
+
+ tldts@6.1.86:
+ dependencies:
+ tldts-core: 6.1.86
+
to-gatsby-remark-plugin@0.1.0:
dependencies:
to-vfile: 6.1.0
@@ -12009,10 +13258,20 @@ snapshots:
is-buffer: 2.0.5
vfile: 4.2.1
+ tough-cookie@5.1.2:
+ dependencies:
+ tldts: 6.1.86
+
tr46@0.0.3: {}
+ tr46@5.1.1:
+ dependencies:
+ punycode: 2.3.1
+
trim-lines@3.0.1: {}
+ trim-trailing-lines@2.1.0: {}
+
trough@2.2.0: {}
ts-api-utils@2.0.1(typescript@5.8.3):
@@ -12046,6 +13305,8 @@ snapshots:
dependencies:
prelude-ls: 1.2.1
+ type-fest@4.41.0: {}
+
type@2.7.3: {}
typed-array-buffer@1.0.3:
@@ -12085,6 +13346,8 @@ snapshots:
ua-parser-js@1.0.40: {}
+ uc.micro@2.1.0: {}
+
unbox-primitive@1.1.0:
dependencies:
call-bound: 1.0.3
@@ -12238,6 +13501,8 @@ snapshots:
util-deprecate@1.0.2: {}
+ uuid@8.3.2: {}
+
uvu@0.5.6:
dependencies:
dequal: 2.0.3
@@ -12295,6 +13560,10 @@ snapshots:
w3c-keyname@2.2.8: {}
+ w3c-xmlserializer@5.0.0:
+ dependencies:
+ xml-name-validator: 5.0.0
+
warning@4.0.3:
dependencies:
loose-envify: 1.4.0
@@ -12303,6 +13572,19 @@ snapshots:
webidl-conversions@3.0.1: {}
+ webidl-conversions@7.0.0: {}
+
+ whatwg-encoding@3.1.1:
+ dependencies:
+ iconv-lite: 0.6.3
+
+ whatwg-mimetype@4.0.0: {}
+
+ whatwg-url@14.2.0:
+ dependencies:
+ tr46: 5.1.1
+ webidl-conversions: 7.0.0
+
whatwg-url@5.0.0:
dependencies:
tr46: 0.0.3
@@ -12357,8 +13639,28 @@ snapshots:
wrappy@1.0.2:
optional: true
+ ws@8.18.3: {}
+
+ xml-name-validator@5.0.0: {}
+
+ xmlchars@2.2.0: {}
+
xtend@4.0.2: {}
+ y-prosemirror@1.3.7(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-view@1.41.3)(y-protocols@1.0.6(yjs@13.6.27))(yjs@13.6.27):
+ dependencies:
+ lib0: 0.2.114
+ prosemirror-model: 1.25.4
+ prosemirror-state: 1.4.4
+ prosemirror-view: 1.41.3
+ y-protocols: 1.0.6(yjs@13.6.27)
+ yjs: 13.6.27
+
+ y-protocols@1.0.6(yjs@13.6.27):
+ dependencies:
+ lib0: 0.2.114
+ yjs: 13.6.27
+
yallist@5.0.0: {}
yjs@13.6.27:
From abbaa848f35bae44c38a5e12c7d9f366b292d7fa Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 23 Nov 2025 16:24:11 +0530
Subject: [PATCH 02/36] changed UI for the blocknote editor with dark/light
mode
---
surfsense_backend/app/routes/editor_routes.py | 4 +-
.../editor/[documentId]/page.tsx | 180 +++++++++++++-----
surfsense_web/components/BlockNoteEditor.tsx | 22 ++-
3 files changed, 156 insertions(+), 50 deletions(-)
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
index f248b37b1..4579ec874 100644
--- a/surfsense_backend/app/routes/editor_routes.py
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -36,7 +36,7 @@ async def get_editor_content(
.join(SearchSpace)
.filter(Document.id == document_id, SearchSpace.user_id == user.id)
)
- document = result.scalars().first() # ✅ Changed from scalar_one_or_none()
+ document = result.scalars().first()
if not document:
raise HTTPException(status_code=404, detail="Document not found")
@@ -74,7 +74,7 @@ async def update_blocknote_content(
.join(SearchSpace)
.filter(Document.id == document_id, SearchSpace.user_id == user.id)
)
- document = result.scalars().first() # ✅ Changed from scalar_one_or_none()
+ document = result.scalars().first()
if not document:
raise HTTPException(status_code=404, detail="Document not found")
diff --git a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
index fb5709608..5371111f3 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
@@ -2,7 +2,14 @@
import { useParams, useRouter } from "next/navigation";
import { useEffect, useState } from "react";
+import { motion } from "motion/react";
+import { Loader2, Save, X, FileText, AlertCircle } from "lucide-react";
+import { toast } from "sonner";
import { BlockNoteEditor } from "@/components/DynamicBlockNoteEditor";
+import { Button } from "@/components/ui/button";
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
+import { Separator } from "@/components/ui/separator";
+import { cn } from "@/lib/utils";
interface EditorContent {
document_id: number;
@@ -21,6 +28,7 @@ export default function EditorPage() {
const [saving, setSaving] = useState(false);
const [editorContent, setEditorContent] = useState(null);
const [error, setError] = useState(null);
+ const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false);
// Get auth token
const token = typeof window !== "undefined"
@@ -77,13 +85,20 @@ export default function EditorPage() {
}
}, [documentId, token]);
+ // Track changes to mark as unsaved
+ useEffect(() => {
+ if (editorContent && document) {
+ setHasUnsavedChanges(true);
+ }
+ }, [editorContent, document]);
+
// Auto-save every 30 seconds - DIRECT CALL TO FASTAPI
useEffect(() => {
- if (!editorContent || !token) return;
+ if (!editorContent || !token || !hasUnsavedChanges) return;
const interval = setInterval(async () => {
try {
- await fetch(
+ const response = await fetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/blocknote-content`,
{
method: "PUT",
@@ -94,24 +109,28 @@ export default function EditorPage() {
body: JSON.stringify({ blocknote_document: editorContent }),
}
);
- console.log("Auto-saved");
+
+ if (response.ok) {
+ setHasUnsavedChanges(false);
+ toast.success("Auto-saved", { duration: 2000 });
+ }
} catch (error) {
console.error("Auto-save failed:", error);
}
}, 30000); // 30 seconds
return () => clearInterval(interval);
- }, [editorContent, documentId, token]);
+ }, [editorContent, documentId, token, hasUnsavedChanges]);
// Save and exit - DIRECT CALL TO FASTAPI
const handleSave = async () => {
if (!token) {
- alert("Please login to save");
+ toast.error("Please login to save");
return;
}
if (!editorContent) {
- alert("No content to save");
+ toast.error("No content to save");
return;
}
@@ -135,75 +154,144 @@ export default function EditorPage() {
throw new Error(errorData.detail || "Failed to save document");
}
- // Redirect back to documents list
- router.push(`/dashboard/${params.search_space_id}/documents`);
+ setHasUnsavedChanges(false);
+ toast.success("Document saved successfully");
+
+ // Small delay before redirect to show success message
+ setTimeout(() => {
+ router.push(`/dashboard/${params.search_space_id}/documents`);
+ }, 500);
} catch (error) {
console.error("Error saving document:", error);
- alert(error instanceof Error ? error.message : "Failed to save document. Please try again.");
+ toast.error(error instanceof Error ? error.message : "Failed to save document. Please try again.");
} finally {
setSaving(false);
}
};
+ const handleCancel = () => {
+ if (hasUnsavedChanges) {
+ if (confirm("You have unsaved changes. Are you sure you want to leave?")) {
+ router.back();
+ }
+ } else {
+ router.back();
+ }
+ };
+
if (loading) {
- return Loading editor...
;
+ return (
+
+
+
+
+ Loading editor...
+
+
+
+ );
}
if (error) {
return (
- //
-
-
-
Error
-
{error}
-
router.back()}
- className="px-4 py-2 bg-red-600 text-white rounded hover:bg-red-700"
- >
- Go Back
-
-
+
+
+
+
+
+ {error}
+
+
+ router.back()} variant="outline" className="w-full">
+
+ Go Back
+
+
+
+
);
}
if (!document) {
- return
Document not found
;
+ return (
+
+
+
+
+ Document not found
+
+
+
+ );
}
return (
- //
-
+
{/* Toolbar */}
-
-
{document.title}
-
-
router.back()}
- className="px-4 py-2 border rounded"
+
+
+
+
+
{document.title}
+ {hasUnsavedChanges && (
+
Unsaved changes
+ )}
+
+
+
+
+
+
Cancel
-
-
+
- {saving ? "Saving..." : "Save & Exit"}
-
+ {saving ? (
+ <>
+
+ Saving...
+ >
+ ) : (
+ <>
+
+ Save & Exit
+ >
+ )}
+
- {/* Editor - Now using dynamic import */}
-
-
+ {/* Editor Container */}
+
-
+
);
}
diff --git a/surfsense_web/components/BlockNoteEditor.tsx b/surfsense_web/components/BlockNoteEditor.tsx
index e43d70fc7..57a6d380a 100644
--- a/surfsense_web/components/BlockNoteEditor.tsx
+++ b/surfsense_web/components/BlockNoteEditor.tsx
@@ -1,6 +1,7 @@
"use client";
-import { useEffect, useRef } from "react";
+import { useEffect, useRef, useMemo } from "react";
+import { useTheme } from "next-themes";
import "@blocknote/core/fonts/inter.css";
import "@blocknote/mantine/style.css";
import { useCreateBlockNote } from "@blocknote/react";
@@ -15,6 +16,8 @@ export default function BlockNoteEditor({
initialContent,
onChange,
}: BlockNoteEditorProps) {
+ const { resolvedTheme } = useTheme();
+
// Track the initial content to prevent re-initialization
const initialContentRef = useRef(null);
const isInitializedRef = useRef(false);
@@ -48,6 +51,21 @@ export default function BlockNoteEditor({
};
}, [editor, onChange]);
+ // Determine theme for BlockNote with custom dark mode background
+ const blockNoteTheme = useMemo(() => {
+ if (resolvedTheme === "dark") {
+ // Custom dark theme - only override editor background, let BlockNote handle the rest
+ return {
+ colors: {
+ editor: {
+ background: "#0A0A0A", // Custom dark background
+ },
+ },
+ };
+ }
+ return "light" as const;
+ }, [resolvedTheme]);
+
// Renders the editor instance
- return ;
+ return ;
}
From 3fac196c35234166e73739c2e2dd5253fc0a30cf Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 23 Nov 2025 16:39:23 +0530
Subject: [PATCH 03/36] code quality issues fixed
---
.../38_add_blocknote_fields_to_documents.py | 33 +-
surfsense_backend/app/db.py | 6 +-
surfsense_backend/app/routes/editor_routes.py | 46 +-
.../extension_processor.py | 4 +-
.../document_processors/file_processors.py | 25 +-
.../document_processors/markdown_processor.py | 9 +-
.../tasks/document_processors/url_crawler.py | 2 +-
.../document_processors/youtube_processor.py | 2 +-
.../app/utils/blocknote_converter.py | 48 +-
.../app/api/convert-to-blocknote/route.ts | 73 ++-
.../app/api/convert-to-markdown/route.ts | 49 +-
.../(manage)/components/RowActions.tsx | 2 +-
.../editor/[documentId]/page.tsx | 557 +++++++++---------
surfsense_web/components/BlockNoteEditor.tsx | 111 ++--
.../components/DynamicBlockNoteEditor.tsx | 5 +-
.../components/dashboard-breadcrumb.tsx | 7 +-
surfsense_web/next.config.ts | 9 +-
17 files changed, 495 insertions(+), 493 deletions(-)
diff --git a/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py b/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
index d882f020b..742771322 100644
--- a/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
+++ b/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
@@ -4,6 +4,7 @@ Revision ID: 38
Revises: 37
"""
+
from collections.abc import Sequence
import sqlalchemy as sa
@@ -12,30 +13,38 @@ from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
-revision: str = '38'
-down_revision: str | None = '37'
+revision: str = "38"
+down_revision: str | None = "37"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
"""Upgrade schema - Add BlockNote fields only."""
-
+
op.add_column(
- 'documents',
- sa.Column('blocknote_document', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+ "documents",
+ sa.Column(
+ "blocknote_document", postgresql.JSONB(astext_type=sa.Text()), nullable=True
+ ),
)
op.add_column(
- 'documents',
- sa.Column('content_needs_reindexing', sa.Boolean(), nullable=False, server_default=sa.false()),
+ "documents",
+ sa.Column(
+ "content_needs_reindexing",
+ sa.Boolean(),
+ nullable=False,
+ server_default=sa.false(),
+ ),
)
op.add_column(
- 'documents',
- sa.Column('last_edited_at', sa.TIMESTAMP(timezone=True), nullable=True)
+ "documents",
+ sa.Column("last_edited_at", sa.TIMESTAMP(timezone=True), nullable=True),
)
+
def downgrade() -> None:
"""Downgrade schema - Remove BlockNote fields."""
- op.drop_column('documents', 'last_edited_at')
- op.drop_column('documents', 'content_needs_reindexing')
- op.drop_column('documents', 'blocknote_document')
\ No newline at end of file
+ op.drop_column("documents", "last_edited_at")
+ op.drop_column("documents", "content_needs_reindexing")
+ op.drop_column("documents", "blocknote_document")
diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py
index 38505e931..890ea2473 100644
--- a/surfsense_backend/app/db.py
+++ b/surfsense_backend/app/db.py
@@ -178,15 +178,15 @@ class Document(BaseModel, TimestampMixin):
content_hash = Column(String, nullable=False, index=True, unique=True)
unique_identifier_hash = Column(String, nullable=True, index=True, unique=True)
embedding = Column(Vector(config.embedding_model_instance.dimension))
-
+
# BlockNote live editing state (NULL when never edited)
blocknote_document = Column(JSONB, nullable=True)
-
+
# blocknote background reindex flag
content_needs_reindexing = Column(
Boolean, nullable=False, default=False, server_default=text("false")
)
-
+
# Track when blocknote document was last edited
last_edited_at = Column(TIMESTAMP(timezone=True), nullable=True)
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
index 4579ec874..f4c4ca38e 100644
--- a/surfsense_backend/app/routes/editor_routes.py
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -1,6 +1,7 @@
"""
Editor routes for BlockNote document editing.
"""
+
from datetime import UTC, datetime
from typing import Any
@@ -26,7 +27,7 @@ async def get_editor_content(
):
"""
Get document content for editing.
-
+
Returns BlockNote JSON document. If blocknote_document is NULL,
attempts to convert from `content` - though this won't work well
for old documents that only have summaries.
@@ -37,24 +38,26 @@ async def get_editor_content(
.filter(Document.id == document_id, SearchSpace.user_id == user.id)
)
document = result.scalars().first()
-
+
if not document:
raise HTTPException(status_code=404, detail="Document not found")
-
+
# If blocknote_document exists, return it
if document.blocknote_document:
return {
"document_id": document.id,
"title": document.title,
"blocknote_document": document.blocknote_document,
- "last_edited_at": document.last_edited_at.isoformat() if document.last_edited_at else None,
+ "last_edited_at": document.last_edited_at.isoformat()
+ if document.last_edited_at
+ else None,
}
-
+
# For old documents without blocknote_document, return error
# (Can't convert summary back to full document)
raise HTTPException(
status_code=400,
- detail="This document was uploaded before editing was enabled. Please re-upload to enable editing."
+ detail="This document was uploaded before editing was enabled. Please re-upload to enable editing.",
)
@@ -75,21 +78,21 @@ async def update_blocknote_content(
.filter(Document.id == document_id, SearchSpace.user_id == user.id)
)
document = result.scalars().first()
-
+
if not document:
raise HTTPException(status_code=404, detail="Document not found")
-
+
blocknote_document = data.get("blocknote_document")
if not blocknote_document:
raise HTTPException(status_code=400, detail="blocknote_document is required")
-
+
# Update only blocknote_document and last_edited_at
document.blocknote_document = blocknote_document
document.last_edited_at = datetime.now(UTC)
-
+
await session.commit()
await session.refresh(document)
-
+
return {"status": "saved", "last_edited_at": document.last_edited_at.isoformat()}
@@ -110,52 +113,51 @@ async def update_blocknote_content(
# .filter(Document.id == document_id, SearchSpace.user_id == user.id)
# )
# document = result.scalars().first()
-
+
# if not document:
# raise HTTPException(status_code=404, detail="Document not found")
-
+
# if not document.blocknote_document:
# raise HTTPException(
# status_code=400,
# detail="Document has no BlockNote content to finalize"
# )
-
+
# # 1. Convert BlockNote JSON → Markdown
# full_markdown = await convert_blocknote_to_markdown(document.blocknote_document)
-
+
# if not full_markdown:
# raise HTTPException(
# status_code=500,
# detail="Failed to convert BlockNote document to markdown"
# )
-
+
# # 2. Generate new summary from full markdown
# from app.services.llm_service import get_user_long_context_llm
# from app.utils.document_converters import generate_document_summary
-
+
# user_llm = await get_user_long_context_llm(session, str(user.id), document.search_space_id)
# if not user_llm:
# raise HTTPException(
# status_code=500,
# detail="No LLM configured for summary generation"
# )
-
+
# document_metadata = document.document_metadata or {}
# summary_content, summary_embedding = await generate_document_summary(
# full_markdown, user_llm, document_metadata
# )
-
+
# # 3. Update document fields
# document.content = summary_content
# document.embedding = summary_embedding
# document.content_needs_reindexing = True # Trigger chunk regeneration
# document.last_edited_at = datetime.now(UTC)
-
+
# await session.commit()
-
+
# return {
# "status": "finalized",
# "message": "Document saved. Summary and chunks will be regenerated in the background.",
# "content_needs_reindexing": True,
# }
-
\ No newline at end of file
diff --git a/surfsense_backend/app/tasks/document_processors/extension_processor.py b/surfsense_backend/app/tasks/document_processors/extension_processor.py
index 640775372..48e3efe27 100644
--- a/surfsense_backend/app/tasks/document_processors/extension_processor.py
+++ b/surfsense_backend/app/tasks/document_processors/extension_processor.py
@@ -144,9 +144,9 @@ async def add_extension_received_document(
# Process chunks
chunks = await create_document_chunks(content.pageContent)
-
+
from app.utils.blocknote_converter import convert_markdown_to_blocknote
-
+
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(combined_document_string)
if not blocknote_json:
diff --git a/surfsense_backend/app/tasks/document_processors/file_processors.py b/surfsense_backend/app/tasks/document_processors/file_processors.py
index c4b75b0bd..95cf1c462 100644
--- a/surfsense_backend/app/tasks/document_processors/file_processors.py
+++ b/surfsense_backend/app/tasks/document_processors/file_processors.py
@@ -99,14 +99,15 @@ async def add_received_file_document_using_unstructured(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
-
+
from app.utils.blocknote_converter import convert_markdown_to_blocknote
-
+
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
- logging.warning(f"Failed to convert {file_name} to BlockNote JSON, document will not be editable")
-
+ logging.warning(
+ f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
+ )
# Update or create document
if existing_document:
@@ -223,14 +224,15 @@ async def add_received_file_document_using_llamacloud(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
-
+
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
- logging.warning(f"Failed to convert {file_name} to BlockNote JSON, document will not be editable")
-
+ logging.warning(
+ f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
+ )
# Update or create document
if existing_document:
@@ -372,14 +374,15 @@ async def add_received_file_document_using_docling(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
-
+
from app.utils.blocknote_converter import convert_markdown_to_blocknote
-
+
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
- logging.warning(f"Failed to convert {file_name} to BlockNote JSON, document will not be editable")
-
+ logging.warning(
+ f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
+ )
# Update or create document
if existing_document:
diff --git a/surfsense_backend/app/tasks/document_processors/markdown_processor.py b/surfsense_backend/app/tasks/document_processors/markdown_processor.py
index b63d2bdf2..3036071c9 100644
--- a/surfsense_backend/app/tasks/document_processors/markdown_processor.py
+++ b/surfsense_backend/app/tasks/document_processors/markdown_processor.py
@@ -109,14 +109,15 @@ async def add_received_markdown_file_document(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
-
+
from app.utils.blocknote_converter import convert_markdown_to_blocknote
-
+
# Convert to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
- logging.warning(f"Failed to convert {file_name} to BlockNote JSON, document will not be editable")
-
+ logging.warning(
+ f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
+ )
# Update or create document
if existing_document:
diff --git a/surfsense_backend/app/tasks/document_processors/url_crawler.py b/surfsense_backend/app/tasks/document_processors/url_crawler.py
index d392031ea..1b516b5bf 100644
--- a/surfsense_backend/app/tasks/document_processors/url_crawler.py
+++ b/surfsense_backend/app/tasks/document_processors/url_crawler.py
@@ -247,7 +247,7 @@ async def add_crawled_url_document(
f"Processing content chunks for URL: {url}",
{"stage": "chunk_processing"},
)
-
+
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert markdown to BlockNote JSON
diff --git a/surfsense_backend/app/tasks/document_processors/youtube_processor.py b/surfsense_backend/app/tasks/document_processors/youtube_processor.py
index e479ec773..332e775e1 100644
--- a/surfsense_backend/app/tasks/document_processors/youtube_processor.py
+++ b/surfsense_backend/app/tasks/document_processors/youtube_processor.py
@@ -290,7 +290,7 @@ async def add_youtube_video_document(
f"Processing content chunks for video: {video_data.get('title', 'YouTube Video')}",
{"stage": "chunk_processing"},
)
-
+
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert transcript to BlockNote JSON
diff --git a/surfsense_backend/app/utils/blocknote_converter.py b/surfsense_backend/app/utils/blocknote_converter.py
index ce5cbf64b..b57a82996 100644
--- a/surfsense_backend/app/utils/blocknote_converter.py
+++ b/surfsense_backend/app/utils/blocknote_converter.py
@@ -11,17 +11,17 @@ logger = logging.getLogger(__name__)
async def convert_markdown_to_blocknote(markdown: str) -> dict[str, Any] | None:
"""
Convert markdown to BlockNote JSON via Next.js API.
-
+
Args:
markdown: Markdown string to convert
-
+
Returns:
BlockNote document as dict, or None if conversion fails
"""
if not markdown or not markdown.strip():
logger.warning("Empty markdown provided for conversion")
return None
-
+
if not markdown or len(markdown) < 10:
logger.warning("Markdown became too short after sanitization")
# Return a minimal BlockNote document
@@ -32,13 +32,13 @@ async def convert_markdown_to_blocknote(markdown: str) -> dict[str, Any] | None:
{
"type": "text",
"text": "Document content could not be converted for editing.",
- "styles": {}
+ "styles": {},
}
],
- "children": []
+ "children": [],
}
]
-
+
async with httpx.AsyncClient() as client:
try:
response = await client.post(
@@ -49,19 +49,23 @@ async def convert_markdown_to_blocknote(markdown: str) -> dict[str, Any] | None:
response.raise_for_status()
data = response.json()
blocknote_document = data.get("blocknote_document")
-
+
if blocknote_document:
- logger.info(f"Successfully converted markdown to BlockNote (original: {len(markdown)} chars, sanitized: {len(markdown)} chars)")
+ logger.info(
+ f"Successfully converted markdown to BlockNote (original: {len(markdown)} chars, sanitized: {len(markdown)} chars)"
+ )
return blocknote_document
else:
logger.warning("Next.js API returned empty blocknote_document")
return None
-
+
except httpx.TimeoutException:
logger.error("Timeout converting markdown to BlockNote after 30s")
return None
except httpx.HTTPStatusError as e:
- logger.error(f"HTTP error converting markdown to BlockNote: {e.response.status_code} - {e.response.text}")
+ logger.error(
+ f"HTTP error converting markdown to BlockNote: {e.response.status_code} - {e.response.text}"
+ )
# Log first 1000 chars of problematic markdown for debugging
logger.debug(f"Problematic markdown sample: {markdown[:1000]}")
return None
@@ -69,20 +73,23 @@ async def convert_markdown_to_blocknote(markdown: str) -> dict[str, Any] | None:
logger.error(f"Failed to convert markdown to BlockNote: {e}", exc_info=True)
return None
-async def convert_blocknote_to_markdown(blocknote_document: dict[str, Any] | list[dict[str, Any]]) -> str | None:
+
+async def convert_blocknote_to_markdown(
+ blocknote_document: dict[str, Any] | list[dict[str, Any]],
+) -> str | None:
"""
Convert BlockNote JSON to markdown via Next.js API.
-
+
Args:
blocknote_document: BlockNote document as dict or list of blocks
-
+
Returns:
Markdown string, or None if conversion fails
"""
if not blocknote_document:
logger.warning("Empty BlockNote document provided for conversion")
return None
-
+
async with httpx.AsyncClient() as client:
try:
response = await client.post(
@@ -93,21 +100,24 @@ async def convert_blocknote_to_markdown(blocknote_document: dict[str, Any] | lis
response.raise_for_status()
data = response.json()
markdown = data.get("markdown")
-
+
if markdown:
- logger.info(f"Successfully converted BlockNote to markdown ({len(markdown)} chars)")
+ logger.info(
+ f"Successfully converted BlockNote to markdown ({len(markdown)} chars)"
+ )
return markdown
else:
logger.warning("Next.js API returned empty markdown")
return None
-
+
except httpx.TimeoutException:
logger.error("Timeout converting BlockNote to markdown after 30s")
return None
except httpx.HTTPStatusError as e:
- logger.error(f"HTTP error converting BlockNote to markdown: {e.response.status_code} - {e.response.text}")
+ logger.error(
+ f"HTTP error converting BlockNote to markdown: {e.response.status_code} - {e.response.text}"
+ )
return None
except Exception as e:
logger.error(f"Failed to convert BlockNote to markdown: {e}", exc_info=True)
return None
-
\ No newline at end of file
diff --git a/surfsense_web/app/api/convert-to-blocknote/route.ts b/surfsense_web/app/api/convert-to-blocknote/route.ts
index 2cb15fb31..e11c9cb47 100644
--- a/surfsense_web/app/api/convert-to-blocknote/route.ts
+++ b/surfsense_web/app/api/convert-to-blocknote/route.ts
@@ -2,42 +2,39 @@ import { ServerBlockNoteEditor } from "@blocknote/server-util";
import { type NextRequest, NextResponse } from "next/server";
export async function POST(request: NextRequest) {
- try {
- const { markdown } = await request.json();
-
- if (!markdown || typeof markdown !== "string") {
- return NextResponse.json(
- { error: "Markdown string is required" },
- { status: 400 }
- );
- }
-
- // Log raw markdown input before conversion
- // console.log(`\n${"=".repeat(80)}`);
- // console.log("RAW MARKDOWN INPUT (BEFORE CONVERSION):");
- // console.log("=".repeat(80));
- // console.log(markdown);
- // console.log(`${"=".repeat(80)}\n`);
-
- // Create server-side editor instance
- const editor = ServerBlockNoteEditor.create();
-
- // Convert markdown directly to BlockNote blocks
- const blocks = await editor.tryParseMarkdownToBlocks(markdown);
-
- if (!blocks || blocks.length === 0) {
- throw new Error("Markdown parsing returned no blocks");
- }
-
- return NextResponse.json({ blocknote_document: blocks });
- } catch (error: any) {
- console.error("Failed to convert markdown to BlockNote:", error);
- return NextResponse.json(
- {
- error: "Failed to convert markdown to BlockNote blocks",
- details: error.message
- },
- { status: 500 }
- );
- }
+ try {
+ const { markdown } = await request.json();
+
+ if (!markdown || typeof markdown !== "string") {
+ return NextResponse.json({ error: "Markdown string is required" }, { status: 400 });
+ }
+
+ // Log raw markdown input before conversion
+ // console.log(`\n${"=".repeat(80)}`);
+ // console.log("RAW MARKDOWN INPUT (BEFORE CONVERSION):");
+ // console.log("=".repeat(80));
+ // console.log(markdown);
+ // console.log(`${"=".repeat(80)}\n`);
+
+ // Create server-side editor instance
+ const editor = ServerBlockNoteEditor.create();
+
+ // Convert markdown directly to BlockNote blocks
+ const blocks = await editor.tryParseMarkdownToBlocks(markdown);
+
+ if (!blocks || blocks.length === 0) {
+ throw new Error("Markdown parsing returned no blocks");
+ }
+
+ return NextResponse.json({ blocknote_document: blocks });
+ } catch (error: any) {
+ console.error("Failed to convert markdown to BlockNote:", error);
+ return NextResponse.json(
+ {
+ error: "Failed to convert markdown to BlockNote blocks",
+ details: error.message,
+ },
+ { status: 500 }
+ );
+ }
}
diff --git a/surfsense_web/app/api/convert-to-markdown/route.ts b/surfsense_web/app/api/convert-to-markdown/route.ts
index 76faf45f3..7005a800f 100644
--- a/surfsense_web/app/api/convert-to-markdown/route.ts
+++ b/surfsense_web/app/api/convert-to-markdown/route.ts
@@ -2,30 +2,27 @@ import { ServerBlockNoteEditor } from "@blocknote/server-util";
import { type NextRequest, NextResponse } from "next/server";
export async function POST(request: NextRequest) {
- try {
- const { blocknote_document } = await request.json();
-
- if (!blocknote_document || !Array.isArray(blocknote_document)) {
- return NextResponse.json(
- { error: "BlockNote document array is required" },
- { status: 400 }
- );
- }
-
- // Create server-side editor instance
- const editor = ServerBlockNoteEditor.create();
-
- // Convert BlockNote blocks to markdown
- const markdown = await editor.blocksToMarkdownLossy(blocknote_document);
-
- return NextResponse.json({
- markdown
- });
- } catch (error) {
- console.error("Failed to convert BlockNote to markdown:", error);
- return NextResponse.json(
- { error: "Failed to convert BlockNote blocks to markdown" },
- { status: 500 }
- );
- }
+ try {
+ const { blocknote_document } = await request.json();
+
+ if (!blocknote_document || !Array.isArray(blocknote_document)) {
+ return NextResponse.json({ error: "BlockNote document array is required" }, { status: 400 });
+ }
+
+ // Create server-side editor instance
+ const editor = ServerBlockNoteEditor.create();
+
+ // Convert BlockNote blocks to markdown
+ const markdown = await editor.blocksToMarkdownLossy(blocknote_document);
+
+ return NextResponse.json({
+ markdown,
+ });
+ } catch (error) {
+ console.error("Failed to convert BlockNote to markdown:", error);
+ return NextResponse.json(
+ { error: "Failed to convert BlockNote blocks to markdown" },
+ { status: 500 }
+ );
+ }
}
diff --git a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
index 5967e62ca..ea4c66228 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
@@ -1,6 +1,6 @@
"use client";
-import { MoreHorizontal, Pencil, FileText, Trash2 } from "lucide-react";
+import { FileText, MoreHorizontal, Pencil, Trash2 } from "lucide-react";
import { useRouter } from "next/navigation";
import { useState } from "react";
import { toast } from "sonner";
diff --git a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
index 5371111f3..d408de44c 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
@@ -1,9 +1,9 @@
"use client";
+import { AlertCircle, FileText, Loader2, Save, X } from "lucide-react";
+import { motion } from "motion/react";
import { useParams, useRouter } from "next/navigation";
import { useEffect, useState } from "react";
-import { motion } from "motion/react";
-import { Loader2, Save, X, FileText, AlertCircle } from "lucide-react";
import { toast } from "sonner";
import { BlockNoteEditor } from "@/components/DynamicBlockNoteEditor";
import { Button } from "@/components/ui/button";
@@ -12,286 +12,281 @@ import { Separator } from "@/components/ui/separator";
import { cn } from "@/lib/utils";
interface EditorContent {
- document_id: number;
- title: string;
- blocknote_document: any;
- last_edited_at: string | null;
+ document_id: number;
+ title: string;
+ blocknote_document: any;
+ last_edited_at: string | null;
}
export default function EditorPage() {
- const params = useParams();
- const router = useRouter();
- const documentId = params.documentId as string;
-
- const [document, setDocument] = useState(null);
- const [loading, setLoading] = useState(true);
- const [saving, setSaving] = useState(false);
- const [editorContent, setEditorContent] = useState(null);
- const [error, setError] = useState(null);
- const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false);
-
- // Get auth token
- const token = typeof window !== "undefined"
- ? localStorage.getItem("surfsense_bearer_token")
- : null;
-
- // Fetch document content - DIRECT CALL TO FASTAPI
- useEffect(() => {
- async function fetchDocument() {
- if (!token) {
- console.error("No auth token found");
- setError("Please login to access the editor");
- setLoading(false);
- return;
- }
-
- try {
- const response = await fetch(
- `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/editor-content`,
- {
- headers: {
- Authorization: `Bearer ${token}`,
- },
- }
- );
-
- if (!response.ok) {
- const errorData = await response.json().catch(() => ({ detail: "Failed to fetch document" }));
- throw new Error(errorData.detail || "Failed to fetch document");
- }
-
- const data = await response.json();
-
- // Check if blocknote_document exists
- if (!data.blocknote_document) {
- setError("This document does not have BlockNote content. Please re-upload the document to enable editing.");
- setLoading(false);
- return;
- }
-
- setDocument(data);
- setEditorContent(data.blocknote_document);
- setError(null);
- } catch (error) {
- console.error("Error fetching document:", error);
- setError(error instanceof Error ? error.message : "Failed to fetch document. Please try again.");
- } finally {
- setLoading(false);
- }
- }
-
- if (documentId && token) {
- fetchDocument();
- }
- }, [documentId, token]);
-
- // Track changes to mark as unsaved
- useEffect(() => {
- if (editorContent && document) {
- setHasUnsavedChanges(true);
- }
- }, [editorContent, document]);
-
- // Auto-save every 30 seconds - DIRECT CALL TO FASTAPI
- useEffect(() => {
- if (!editorContent || !token || !hasUnsavedChanges) return;
-
- const interval = setInterval(async () => {
- try {
- const response = await fetch(
- `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/blocknote-content`,
- {
- method: "PUT",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
- body: JSON.stringify({ blocknote_document: editorContent }),
- }
- );
-
- if (response.ok) {
- setHasUnsavedChanges(false);
- toast.success("Auto-saved", { duration: 2000 });
- }
- } catch (error) {
- console.error("Auto-save failed:", error);
- }
- }, 30000); // 30 seconds
-
- return () => clearInterval(interval);
- }, [editorContent, documentId, token, hasUnsavedChanges]);
-
- // Save and exit - DIRECT CALL TO FASTAPI
- const handleSave = async () => {
- if (!token) {
- toast.error("Please login to save");
- return;
- }
-
- if (!editorContent) {
- toast.error("No content to save");
- return;
- }
-
- setSaving(true);
- try {
- // Save blocknote_document to database (without finalizing/reindexing)
- const response = await fetch(
- `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/blocknote-content`,
- {
- method: "PUT",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
- body: JSON.stringify({ blocknote_document: editorContent }),
- }
- );
-
- if (!response.ok) {
- const errorData = await response.json().catch(() => ({ detail: "Failed to save document" }));
- throw new Error(errorData.detail || "Failed to save document");
- }
-
- setHasUnsavedChanges(false);
- toast.success("Document saved successfully");
-
- // Small delay before redirect to show success message
- setTimeout(() => {
- router.push(`/dashboard/${params.search_space_id}/documents`);
- }, 500);
- } catch (error) {
- console.error("Error saving document:", error);
- toast.error(error instanceof Error ? error.message : "Failed to save document. Please try again.");
- } finally {
- setSaving(false);
- }
- };
-
- const handleCancel = () => {
- if (hasUnsavedChanges) {
- if (confirm("You have unsaved changes. Are you sure you want to leave?")) {
- router.back();
- }
- } else {
- router.back();
- }
- };
-
- if (loading) {
- return (
-
-
-
-
- Loading editor...
-
-
-
- );
- }
-
- if (error) {
- return (
-
-
-
-
-
- {error}
-
-
- router.back()} variant="outline" className="w-full">
-
- Go Back
-
-
-
-
-
- );
- }
-
- if (!document) {
- return (
-
-
-
-
- Document not found
-
-
-
- );
- }
-
- return (
-
- {/* Toolbar */}
-
-
-
-
-
{document.title}
- {hasUnsavedChanges && (
-
Unsaved changes
- )}
-
-
-
-
-
-
- Cancel
-
-
- {saving ? (
- <>
-
- Saving...
- >
- ) : (
- <>
-
- Save & Exit
- >
- )}
-
-
-
-
- {/* Editor Container */}
-
-
- );
+ const params = useParams();
+ const router = useRouter();
+ const documentId = params.documentId as string;
+
+ const [document, setDocument] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [saving, setSaving] = useState(false);
+ const [editorContent, setEditorContent] = useState(null);
+ const [error, setError] = useState(null);
+ const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false);
+
+ // Get auth token
+ const token =
+ typeof window !== "undefined" ? localStorage.getItem("surfsense_bearer_token") : null;
+
+ // Fetch document content - DIRECT CALL TO FASTAPI
+ useEffect(() => {
+ async function fetchDocument() {
+ if (!token) {
+ console.error("No auth token found");
+ setError("Please login to access the editor");
+ setLoading(false);
+ return;
+ }
+
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/editor-content`,
+ {
+ headers: {
+ Authorization: `Bearer ${token}`,
+ },
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response
+ .json()
+ .catch(() => ({ detail: "Failed to fetch document" }));
+ throw new Error(errorData.detail || "Failed to fetch document");
+ }
+
+ const data = await response.json();
+
+ // Check if blocknote_document exists
+ if (!data.blocknote_document) {
+ setError(
+ "This document does not have BlockNote content. Please re-upload the document to enable editing."
+ );
+ setLoading(false);
+ return;
+ }
+
+ setDocument(data);
+ setEditorContent(data.blocknote_document);
+ setError(null);
+ } catch (error) {
+ console.error("Error fetching document:", error);
+ setError(
+ error instanceof Error ? error.message : "Failed to fetch document. Please try again."
+ );
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ if (documentId && token) {
+ fetchDocument();
+ }
+ }, [documentId, token]);
+
+ // Track changes to mark as unsaved
+ useEffect(() => {
+ if (editorContent && document) {
+ setHasUnsavedChanges(true);
+ }
+ }, [editorContent, document]);
+
+ // Auto-save every 30 seconds - DIRECT CALL TO FASTAPI
+ useEffect(() => {
+ if (!editorContent || !token || !hasUnsavedChanges) return;
+
+ const interval = setInterval(async () => {
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/blocknote-content`,
+ {
+ method: "PUT",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${token}`,
+ },
+ body: JSON.stringify({ blocknote_document: editorContent }),
+ }
+ );
+
+ if (response.ok) {
+ setHasUnsavedChanges(false);
+ toast.success("Auto-saved", { duration: 2000 });
+ }
+ } catch (error) {
+ console.error("Auto-save failed:", error);
+ }
+ }, 30000); // 30 seconds
+
+ return () => clearInterval(interval);
+ }, [editorContent, documentId, token, hasUnsavedChanges]);
+
+ // Save and exit - DIRECT CALL TO FASTAPI
+ const handleSave = async () => {
+ if (!token) {
+ toast.error("Please login to save");
+ return;
+ }
+
+ if (!editorContent) {
+ toast.error("No content to save");
+ return;
+ }
+
+ setSaving(true);
+ try {
+ // Save blocknote_document to database (without finalizing/reindexing)
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/blocknote-content`,
+ {
+ method: "PUT",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${token}`,
+ },
+ body: JSON.stringify({ blocknote_document: editorContent }),
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response
+ .json()
+ .catch(() => ({ detail: "Failed to save document" }));
+ throw new Error(errorData.detail || "Failed to save document");
+ }
+
+ setHasUnsavedChanges(false);
+ toast.success("Document saved successfully");
+
+ // Small delay before redirect to show success message
+ setTimeout(() => {
+ router.push(`/dashboard/${params.search_space_id}/documents`);
+ }, 500);
+ } catch (error) {
+ console.error("Error saving document:", error);
+ toast.error(
+ error instanceof Error ? error.message : "Failed to save document. Please try again."
+ );
+ } finally {
+ setSaving(false);
+ }
+ };
+
+ const handleCancel = () => {
+ if (hasUnsavedChanges) {
+ if (confirm("You have unsaved changes. Are you sure you want to leave?")) {
+ router.back();
+ }
+ } else {
+ router.back();
+ }
+ };
+
+ if (loading) {
+ return (
+
+
+
+
+ Loading editor...
+
+
+
+ );
+ }
+
+ if (error) {
+ return (
+
+
+
+
+
+ {error}
+
+
+ router.back()} variant="outline" className="w-full">
+
+ Go Back
+
+
+
+
+
+ );
+ }
+
+ if (!document) {
+ return (
+
+
+
+
+ Document not found
+
+
+
+ );
+ }
+
+ return (
+
+ {/* Toolbar */}
+
+
+
+
+
{document.title}
+ {hasUnsavedChanges &&
Unsaved changes
}
+
+
+
+
+
+
+ Cancel
+
+
+ {saving ? (
+ <>
+
+ Saving...
+ >
+ ) : (
+ <>
+
+ Save & Exit
+ >
+ )}
+
+
+
+
+ {/* Editor Container */}
+
+
+ );
}
diff --git a/surfsense_web/components/BlockNoteEditor.tsx b/surfsense_web/components/BlockNoteEditor.tsx
index 57a6d380a..8064a0dc4 100644
--- a/surfsense_web/components/BlockNoteEditor.tsx
+++ b/surfsense_web/components/BlockNoteEditor.tsx
@@ -1,71 +1,68 @@
"use client";
-import { useEffect, useRef, useMemo } from "react";
import { useTheme } from "next-themes";
+import { useEffect, useMemo, useRef } from "react";
import "@blocknote/core/fonts/inter.css";
import "@blocknote/mantine/style.css";
-import { useCreateBlockNote } from "@blocknote/react";
import { BlockNoteView } from "@blocknote/mantine";
+import { useCreateBlockNote } from "@blocknote/react";
interface BlockNoteEditorProps {
- initialContent?: any;
- onChange?: (content: any) => void;
+ initialContent?: any;
+ onChange?: (content: any) => void;
}
-export default function BlockNoteEditor({
- initialContent,
- onChange,
-}: BlockNoteEditorProps) {
- const { resolvedTheme } = useTheme();
-
- // Track the initial content to prevent re-initialization
- const initialContentRef = useRef(null);
- const isInitializedRef = useRef(false);
-
- // Creates a new editor instance - only use initialContent on first render
- const editor = useCreateBlockNote({
- initialContent: initialContentRef.current === null ? (initialContent || undefined) : undefined,
- });
-
- // Store initial content on first render only
- useEffect(() => {
- if (initialContent && initialContentRef.current === null) {
- initialContentRef.current = initialContent;
- isInitializedRef.current = true;
- }
- }, [initialContent]);
+export default function BlockNoteEditor({ initialContent, onChange }: BlockNoteEditorProps) {
+ const { resolvedTheme } = useTheme();
- // Call onChange when document changes (but don't update from props)
- useEffect(() => {
- if (!onChange || !editor || !isInitializedRef.current) return;
-
- const handleChange = () => {
- onChange(editor.document);
- };
-
- // Subscribe to document changes
- const unsubscribe = editor.onChange(handleChange);
-
- return () => {
- unsubscribe();
- };
- }, [editor, onChange]);
+ // Track the initial content to prevent re-initialization
+ const initialContentRef = useRef(null);
+ const isInitializedRef = useRef(false);
- // Determine theme for BlockNote with custom dark mode background
- const blockNoteTheme = useMemo(() => {
- if (resolvedTheme === "dark") {
- // Custom dark theme - only override editor background, let BlockNote handle the rest
- return {
- colors: {
- editor: {
- background: "#0A0A0A", // Custom dark background
- },
- },
- };
- }
- return "light" as const;
- }, [resolvedTheme]);
+ // Creates a new editor instance - only use initialContent on first render
+ const editor = useCreateBlockNote({
+ initialContent: initialContentRef.current === null ? initialContent || undefined : undefined,
+ });
- // Renders the editor instance
- return ;
+ // Store initial content on first render only
+ useEffect(() => {
+ if (initialContent && initialContentRef.current === null) {
+ initialContentRef.current = initialContent;
+ isInitializedRef.current = true;
+ }
+ }, [initialContent]);
+
+ // Call onChange when document changes (but don't update from props)
+ useEffect(() => {
+ if (!onChange || !editor || !isInitializedRef.current) return;
+
+ const handleChange = () => {
+ onChange(editor.document);
+ };
+
+ // Subscribe to document changes
+ const unsubscribe = editor.onChange(handleChange);
+
+ return () => {
+ unsubscribe();
+ };
+ }, [editor, onChange]);
+
+ // Determine theme for BlockNote with custom dark mode background
+ const blockNoteTheme = useMemo(() => {
+ if (resolvedTheme === "dark") {
+ // Custom dark theme - only override editor background, let BlockNote handle the rest
+ return {
+ colors: {
+ editor: {
+ background: "#0A0A0A", // Custom dark background
+ },
+ },
+ };
+ }
+ return "light" as const;
+ }, [resolvedTheme]);
+
+ // Renders the editor instance
+ return ;
}
diff --git a/surfsense_web/components/DynamicBlockNoteEditor.tsx b/surfsense_web/components/DynamicBlockNoteEditor.tsx
index ceb678548..60fc6b11c 100644
--- a/surfsense_web/components/DynamicBlockNoteEditor.tsx
+++ b/surfsense_web/components/DynamicBlockNoteEditor.tsx
@@ -3,7 +3,4 @@
import dynamic from "next/dynamic";
// Dynamically import BlockNote editor with SSR disabled
-export const BlockNoteEditor = dynamic(
- () => import("./BlockNoteEditor"),
- { ssr: false }
-);
+export const BlockNoteEditor = dynamic(() => import("./BlockNoteEditor"), { ssr: false });
diff --git a/surfsense_web/components/dashboard-breadcrumb.tsx b/surfsense_web/components/dashboard-breadcrumb.tsx
index d44c2a756..8fea39b16 100644
--- a/surfsense_web/components/dashboard-breadcrumb.tsx
+++ b/surfsense_web/components/dashboard-breadcrumb.tsx
@@ -41,9 +41,8 @@ export function DashboardBreadcrumb() {
useEffect(() => {
if (segments[2] === "editor" && segments[3] && searchSpaceId) {
const documentId = segments[3];
- const token = typeof window !== "undefined"
- ? localStorage.getItem("surfsense_bearer_token")
- : null;
+ const token =
+ typeof window !== "undefined" ? localStorage.getItem("surfsense_bearer_token") : null;
if (token) {
fetch(
@@ -110,7 +109,7 @@ export function DashboardBreadcrumb() {
// Handle sub-sections
if (segments[3]) {
const subSection = segments[3];
-
+
// Handle editor sub-sections (document ID)
if (section === "editor") {
const documentLabel = documentTitle || subSection;
diff --git a/surfsense_web/next.config.ts b/surfsense_web/next.config.ts
index 0777d022b..da58fa10c 100644
--- a/surfsense_web/next.config.ts
+++ b/surfsense_web/next.config.ts
@@ -24,18 +24,13 @@ const nextConfig: NextConfig = {
],
},
// Mark BlockNote server packages as external
- serverExternalPackages: [
- '@blocknote/server-util',
- ],
+ serverExternalPackages: ["@blocknote/server-util"],
// Configure webpack to handle blocknote packages
webpack: (config, { isServer }) => {
if (isServer) {
// Don't bundle these packages on the server
- config.externals = [
- ...(config.externals || []),
- '@blocknote/server-util',
- ];
+ config.externals = [...(config.externals || []), "@blocknote/server-util"];
}
return config;
},
From 289b4de52d9b02653bd32a9ab7d7a05381cf774f Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 23 Nov 2025 16:47:09 +0530
Subject: [PATCH 04/36] refactor: comment out unused blocknote converter
imports in editor_routes.py
---
surfsense_backend/app/routes/editor_routes.py | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
index f4c4ca38e..a34c80db0 100644
--- a/surfsense_backend/app/routes/editor_routes.py
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -11,10 +11,11 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.db import Document, SearchSpace, User, get_async_session
from app.users import current_active_user
-from app.utils.blocknote_converter import (
- convert_blocknote_to_markdown,
- convert_markdown_to_blocknote,
-)
+
+# from app.utils.blocknote_converter import (
+# convert_blocknote_to_markdown,
+# convert_markdown_to_blocknote,
+# )
router = APIRouter()
From e9d32c351695299fcf41c23cd9c87bb58027fa47 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Thu, 27 Nov 2025 22:45:04 -0800
Subject: [PATCH 05/36] feat: Implement Role-Based Access Control (RBAC) for
search space resources.
- Introduce granular permissions for documents, chats, podcasts, and logs.
- Update routes to enforce permission checks for creating, reading, updating, and deleting resources. - Refactor user and search space interactions to align with RBAC model, removing ownership checks in favor of permission validation.
---
.../alembic/versions/39_add_rbac_tables.py | 179 +++
.../40_move_llm_preferences_to_searchspace.py | 63 +
.../app/agents/researcher/nodes.py | 59 +-
.../researcher/qna_agent/configuration.py | 1 -
.../app/agents/researcher/qna_agent/nodes.py | 9 +-
surfsense_backend/app/db.py | 461 +++++-
.../app/retriver/chunks_hybrid_search.py | 56 +-
.../app/retriver/documents_hybrid_search.py | 71 +-
surfsense_backend/app/routes/__init__.py | 2 +
surfsense_backend/app/routes/chats_routes.py | 182 ++-
.../app/routes/documents_routes.py | 250 +++-
.../app/routes/llm_config_routes.py | 226 +--
surfsense_backend/app/routes/logs_routes.py | 144 +-
.../app/routes/podcasts_routes.py | 189 ++-
surfsense_backend/app/routes/rbac_routes.py | 1084 ++++++++++++++
.../routes/search_source_connectors_routes.py | 156 +-
.../app/routes/search_spaces_routes.py | 206 ++-
surfsense_backend/app/schemas/__init__.py | 35 +
surfsense_backend/app/schemas/rbac_schemas.py | 186 +++
surfsense_backend/app/schemas/search_space.py | 7 +
.../app/services/connector_service.py | 112 +-
surfsense_backend/app/services/llm_service.py | 90 +-
.../app/services/query_service.py | 16 +-
.../app/utils/check_ownership.py | 19 -
surfsense_backend/app/utils/rbac.py | 274 ++++
.../[search_space_id]/client-layout.tsx | 17 +-
.../dashboard/[search_space_id]/layout.tsx | 6 +
.../[search_space_id]/logs/(manage)/page.tsx | 2 +-
.../dashboard/[search_space_id]/team/page.tsx | 1325 +++++++++++++++++
surfsense_web/app/dashboard/page.tsx | 23 +-
.../app/invite/[invite_code]/page.tsx | 336 +++++
.../components/sidebar/app-sidebar.tsx | 2 +
surfsense_web/components/sidebar/nav-main.tsx | 1 +
surfsense_web/hooks/index.ts | 1 +
surfsense_web/hooks/use-rbac.ts | 773 ++++++++++
surfsense_web/hooks/use-search-spaces.ts | 2 +
surfsense_web/messages/en.json | 4 +-
surfsense_web/messages/zh.json | 4 +-
38 files changed, 5916 insertions(+), 657 deletions(-)
create mode 100644 surfsense_backend/alembic/versions/39_add_rbac_tables.py
create mode 100644 surfsense_backend/alembic/versions/40_move_llm_preferences_to_searchspace.py
create mode 100644 surfsense_backend/app/routes/rbac_routes.py
create mode 100644 surfsense_backend/app/schemas/rbac_schemas.py
delete mode 100644 surfsense_backend/app/utils/check_ownership.py
create mode 100644 surfsense_backend/app/utils/rbac.py
create mode 100644 surfsense_web/app/dashboard/[search_space_id]/team/page.tsx
create mode 100644 surfsense_web/app/invite/[invite_code]/page.tsx
create mode 100644 surfsense_web/hooks/use-rbac.ts
diff --git a/surfsense_backend/alembic/versions/39_add_rbac_tables.py b/surfsense_backend/alembic/versions/39_add_rbac_tables.py
new file mode 100644
index 000000000..ac2df0df2
--- /dev/null
+++ b/surfsense_backend/alembic/versions/39_add_rbac_tables.py
@@ -0,0 +1,179 @@
+"""Add RBAC tables for search space access control
+
+Revision ID: 39
+Revises: 38
+Create Date: 2025-11-27 00:00:00.000000
+
+This migration adds:
+- Permission enum for granular access control
+- search_space_roles table for custom roles per search space
+- search_space_memberships table for user-searchspace-role relationships
+- search_space_invites table for invite links
+"""
+
+from collections.abc import Sequence
+
+from sqlalchemy import inspect
+
+from alembic import op
+
+revision: str = "39"
+down_revision: str | None = "38"
+branch_labels: str | Sequence[str] | None = None
+depends_on: str | Sequence[str] | None = None
+
+
+def upgrade() -> None:
+    """Upgrade schema - add RBAC tables for search space access control.
+
+    Tables are created with raw SQL using IF NOT EXISTS, and each index is
+    created only after checking the inspector for its name, so a partially
+    applied migration can safely be re-run.
+    """
+
+    # Create search_space_roles table
+    op.execute(
+        """
+        CREATE TABLE IF NOT EXISTS search_space_roles (
+            id SERIAL PRIMARY KEY,
+            created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+            name VARCHAR(100) NOT NULL,
+            description VARCHAR(500),
+            permissions TEXT[] NOT NULL DEFAULT '{}',
+            is_default BOOLEAN NOT NULL DEFAULT FALSE,
+            is_system_role BOOLEAN NOT NULL DEFAULT FALSE,
+            search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE,
+            CONSTRAINT uq_searchspace_role_name UNIQUE (search_space_id, name)
+        );
+        """
+    )
+
+    # Create search_space_invites table (needs to be created before memberships due to FK)
+    op.execute(
+        """
+        CREATE TABLE IF NOT EXISTS search_space_invites (
+            id SERIAL PRIMARY KEY,
+            created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+            invite_code VARCHAR(64) NOT NULL UNIQUE,
+            search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE,
+            role_id INTEGER REFERENCES search_space_roles(id) ON DELETE SET NULL,
+            created_by_id UUID REFERENCES "user"(id) ON DELETE SET NULL,
+            expires_at TIMESTAMPTZ,
+            max_uses INTEGER,
+            uses_count INTEGER NOT NULL DEFAULT 0,
+            is_active BOOLEAN NOT NULL DEFAULT TRUE,
+            name VARCHAR(100)
+        );
+        """
+    )
+
+    # Create search_space_memberships table
+    op.execute(
+        """
+        CREATE TABLE IF NOT EXISTS search_space_memberships (
+            id SERIAL PRIMARY KEY,
+            created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+            user_id UUID NOT NULL REFERENCES "user"(id) ON DELETE CASCADE,
+            search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE,
+            role_id INTEGER REFERENCES search_space_roles(id) ON DELETE SET NULL,
+            is_owner BOOLEAN NOT NULL DEFAULT FALSE,
+            joined_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+            invited_by_invite_id INTEGER REFERENCES search_space_invites(id) ON DELETE SET NULL,
+            CONSTRAINT uq_user_searchspace_membership UNIQUE (user_id, search_space_id)
+        );
+        """
+    )
+
+    # Get connection and inspector for checking existing indexes (keeps the
+    # index creation below idempotent)
+    conn = op.get_bind()
+    inspector = inspect(conn)
+
+    # Create indexes for search_space_roles
+    existing_indexes = [
+        idx["name"] for idx in inspector.get_indexes("search_space_roles")
+    ]
+    if "ix_search_space_roles_id" not in existing_indexes:
+        op.create_index("ix_search_space_roles_id", "search_space_roles", ["id"])
+    if "ix_search_space_roles_created_at" not in existing_indexes:
+        op.create_index(
+            "ix_search_space_roles_created_at", "search_space_roles", ["created_at"]
+        )
+    if "ix_search_space_roles_name" not in existing_indexes:
+        op.create_index("ix_search_space_roles_name", "search_space_roles", ["name"])
+
+    # Create indexes for search_space_memberships
+    existing_indexes = [
+        idx["name"] for idx in inspector.get_indexes("search_space_memberships")
+    ]
+    if "ix_search_space_memberships_id" not in existing_indexes:
+        op.create_index(
+            "ix_search_space_memberships_id", "search_space_memberships", ["id"]
+        )
+    if "ix_search_space_memberships_created_at" not in existing_indexes:
+        op.create_index(
+            "ix_search_space_memberships_created_at",
+            "search_space_memberships",
+            ["created_at"],
+        )
+    if "ix_search_space_memberships_user_id" not in existing_indexes:
+        op.create_index(
+            "ix_search_space_memberships_user_id",
+            "search_space_memberships",
+            ["user_id"],
+        )
+    if "ix_search_space_memberships_search_space_id" not in existing_indexes:
+        op.create_index(
+            "ix_search_space_memberships_search_space_id",
+            "search_space_memberships",
+            ["search_space_id"],
+        )
+
+    # Create indexes for search_space_invites
+    existing_indexes = [
+        idx["name"] for idx in inspector.get_indexes("search_space_invites")
+    ]
+    if "ix_search_space_invites_id" not in existing_indexes:
+        op.create_index("ix_search_space_invites_id", "search_space_invites", ["id"])
+    if "ix_search_space_invites_created_at" not in existing_indexes:
+        op.create_index(
+            "ix_search_space_invites_created_at", "search_space_invites", ["created_at"]
+        )
+    if "ix_search_space_invites_invite_code" not in existing_indexes:
+        op.create_index(
+            "ix_search_space_invites_invite_code",
+            "search_space_invites",
+            ["invite_code"],
+        )
+
+def downgrade() -> None:
+    """Downgrade schema - remove RBAC tables.
+
+    NOTE(review): unlike upgrade(), these drop_index/drop_table calls are not
+    guarded by existence checks, so a downgrade of a partially applied
+    migration will raise — confirm this is acceptable.
+    """
+
+    # Drop indexes for search_space_memberships
+    op.drop_index(
+        "ix_search_space_memberships_search_space_id",
+        table_name="search_space_memberships",
+    )
+    op.drop_index(
+        "ix_search_space_memberships_user_id", table_name="search_space_memberships"
+    )
+    op.drop_index(
+        "ix_search_space_memberships_created_at", table_name="search_space_memberships"
+    )
+    op.drop_index(
+        "ix_search_space_memberships_id", table_name="search_space_memberships"
+    )
+
+    # Drop indexes for search_space_invites
+    op.drop_index(
+        "ix_search_space_invites_invite_code", table_name="search_space_invites"
+    )
+    op.drop_index(
+        "ix_search_space_invites_created_at", table_name="search_space_invites"
+    )
+    op.drop_index("ix_search_space_invites_id", table_name="search_space_invites")
+
+    # Drop indexes for search_space_roles
+    op.drop_index("ix_search_space_roles_name", table_name="search_space_roles")
+    op.drop_index("ix_search_space_roles_created_at", table_name="search_space_roles")
+    op.drop_index("ix_search_space_roles_id", table_name="search_space_roles")
+
+    # Drop tables in correct order (respecting foreign key constraints)
+    op.drop_table("search_space_memberships")
+    op.drop_table("search_space_invites")
+    op.drop_table("search_space_roles")
diff --git a/surfsense_backend/alembic/versions/40_move_llm_preferences_to_searchspace.py b/surfsense_backend/alembic/versions/40_move_llm_preferences_to_searchspace.py
new file mode 100644
index 000000000..1067cffcc
--- /dev/null
+++ b/surfsense_backend/alembic/versions/40_move_llm_preferences_to_searchspace.py
@@ -0,0 +1,63 @@
+"""Move LLM preferences from user-level to search space level
+
+Revision ID: 40
+Revises: 39
+Create Date: 2024-11-27
+
+This migration moves LLM preferences (long_context_llm_id, fast_llm_id, strategic_llm_id)
+from the user_search_space_preferences table to the searchspaces table itself.
+
+This change supports the RBAC model where LLM preferences are shared by all members
+of a search space, rather than being per-user.
+"""
+
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "40"
+down_revision = "39"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """Add search-space-level LLM preference columns and seed them from the
+    owner's existing per-user preferences."""
+    # Add LLM preference columns to searchspaces table
+    # Nullable: spaces without configured preferences simply stay NULL
+    op.add_column(
+        "searchspaces",
+        sa.Column("long_context_llm_id", sa.Integer(), nullable=True),
+    )
+    op.add_column(
+        "searchspaces",
+        sa.Column("fast_llm_id", sa.Integer(), nullable=True),
+    )
+    op.add_column(
+        "searchspaces",
+        sa.Column("strategic_llm_id", sa.Integer(), nullable=True),
+    )
+
+    # Migrate existing preferences from user_search_space_preferences to searchspaces
+    # We take the owner's preferences (the user who created the search space),
+    # selected by matching ss.user_id against usp.user_id below
+    connection = op.get_bind()
+
+    # Get all search spaces and their owner's preferences
+    connection.execute(
+        sa.text("""
+            UPDATE searchspaces ss
+            SET
+                long_context_llm_id = usp.long_context_llm_id,
+                fast_llm_id = usp.fast_llm_id,
+                strategic_llm_id = usp.strategic_llm_id
+            FROM user_search_space_preferences usp
+            WHERE ss.id = usp.search_space_id
+            AND ss.user_id = usp.user_id
+        """)
+    )
+
+
+def downgrade():
+    """Drop the search-space-level LLM preference columns.
+
+    NOTE(review): the seeded values are discarded, not written back to
+    user_search_space_preferences, so a downgrade loses any changes made
+    to these settings after upgrade — confirm this is acceptable.
+    """
+    # Remove LLM preference columns from searchspaces table
+    op.drop_column("searchspaces", "strategic_llm_id")
+    op.drop_column("searchspaces", "fast_llm_id")
+    op.drop_column("searchspaces", "long_context_llm_id")
diff --git a/surfsense_backend/app/agents/researcher/nodes.py b/surfsense_backend/app/agents/researcher/nodes.py
index 223d82b67..c53e3348f 100644
--- a/surfsense_backend/app/agents/researcher/nodes.py
+++ b/surfsense_backend/app/agents/researcher/nodes.py
@@ -11,7 +11,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
# Additional imports for document fetching
from sqlalchemy.future import select
-from app.db import Document, SearchSpace
+from app.db import Document
from app.services.connector_service import ConnectorService
from app.services.query_service import QueryService
@@ -92,19 +92,18 @@ def extract_sources_from_documents(
async def fetch_documents_by_ids(
- document_ids: list[int], user_id: str, db_session: AsyncSession
+ document_ids: list[int], search_space_id: int, db_session: AsyncSession
) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
"""
- Fetch documents by their IDs with ownership check using DOCUMENTS mode approach.
+ Fetch documents by their IDs within a search space.
- This function ensures that only documents belonging to the user are fetched,
- providing security by checking ownership through SearchSpace association.
+ This function ensures that only documents belonging to the search space are fetched.
Similar to SearchMode.DOCUMENTS, it fetches full documents and concatenates their chunks.
Also creates source objects for UI display, grouped by document type.
Args:
document_ids: List of document IDs to fetch
- user_id: The user ID to check ownership
+ search_space_id: The search space ID to filter by
db_session: The database session
Returns:
@@ -114,11 +113,12 @@ async def fetch_documents_by_ids(
return [], []
try:
- # Query documents with ownership check
+ # Query documents filtered by search space
result = await db_session.execute(
- select(Document)
- .join(SearchSpace)
- .filter(Document.id.in_(document_ids), SearchSpace.user_id == user_id)
+ select(Document).filter(
+ Document.id.in_(document_ids),
+ Document.search_space_id == search_space_id,
+ )
)
documents = result.scalars().all()
@@ -515,7 +515,6 @@ async def fetch_documents_by_ids(
async def fetch_relevant_documents(
research_questions: list[str],
- user_id: str,
search_space_id: int,
db_session: AsyncSession,
connectors_to_search: list[str],
@@ -536,7 +535,6 @@ async def fetch_relevant_documents(
Args:
research_questions: List of research questions to find documents for
- user_id: The user ID
search_space_id: The search space ID
db_session: The database session
connectors_to_search: List of connectors to search
@@ -619,7 +617,6 @@ async def fetch_relevant_documents(
youtube_chunks,
) = await connector_service.search_youtube(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -646,7 +643,6 @@ async def fetch_relevant_documents(
extension_chunks,
) = await connector_service.search_extension(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -673,7 +669,6 @@ async def fetch_relevant_documents(
crawled_urls_chunks,
) = await connector_service.search_crawled_urls(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -697,7 +692,6 @@ async def fetch_relevant_documents(
elif connector == "FILE":
source_object, files_chunks = await connector_service.search_files(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -721,7 +715,6 @@ async def fetch_relevant_documents(
elif connector == "SLACK_CONNECTOR":
source_object, slack_chunks = await connector_service.search_slack(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -748,7 +741,6 @@ async def fetch_relevant_documents(
notion_chunks,
) = await connector_service.search_notion(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -775,7 +767,6 @@ async def fetch_relevant_documents(
github_chunks,
) = await connector_service.search_github(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -802,7 +793,6 @@ async def fetch_relevant_documents(
linear_chunks,
) = await connector_service.search_linear(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -829,7 +819,6 @@ async def fetch_relevant_documents(
tavily_chunks,
) = await connector_service.search_tavily(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
)
@@ -855,7 +844,6 @@ async def fetch_relevant_documents(
searx_chunks,
) = await connector_service.search_searxng(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
)
@@ -881,7 +869,6 @@ async def fetch_relevant_documents(
linkup_chunks,
) = await connector_service.search_linkup(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
mode=linkup_mode,
)
@@ -907,7 +894,6 @@ async def fetch_relevant_documents(
baidu_chunks,
) = await connector_service.search_baidu(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
)
@@ -933,7 +919,6 @@ async def fetch_relevant_documents(
discord_chunks,
) = await connector_service.search_discord(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -955,7 +940,6 @@ async def fetch_relevant_documents(
elif connector == "JIRA_CONNECTOR":
source_object, jira_chunks = await connector_service.search_jira(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -981,7 +965,6 @@ async def fetch_relevant_documents(
calendar_chunks,
) = await connector_service.search_google_calendar(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -1007,7 +990,6 @@ async def fetch_relevant_documents(
airtable_chunks,
) = await connector_service.search_airtable(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -1033,7 +1015,6 @@ async def fetch_relevant_documents(
gmail_chunks,
) = await connector_service.search_google_gmail(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -1059,7 +1040,6 @@ async def fetch_relevant_documents(
confluence_chunks,
) = await connector_service.search_confluence(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -1085,7 +1065,6 @@ async def fetch_relevant_documents(
clickup_chunks,
) = await connector_service.search_clickup(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -1112,7 +1091,6 @@ async def fetch_relevant_documents(
luma_chunks,
) = await connector_service.search_luma(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -1139,7 +1117,6 @@ async def fetch_relevant_documents(
elasticsearch_chunks,
) = await connector_service.search_elasticsearch(
user_query=reformulated_query,
- user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
@@ -1315,7 +1292,6 @@ async def reformulate_user_query(
reformulated_query = await QueryService.reformulate_query_with_chat_history(
user_query=user_query,
session=state.db_session,
- user_id=configuration.user_id,
search_space_id=configuration.search_space_id,
chat_history_str=chat_history_str,
)
@@ -1389,7 +1365,7 @@ async def handle_qna_workflow(
user_selected_documents,
) = await fetch_documents_by_ids(
document_ids=configuration.document_ids_to_add_in_context,
- user_id=configuration.user_id,
+ search_space_id=configuration.search_space_id,
db_session=state.db_session,
)
@@ -1404,7 +1380,7 @@ async def handle_qna_workflow(
# Create connector service using state db_session
connector_service = ConnectorService(
- state.db_session, user_id=configuration.user_id
+ state.db_session, search_space_id=configuration.search_space_id
)
await connector_service.initialize_counter()
@@ -1413,7 +1389,6 @@ async def handle_qna_workflow(
relevant_documents = await fetch_relevant_documents(
research_questions=research_questions,
- user_id=configuration.user_id,
search_space_id=configuration.search_space_id,
db_session=state.db_session,
connectors_to_search=configuration.connectors_to_search,
@@ -1459,7 +1434,6 @@ async def handle_qna_workflow(
"user_query": user_query, # Use the reformulated query
"reformulated_query": reformulated_query,
"relevant_documents": all_documents, # Use combined documents
- "user_id": configuration.user_id,
"search_space_id": configuration.search_space_id,
"language": configuration.language,
}
@@ -1551,12 +1525,11 @@ async def generate_further_questions(
Returns:
Dict containing the further questions in the "further_questions" key for state update.
"""
- from app.services.llm_service import get_user_fast_llm
+ from app.services.llm_service import get_fast_llm
# Get configuration and state data
configuration = Configuration.from_runnable_config(config)
chat_history = state.chat_history
- user_id = configuration.user_id
search_space_id = configuration.search_space_id
streaming_service = state.streaming_service
@@ -1571,10 +1544,10 @@ async def generate_further_questions(
}
)
- # Get user's fast LLM
- llm = await get_user_fast_llm(state.db_session, user_id, search_space_id)
+ # Get search space's fast LLM
+ llm = await get_fast_llm(state.db_session, search_space_id)
if not llm:
- error_message = f"No fast LLM configured for user {user_id} in search space {search_space_id}"
+ error_message = f"No fast LLM configured for search space {search_space_id}"
print(error_message)
writer({"yield_value": streaming_service.format_error(error_message)})
diff --git a/surfsense_backend/app/agents/researcher/qna_agent/configuration.py b/surfsense_backend/app/agents/researcher/qna_agent/configuration.py
index ea107a575..e7dd9175e 100644
--- a/surfsense_backend/app/agents/researcher/qna_agent/configuration.py
+++ b/surfsense_backend/app/agents/researcher/qna_agent/configuration.py
@@ -18,7 +18,6 @@ class Configuration:
relevant_documents: list[
Any
] # Documents provided directly to the agent for answering
- user_id: str # User identifier
search_space_id: int # Search space identifier
language: str | None = None # Language for responses
diff --git a/surfsense_backend/app/agents/researcher/qna_agent/nodes.py b/surfsense_backend/app/agents/researcher/qna_agent/nodes.py
index 3112a581a..37bdbc362 100644
--- a/surfsense_backend/app/agents/researcher/qna_agent/nodes.py
+++ b/surfsense_backend/app/agents/researcher/qna_agent/nodes.py
@@ -142,13 +142,12 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
Returns:
Dict containing the final answer in the "final_answer" key.
"""
- from app.services.llm_service import get_user_fast_llm
+ from app.services.llm_service import get_fast_llm
# Get configuration and relevant documents from configuration
configuration = Configuration.from_runnable_config(config)
documents = state.reranked_documents
user_query = configuration.user_query
- user_id = configuration.user_id
search_space_id = configuration.search_space_id
language = configuration.language
@@ -178,10 +177,10 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
else ""
)
- # Get user's fast LLM
- llm = await get_user_fast_llm(state.db_session, user_id, search_space_id)
+ # Get search space's fast LLM
+ llm = await get_fast_llm(state.db_session, search_space_id)
if not llm:
- error_message = f"No fast LLM configured for user {user_id} in search space {search_space_id}"
+ error_message = f"No fast LLM configured for search space {search_space_id}"
print(error_message)
raise RuntimeError(error_message)
diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py
index 06abb7a39..6195bec87 100644
--- a/surfsense_backend/app/db.py
+++ b/surfsense_backend/app/db.py
@@ -131,6 +131,169 @@ class LogStatus(str, Enum):
FAILED = "FAILED"
+class Permission(str, Enum):
+    """
+    Granular permissions for search space resources.
+
+    Values follow a "resource:action" naming pattern (e.g. "documents:read").
+    Use '*' (FULL_ACCESS) to grant all permissions.
+    """
+
+    # Documents
+    DOCUMENTS_CREATE = "documents:create"
+    DOCUMENTS_READ = "documents:read"
+    DOCUMENTS_UPDATE = "documents:update"
+    DOCUMENTS_DELETE = "documents:delete"
+
+    # Chats
+    CHATS_CREATE = "chats:create"
+    CHATS_READ = "chats:read"
+    CHATS_UPDATE = "chats:update"
+    CHATS_DELETE = "chats:delete"
+
+    # LLM Configs
+    LLM_CONFIGS_CREATE = "llm_configs:create"
+    LLM_CONFIGS_READ = "llm_configs:read"
+    LLM_CONFIGS_UPDATE = "llm_configs:update"
+    LLM_CONFIGS_DELETE = "llm_configs:delete"
+
+    # Podcasts
+    PODCASTS_CREATE = "podcasts:create"
+    PODCASTS_READ = "podcasts:read"
+    PODCASTS_UPDATE = "podcasts:update"
+    PODCASTS_DELETE = "podcasts:delete"
+
+    # Connectors
+    CONNECTORS_CREATE = "connectors:create"
+    CONNECTORS_READ = "connectors:read"
+    CONNECTORS_UPDATE = "connectors:update"
+    CONNECTORS_DELETE = "connectors:delete"
+
+    # Logs
+    LOGS_READ = "logs:read"
+    LOGS_DELETE = "logs:delete"
+
+    # Members
+    MEMBERS_INVITE = "members:invite"
+    MEMBERS_VIEW = "members:view"
+    MEMBERS_REMOVE = "members:remove"
+    MEMBERS_MANAGE_ROLES = "members:manage_roles"
+
+    # Roles
+    ROLES_CREATE = "roles:create"
+    ROLES_READ = "roles:read"
+    ROLES_UPDATE = "roles:update"
+    ROLES_DELETE = "roles:delete"
+
+    # Search Space Settings
+    SETTINGS_VIEW = "settings:view"
+    SETTINGS_UPDATE = "settings:update"
+    SETTINGS_DELETE = "settings:delete"  # Delete the entire search space
+
+    # Full access wildcard
+    FULL_ACCESS = "*"
+
+
+# Predefined role permission sets for convenience.
+# Owner gets the '*' wildcard; Admin gets everything except deleting the
+# search space itself; Editor can create/modify most resources but not
+# delete connectors or LLM configs; Viewer is read-only throughout.
+DEFAULT_ROLE_PERMISSIONS = {
+    "Owner": [Permission.FULL_ACCESS.value],
+    "Admin": [
+        # Documents
+        Permission.DOCUMENTS_CREATE.value,
+        Permission.DOCUMENTS_READ.value,
+        Permission.DOCUMENTS_UPDATE.value,
+        Permission.DOCUMENTS_DELETE.value,
+        # Chats
+        Permission.CHATS_CREATE.value,
+        Permission.CHATS_READ.value,
+        Permission.CHATS_UPDATE.value,
+        Permission.CHATS_DELETE.value,
+        # LLM Configs
+        Permission.LLM_CONFIGS_CREATE.value,
+        Permission.LLM_CONFIGS_READ.value,
+        Permission.LLM_CONFIGS_UPDATE.value,
+        Permission.LLM_CONFIGS_DELETE.value,
+        # Podcasts
+        Permission.PODCASTS_CREATE.value,
+        Permission.PODCASTS_READ.value,
+        Permission.PODCASTS_UPDATE.value,
+        Permission.PODCASTS_DELETE.value,
+        # Connectors
+        Permission.CONNECTORS_CREATE.value,
+        Permission.CONNECTORS_READ.value,
+        Permission.CONNECTORS_UPDATE.value,
+        Permission.CONNECTORS_DELETE.value,
+        # Logs
+        Permission.LOGS_READ.value,
+        Permission.LOGS_DELETE.value,
+        # Members
+        Permission.MEMBERS_INVITE.value,
+        Permission.MEMBERS_VIEW.value,
+        Permission.MEMBERS_REMOVE.value,
+        Permission.MEMBERS_MANAGE_ROLES.value,
+        # Roles
+        Permission.ROLES_CREATE.value,
+        Permission.ROLES_READ.value,
+        Permission.ROLES_UPDATE.value,
+        Permission.ROLES_DELETE.value,
+        # Settings (no delete)
+        Permission.SETTINGS_VIEW.value,
+        Permission.SETTINGS_UPDATE.value,
+    ],
+    "Editor": [
+        # Documents
+        Permission.DOCUMENTS_CREATE.value,
+        Permission.DOCUMENTS_READ.value,
+        Permission.DOCUMENTS_UPDATE.value,
+        Permission.DOCUMENTS_DELETE.value,
+        # Chats
+        Permission.CHATS_CREATE.value,
+        Permission.CHATS_READ.value,
+        Permission.CHATS_UPDATE.value,
+        Permission.CHATS_DELETE.value,
+        # LLM Configs (read/create/update - no delete)
+        Permission.LLM_CONFIGS_READ.value,
+        Permission.LLM_CONFIGS_CREATE.value,
+        Permission.LLM_CONFIGS_UPDATE.value,
+        # Podcasts
+        Permission.PODCASTS_CREATE.value,
+        Permission.PODCASTS_READ.value,
+        Permission.PODCASTS_UPDATE.value,
+        Permission.PODCASTS_DELETE.value,
+        # Connectors (create/read/update - no delete)
+        Permission.CONNECTORS_CREATE.value,
+        Permission.CONNECTORS_READ.value,
+        Permission.CONNECTORS_UPDATE.value,
+        # Logs (read only)
+        Permission.LOGS_READ.value,
+        # Members (view only)
+        Permission.MEMBERS_VIEW.value,
+        # Roles (read only)
+        Permission.ROLES_READ.value,
+        # Settings (view only)
+        Permission.SETTINGS_VIEW.value,
+    ],
+    "Viewer": [
+        # Documents (read only)
+        Permission.DOCUMENTS_READ.value,
+        # Chats (read only)
+        Permission.CHATS_READ.value,
+        # LLM Configs (read only)
+        Permission.LLM_CONFIGS_READ.value,
+        # Podcasts (read only)
+        Permission.PODCASTS_READ.value,
+        # Connectors (read only)
+        Permission.CONNECTORS_READ.value,
+        # Logs (read only)
+        Permission.LOGS_READ.value,
+        # Members (view only)
+        Permission.MEMBERS_VIEW.value,
+        # Roles (read only)
+        Permission.ROLES_READ.value,
+        # Settings (view only)
+        Permission.SETTINGS_VIEW.value,
+    ],
+}
+
+
class Base(DeclarativeBase):
pass
@@ -230,6 +393,13 @@ class SearchSpace(BaseModel, TimestampMixin):
qna_custom_instructions = Column(
Text, nullable=True, default=""
) # User's custom instructions
+
+ # Search space-level LLM preferences (shared by all members)
+ # Note: These can be negative IDs for global configs (from YAML) or positive IDs for custom configs (from DB)
+ long_context_llm_id = Column(Integer, nullable=True)
+ fast_llm_id = Column(Integer, nullable=True)
+ strategic_llm_id = Column(Integer, nullable=True)
+
user_id = Column(
UUID(as_uuid=True), ForeignKey("user.id", ondelete="CASCADE"), nullable=False
)
@@ -277,6 +447,26 @@ class SearchSpace(BaseModel, TimestampMixin):
cascade="all, delete-orphan",
)
+ # RBAC relationships
+ roles = relationship(
+ "SearchSpaceRole",
+ back_populates="search_space",
+ order_by="SearchSpaceRole.id",
+ cascade="all, delete-orphan",
+ )
+ memberships = relationship(
+ "SearchSpaceMembership",
+ back_populates="search_space",
+ order_by="SearchSpaceMembership.id",
+ cascade="all, delete-orphan",
+ )
+ invites = relationship(
+ "SearchSpaceInvite",
+ back_populates="search_space",
+ order_by="SearchSpaceInvite.id",
+ cascade="all, delete-orphan",
+ )
+
class SearchSourceConnector(BaseModel, TimestampMixin):
__tablename__ = "search_source_connectors"
@@ -368,13 +558,6 @@ class UserSearchSpacePreference(BaseModel, TimestampMixin):
user = relationship("User", back_populates="search_space_preferences")
search_space = relationship("SearchSpace", back_populates="user_preferences")
- # Note: Relationships removed because foreign keys no longer exist
- # Global configs (negative IDs) don't exist in llm_configs table
- # Application code manually fetches configs when needed
- # long_context_llm = relationship("LLMConfig", foreign_keys=[long_context_llm_id], post_update=True)
- # fast_llm = relationship("LLMConfig", foreign_keys=[fast_llm_id], post_update=True)
- # strategic_llm = relationship("LLMConfig", foreign_keys=[strategic_llm_id], post_update=True)
-
class Log(BaseModel, TimestampMixin):
__tablename__ = "logs"
@@ -393,6 +576,140 @@ class Log(BaseModel, TimestampMixin):
search_space = relationship("SearchSpace", back_populates="logs")
+class SearchSpaceRole(BaseModel, TimestampMixin):
+ """
+ Custom roles that can be defined per search space.
+ Each search space can have multiple roles with different permission sets.
+ """
+
+ __tablename__ = "search_space_roles"
+ __table_args__ = (
+ UniqueConstraint(
+ "search_space_id",
+ "name",
+ name="uq_searchspace_role_name",
+ ),
+ )
+
+ name = Column(String(100), nullable=False, index=True)
+ description = Column(String(500), nullable=True)
+ # List of Permission enum values (e.g., ["documents:read", "chats:create"])
+ permissions = Column(ARRAY(String), nullable=False, default=[])
+ # Whether this role is assigned to new members by default when they join via invite
+ is_default = Column(Boolean, nullable=False, default=False)
+ # System roles (Owner, Admin, Editor, Viewer) cannot be deleted
+ is_system_role = Column(Boolean, nullable=False, default=False)
+
+ search_space_id = Column(
+ Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
+ )
+ search_space = relationship("SearchSpace", back_populates="roles")
+
+ memberships = relationship(
+ "SearchSpaceMembership", back_populates="role", passive_deletes=True
+ )
+ invites = relationship(
+ "SearchSpaceInvite", back_populates="role", passive_deletes=True
+ )
+
+
+class SearchSpaceMembership(BaseModel, TimestampMixin):
+ """
+ Tracks user membership in search spaces with their assigned role.
+ Each user can be a member of multiple search spaces with different roles.
+ """
+
+ __tablename__ = "search_space_memberships"
+ __table_args__ = (
+ UniqueConstraint(
+ "user_id",
+ "search_space_id",
+ name="uq_user_searchspace_membership",
+ ),
+ )
+
+ user_id = Column(
+ UUID(as_uuid=True), ForeignKey("user.id", ondelete="CASCADE"), nullable=False
+ )
+ search_space_id = Column(
+ Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
+ )
+ role_id = Column(
+ Integer,
+ ForeignKey("search_space_roles.id", ondelete="SET NULL"),
+ nullable=True,
+ )
+ # Indicates if this user is the original creator/owner of the search space
+ is_owner = Column(Boolean, nullable=False, default=False)
+ # Timestamp when the user joined (via invite or as creator)
+ joined_at = Column(
+ TIMESTAMP(timezone=True),
+ nullable=False,
+ default=lambda: datetime.now(UTC),
+ )
+ # Reference to the invite used to join (null if owner/creator)
+ invited_by_invite_id = Column(
+ Integer,
+ ForeignKey("search_space_invites.id", ondelete="SET NULL"),
+ nullable=True,
+ )
+
+ user = relationship("User", back_populates="search_space_memberships")
+ search_space = relationship("SearchSpace", back_populates="memberships")
+ role = relationship("SearchSpaceRole", back_populates="memberships")
+ invited_by_invite = relationship(
+ "SearchSpaceInvite", back_populates="used_by_memberships"
+ )
+
+
+class SearchSpaceInvite(BaseModel, TimestampMixin):
+ """
+ Invite links for search spaces.
+ Users can create invite links with specific roles that others can use to join.
+ """
+
+ __tablename__ = "search_space_invites"
+
+ # Unique invite code (used in invite URLs)
+ invite_code = Column(String(64), nullable=False, unique=True, index=True)
+
+ search_space_id = Column(
+ Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
+ )
+ # Role to assign when invite is used (null means use default role)
+ role_id = Column(
+ Integer,
+ ForeignKey("search_space_roles.id", ondelete="SET NULL"),
+ nullable=True,
+ )
+ # User who created this invite
+ created_by_id = Column(
+ UUID(as_uuid=True),
+ ForeignKey("user.id", ondelete="SET NULL"),
+ nullable=True,
+ )
+
+ # Expiration timestamp (null means never expires)
+ expires_at = Column(TIMESTAMP(timezone=True), nullable=True)
+ # Maximum number of times this invite can be used (null means unlimited)
+ max_uses = Column(Integer, nullable=True)
+ # Number of times this invite has been used
+ uses_count = Column(Integer, nullable=False, default=0)
+ # Whether this invite is currently active
+ is_active = Column(Boolean, nullable=False, default=True)
+ # Optional custom name/label for the invite
+ name = Column(String(100), nullable=True)
+
+ search_space = relationship("SearchSpace", back_populates="invites")
+ role = relationship("SearchSpaceRole", back_populates="invites")
+ created_by = relationship("User", back_populates="created_invites")
+ used_by_memberships = relationship(
+ "SearchSpaceMembership",
+ back_populates="invited_by_invite",
+ passive_deletes=True,
+ )
+
+
if config.AUTH_TYPE == "GOOGLE":
class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base):
@@ -409,6 +726,18 @@ if config.AUTH_TYPE == "GOOGLE":
cascade="all, delete-orphan",
)
+ # RBAC relationships
+ search_space_memberships = relationship(
+ "SearchSpaceMembership",
+ back_populates="user",
+ cascade="all, delete-orphan",
+ )
+ created_invites = relationship(
+ "SearchSpaceInvite",
+ back_populates="created_by",
+ passive_deletes=True,
+ )
+
# Page usage tracking for ETL services
pages_limit = Column(Integer, nullable=False, default=500, server_default="500")
pages_used = Column(Integer, nullable=False, default=0, server_default="0")
@@ -423,6 +752,18 @@ else:
cascade="all, delete-orphan",
)
+ # RBAC relationships
+ search_space_memberships = relationship(
+ "SearchSpaceMembership",
+ back_populates="user",
+ cascade="all, delete-orphan",
+ )
+ created_invites = relationship(
+ "SearchSpaceInvite",
+ back_populates="created_by",
+ passive_deletes=True,
+ )
+
# Page usage tracking for ETL services
pages_limit = Column(Integer, nullable=False, default=500, server_default="500")
pages_used = Column(Integer, nullable=False, default=0, server_default="0")
@@ -492,3 +833,109 @@ async def get_documents_hybrid_search_retriever(
session: AsyncSession = Depends(get_async_session),
):
return DocumentHybridSearchRetriever(session)
+
+
+def has_permission(user_permissions: list[str], required_permission: str) -> bool:
+ """
+ Check if the user has the required permission.
+ Supports wildcard (*) for full access.
+
+ Args:
+ user_permissions: List of permission strings the user has
+ required_permission: The permission string to check for
+
+ Returns:
+ True if user has the permission, False otherwise
+ """
+ if not user_permissions:
+ return False
+
+ # Full access wildcard grants all permissions
+ if Permission.FULL_ACCESS.value in user_permissions:
+ return True
+
+ return required_permission in user_permissions
+
+
+def has_any_permission(
+ user_permissions: list[str], required_permissions: list[str]
+) -> bool:
+ """
+ Check if the user has any of the required permissions.
+
+ Args:
+ user_permissions: List of permission strings the user has
+ required_permissions: List of permission strings to check for (any match)
+
+ Returns:
+ True if user has at least one of the permissions, False otherwise
+ """
+ if not user_permissions:
+ return False
+
+ if Permission.FULL_ACCESS.value in user_permissions:
+ return True
+
+ return any(perm in user_permissions for perm in required_permissions)
+
+
+def has_all_permissions(
+ user_permissions: list[str], required_permissions: list[str]
+) -> bool:
+ """
+ Check if the user has all of the required permissions.
+
+ Args:
+ user_permissions: List of permission strings the user has
+ required_permissions: List of permission strings to check for (all must match)
+
+ Returns:
+ True if user has all of the permissions, False otherwise
+ """
+ if not user_permissions:
+ return False
+
+ if Permission.FULL_ACCESS.value in user_permissions:
+ return True
+
+ return all(perm in user_permissions for perm in required_permissions)
+
+
+def get_default_roles_config() -> list[dict]:
+ """
+ Get the configuration for default system roles.
+ These roles are created automatically when a search space is created.
+
+ Returns:
+ List of role configurations with name, description, permissions, and flags
+ """
+ return [
+ {
+ "name": "Owner",
+ "description": "Full access to all search space resources and settings",
+ "permissions": DEFAULT_ROLE_PERMISSIONS["Owner"],
+ "is_default": False,
+ "is_system_role": True,
+ },
+ {
+ "name": "Admin",
+ "description": "Can manage most resources except deleting the search space",
+ "permissions": DEFAULT_ROLE_PERMISSIONS["Admin"],
+ "is_default": False,
+ "is_system_role": True,
+ },
+ {
+ "name": "Editor",
+ "description": "Can create and edit documents, chats, and podcasts",
+ "permissions": DEFAULT_ROLE_PERMISSIONS["Editor"],
+ "is_default": True, # Default role for new members via invite
+ "is_system_role": True,
+ },
+ {
+ "name": "Viewer",
+ "description": "Read-only access to search space resources",
+ "permissions": DEFAULT_ROLE_PERMISSIONS["Viewer"],
+ "is_default": False,
+ "is_system_role": True,
+ },
+ ]
diff --git a/surfsense_backend/app/retriver/chunks_hybrid_search.py b/surfsense_backend/app/retriver/chunks_hybrid_search.py
index cb96ac695..25a121ad7 100644
--- a/surfsense_backend/app/retriver/chunks_hybrid_search.py
+++ b/surfsense_backend/app/retriver/chunks_hybrid_search.py
@@ -12,8 +12,7 @@ class ChucksHybridSearchRetriever:
self,
query_text: str,
top_k: int,
- user_id: str,
- search_space_id: int | None = None,
+ search_space_id: int,
) -> list:
"""
Perform vector similarity search on chunks.
@@ -21,8 +20,7 @@ class ChucksHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
- user_id: The ID of the user performing the search
- search_space_id: Optional search space ID to filter results
+ search_space_id: The search space ID to search within
Returns:
List of chunks sorted by vector similarity
@@ -31,25 +29,20 @@ class ChucksHybridSearchRetriever:
from sqlalchemy.orm import joinedload
from app.config import config
- from app.db import Chunk, Document, SearchSpace
+ from app.db import Chunk, Document
# Get embedding for the query
embedding_model = config.embedding_model_instance
query_embedding = embedding_model.embed(query_text)
- # Build the base query with user ownership check
+ # Build the query filtered by search space
query = (
select(Chunk)
.options(joinedload(Chunk.document).joinedload(Document.search_space))
.join(Document, Chunk.document_id == Document.id)
- .join(SearchSpace, Document.search_space_id == SearchSpace.id)
- .where(SearchSpace.user_id == user_id)
+ .where(Document.search_space_id == search_space_id)
)
- # Add search space filter if provided
- if search_space_id is not None:
- query = query.where(Document.search_space_id == search_space_id)
-
# Add vector similarity ordering
query = query.order_by(Chunk.embedding.op("<=>")(query_embedding)).limit(top_k)
@@ -63,8 +56,7 @@ class ChucksHybridSearchRetriever:
self,
query_text: str,
top_k: int,
- user_id: str,
- search_space_id: int | None = None,
+ search_space_id: int,
) -> list:
"""
Perform full-text keyword search on chunks.
@@ -72,8 +64,7 @@ class ChucksHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
- user_id: The ID of the user performing the search
- search_space_id: Optional search space ID to filter results
+ search_space_id: The search space ID to search within
Returns:
List of chunks sorted by text relevance
@@ -81,28 +72,23 @@ class ChucksHybridSearchRetriever:
from sqlalchemy import func, select
from sqlalchemy.orm import joinedload
- from app.db import Chunk, Document, SearchSpace
+ from app.db import Chunk, Document
# Create tsvector and tsquery for PostgreSQL full-text search
tsvector = func.to_tsvector("english", Chunk.content)
tsquery = func.plainto_tsquery("english", query_text)
- # Build the base query with user ownership check
+ # Build the query filtered by search space
query = (
select(Chunk)
.options(joinedload(Chunk.document).joinedload(Document.search_space))
.join(Document, Chunk.document_id == Document.id)
- .join(SearchSpace, Document.search_space_id == SearchSpace.id)
- .where(SearchSpace.user_id == user_id)
+ .where(Document.search_space_id == search_space_id)
.where(
tsvector.op("@@")(tsquery)
) # Only include results that match the query
)
- # Add search space filter if provided
- if search_space_id is not None:
- query = query.where(Document.search_space_id == search_space_id)
-
# Add text search ranking
query = query.order_by(func.ts_rank_cd(tsvector, tsquery).desc()).limit(top_k)
@@ -116,8 +102,7 @@ class ChucksHybridSearchRetriever:
self,
query_text: str,
top_k: int,
- user_id: str,
- search_space_id: int | None = None,
+ search_space_id: int,
document_type: str | None = None,
) -> list:
"""
@@ -126,8 +111,7 @@ class ChucksHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
- user_id: The ID of the user performing the search
- search_space_id: Optional search space ID to filter results
+ search_space_id: The search space ID to search within
document_type: Optional document type to filter results (e.g., "FILE", "CRAWLED_URL")
Returns:
@@ -137,7 +121,7 @@ class ChucksHybridSearchRetriever:
from sqlalchemy.orm import joinedload
from app.config import config
- from app.db import Chunk, Document, DocumentType, SearchSpace
+ from app.db import Chunk, Document, DocumentType
# Get embedding for the query
embedding_model = config.embedding_model_instance
@@ -151,12 +135,8 @@ class ChucksHybridSearchRetriever:
tsvector = func.to_tsvector("english", Chunk.content)
tsquery = func.plainto_tsquery("english", query_text)
- # Base conditions for document filtering
- base_conditions = [SearchSpace.user_id == user_id]
-
- # Add search space filter if provided
- if search_space_id is not None:
- base_conditions.append(Document.search_space_id == search_space_id)
+ # Base conditions for chunk filtering - search space is required
+ base_conditions = [Document.search_space_id == search_space_id]
# Add document type filter if provided
if document_type is not None:
@@ -171,7 +151,7 @@ class ChucksHybridSearchRetriever:
else:
base_conditions.append(Document.document_type == document_type)
- # CTE for semantic search with user ownership check
+ # CTE for semantic search filtered by search space
semantic_search_cte = (
select(
Chunk.id,
@@ -180,7 +160,6 @@ class ChucksHybridSearchRetriever:
.label("rank"),
)
.join(Document, Chunk.document_id == Document.id)
- .join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(*base_conditions)
)
@@ -190,7 +169,7 @@ class ChucksHybridSearchRetriever:
.cte("semantic_search")
)
- # CTE for keyword search with user ownership check
+ # CTE for keyword search filtered by search space
keyword_search_cte = (
select(
Chunk.id,
@@ -199,7 +178,6 @@ class ChucksHybridSearchRetriever:
.label("rank"),
)
.join(Document, Chunk.document_id == Document.id)
- .join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(*base_conditions)
.where(tsvector.op("@@")(tsquery))
)
diff --git a/surfsense_backend/app/retriver/documents_hybrid_search.py b/surfsense_backend/app/retriver/documents_hybrid_search.py
index b4e826189..0c08ecc05 100644
--- a/surfsense_backend/app/retriver/documents_hybrid_search.py
+++ b/surfsense_backend/app/retriver/documents_hybrid_search.py
@@ -12,8 +12,7 @@ class DocumentHybridSearchRetriever:
self,
query_text: str,
top_k: int,
- user_id: str,
- search_space_id: int | None = None,
+ search_space_id: int,
) -> list:
"""
Perform vector similarity search on documents.
@@ -21,8 +20,7 @@ class DocumentHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
- user_id: The ID of the user performing the search
- search_space_id: Optional search space ID to filter results
+ search_space_id: The search space ID to search within
Returns:
List of documents sorted by vector similarity
@@ -31,24 +29,19 @@ class DocumentHybridSearchRetriever:
from sqlalchemy.orm import joinedload
from app.config import config
- from app.db import Document, SearchSpace
+ from app.db import Document
# Get embedding for the query
embedding_model = config.embedding_model_instance
query_embedding = embedding_model.embed(query_text)
- # Build the base query with user ownership check
+ # Build the query filtered by search space
query = (
select(Document)
.options(joinedload(Document.search_space))
- .join(SearchSpace, Document.search_space_id == SearchSpace.id)
- .where(SearchSpace.user_id == user_id)
+ .where(Document.search_space_id == search_space_id)
)
- # Add search space filter if provided
- if search_space_id is not None:
- query = query.where(Document.search_space_id == search_space_id)
-
# Add vector similarity ordering
query = query.order_by(Document.embedding.op("<=>")(query_embedding)).limit(
top_k
@@ -64,8 +57,7 @@ class DocumentHybridSearchRetriever:
self,
query_text: str,
top_k: int,
- user_id: str,
- search_space_id: int | None = None,
+ search_space_id: int,
) -> list:
"""
Perform full-text keyword search on documents.
@@ -73,8 +65,7 @@ class DocumentHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
- user_id: The ID of the user performing the search
- search_space_id: Optional search space ID to filter results
+ search_space_id: The search space ID to search within
Returns:
List of documents sorted by text relevance
@@ -82,27 +73,22 @@ class DocumentHybridSearchRetriever:
from sqlalchemy import func, select
from sqlalchemy.orm import joinedload
- from app.db import Document, SearchSpace
+ from app.db import Document
# Create tsvector and tsquery for PostgreSQL full-text search
tsvector = func.to_tsvector("english", Document.content)
tsquery = func.plainto_tsquery("english", query_text)
- # Build the base query with user ownership check
+ # Build the query filtered by search space
query = (
select(Document)
.options(joinedload(Document.search_space))
- .join(SearchSpace, Document.search_space_id == SearchSpace.id)
- .where(SearchSpace.user_id == user_id)
+ .where(Document.search_space_id == search_space_id)
.where(
tsvector.op("@@")(tsquery)
) # Only include results that match the query
)
- # Add search space filter if provided
- if search_space_id is not None:
- query = query.where(Document.search_space_id == search_space_id)
-
# Add text search ranking
query = query.order_by(func.ts_rank_cd(tsvector, tsquery).desc()).limit(top_k)
@@ -116,8 +102,7 @@ class DocumentHybridSearchRetriever:
self,
query_text: str,
top_k: int,
- user_id: str,
- search_space_id: int | None = None,
+ search_space_id: int,
document_type: str | None = None,
) -> list:
"""
@@ -126,8 +111,7 @@ class DocumentHybridSearchRetriever:
Args:
query_text: The search query text
top_k: Number of results to return
- user_id: The ID of the user performing the search
- search_space_id: Optional search space ID to filter results
+ search_space_id: The search space ID to search within
document_type: Optional document type to filter results (e.g., "FILE", "CRAWLED_URL")
"""
@@ -135,7 +119,7 @@ class DocumentHybridSearchRetriever:
from sqlalchemy.orm import joinedload
from app.config import config
- from app.db import Document, DocumentType, SearchSpace
+ from app.db import Document, DocumentType
# Get embedding for the query
embedding_model = config.embedding_model_instance
@@ -149,12 +133,8 @@ class DocumentHybridSearchRetriever:
tsvector = func.to_tsvector("english", Document.content)
tsquery = func.plainto_tsquery("english", query_text)
- # Base conditions for document filtering
- base_conditions = [SearchSpace.user_id == user_id]
-
- # Add search space filter if provided
- if search_space_id is not None:
- base_conditions.append(Document.search_space_id == search_space_id)
+ # Base conditions for document filtering - search space is required
+ base_conditions = [Document.search_space_id == search_space_id]
# Add document type filter if provided
if document_type is not None:
@@ -169,17 +149,13 @@ class DocumentHybridSearchRetriever:
else:
base_conditions.append(Document.document_type == document_type)
- # CTE for semantic search with user ownership check
- semantic_search_cte = (
- select(
- Document.id,
- func.rank()
- .over(order_by=Document.embedding.op("<=>")(query_embedding))
- .label("rank"),
- )
- .join(SearchSpace, Document.search_space_id == SearchSpace.id)
- .where(*base_conditions)
- )
+ # CTE for semantic search filtered by search space
+ semantic_search_cte = select(
+ Document.id,
+ func.rank()
+ .over(order_by=Document.embedding.op("<=>")(query_embedding))
+ .label("rank"),
+ ).where(*base_conditions)
semantic_search_cte = (
semantic_search_cte.order_by(Document.embedding.op("<=>")(query_embedding))
@@ -187,7 +163,7 @@ class DocumentHybridSearchRetriever:
.cte("semantic_search")
)
- # CTE for keyword search with user ownership check
+ # CTE for keyword search filtered by search space
keyword_search_cte = (
select(
Document.id,
@@ -195,7 +171,6 @@ class DocumentHybridSearchRetriever:
.over(order_by=func.ts_rank_cd(tsvector, tsquery).desc())
.label("rank"),
)
- .join(SearchSpace, Document.search_space_id == SearchSpace.id)
.where(*base_conditions)
.where(tsvector.op("@@")(tsquery))
)
diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py
index 1c7e3505f..127a8d927 100644
--- a/surfsense_backend/app/routes/__init__.py
+++ b/surfsense_backend/app/routes/__init__.py
@@ -15,12 +15,14 @@ from .llm_config_routes import router as llm_config_router
from .logs_routes import router as logs_router
from .luma_add_connector_route import router as luma_add_connector_router
from .podcasts_routes import router as podcasts_router
+from .rbac_routes import router as rbac_router
from .search_source_connectors_routes import router as search_source_connectors_router
from .search_spaces_routes import router as search_spaces_router
router = APIRouter()
router.include_router(search_spaces_router)
+router.include_router(rbac_router) # RBAC routes for roles, members, invites
router.include_router(documents_router)
router.include_router(podcasts_router)
router.include_router(chats_router)
diff --git a/surfsense_backend/app/routes/chats_routes.py b/surfsense_backend/app/routes/chats_routes.py
index 05360cee0..d7aff102b 100644
--- a/surfsense_backend/app/routes/chats_routes.py
+++ b/surfsense_backend/app/routes/chats_routes.py
@@ -6,7 +6,14 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlalchemy.orm import selectinload
-from app.db import Chat, SearchSpace, User, UserSearchSpacePreference, get_async_session
+from app.db import (
+ Chat,
+ Permission,
+ SearchSpace,
+ SearchSpaceMembership,
+ User,
+ get_async_session,
+)
from app.schemas import (
AISDKChatRequest,
ChatCreate,
@@ -16,7 +23,7 @@ from app.schemas import (
)
from app.tasks.stream_connector_search_results import stream_connector_search_results
from app.users import current_active_user
-from app.utils.check_ownership import check_ownership
+from app.utils.rbac import check_permission
from app.utils.validators import (
validate_connectors,
validate_document_ids,
@@ -59,45 +66,38 @@ async def handle_chat_data(
# print("RESQUEST DATA:", request_data)
# print("SELECTED CONNECTORS:", selected_connectors)
- # Check if the search space belongs to the current user
+ # Check if the user has chat access to the search space
try:
- await check_ownership(session, SearchSpace, search_space_id, user)
- language_result = await session.execute(
- select(UserSearchSpacePreference)
- .options(
- selectinload(UserSearchSpacePreference.search_space).selectinload(
- SearchSpace.llm_configs
- ),
- # Note: Removed selectinload for LLM relationships as they no longer exist
- # Global configs (negative IDs) don't have foreign keys
- # LLM configs are now fetched manually when needed
- )
- .filter(
- UserSearchSpacePreference.search_space_id == search_space_id,
- UserSearchSpacePreference.user_id == user.id,
- )
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.CHATS_CREATE.value,
+ "You don't have permission to use chat in this search space",
)
- user_preference = language_result.scalars().first()
- # print("UserSearchSpacePreference:", user_preference)
+
+ # Get search space with LLM configs (preferences are now stored at search space level)
+ search_space_result = await session.execute(
+ select(SearchSpace)
+ .options(selectinload(SearchSpace.llm_configs))
+ .filter(SearchSpace.id == search_space_id)
+ )
+ search_space = search_space_result.scalars().first()
language = None
llm_configs = [] # Initialize to empty list
- if (
- user_preference
- and user_preference.search_space
- and user_preference.search_space.llm_configs
- ):
- llm_configs = user_preference.search_space.llm_configs
+ if search_space and search_space.llm_configs:
+ llm_configs = search_space.llm_configs
- # Manually fetch LLM configs since relationships no longer exist
- # Check fast_llm, long_context_llm, and strategic_llm IDs
+ # Get language from configured LLM preferences
+ # LLM preferences are now stored on the SearchSpace model
from app.config import config as app_config
for llm_id in [
- user_preference.fast_llm_id,
- user_preference.long_context_llm_id,
- user_preference.strategic_llm_id,
+ search_space.fast_llm_id,
+ search_space.long_context_llm_id,
+ search_space.strategic_llm_id,
]:
if llm_id is not None:
# Check if it's a global config (negative ID)
@@ -161,8 +161,18 @@ async def create_chat(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Create a new chat.
+ Requires CHATS_CREATE permission.
+ """
try:
- await check_ownership(session, SearchSpace, chat.search_space_id, user)
+ await check_permission(
+ session,
+ user,
+ chat.search_space_id,
+ Permission.CHATS_CREATE.value,
+ "You don't have permission to create chats in this search space",
+ )
db_chat = Chat(**chat.model_dump())
session.add(db_chat)
await session.commit()
@@ -197,6 +207,10 @@ async def read_chats(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ List chats the user has access to.
+ Requires CHATS_READ permission for the search space(s).
+ """
# Validate pagination parameters
if skip < 0:
raise HTTPException(
@@ -212,9 +226,17 @@ async def read_chats(
status_code=400, detail="search_space_id must be a positive integer"
)
try:
- # Select specific fields excluding messages
- query = (
- select(
+ if search_space_id is not None:
+ # Check permission for specific search space
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.CHATS_READ.value,
+ "You don't have permission to read chats in this search space",
+ )
+ # Select specific fields excluding messages
+ query = select(
Chat.id,
Chat.type,
Chat.title,
@@ -222,17 +244,28 @@ async def read_chats(
Chat.search_space_id,
Chat.created_at,
Chat.state_version,
+ ).filter(Chat.search_space_id == search_space_id)
+ else:
+ # Get chats from all search spaces user has membership in
+ query = (
+ select(
+ Chat.id,
+ Chat.type,
+ Chat.title,
+ Chat.initial_connectors,
+ Chat.search_space_id,
+ Chat.created_at,
+ Chat.state_version,
+ )
+ .join(SearchSpace)
+ .join(SearchSpaceMembership)
+ .filter(SearchSpaceMembership.user_id == user.id)
)
- .join(SearchSpace)
- .filter(SearchSpace.user_id == user.id)
- )
-
- # Filter by search_space_id if provided
- if search_space_id is not None:
- query = query.filter(Chat.search_space_id == search_space_id)
result = await session.execute(query.offset(skip).limit(limit))
return result.all()
+ except HTTPException:
+ raise
except OperationalError:
raise HTTPException(
status_code=503, detail="Database operation failed. Please try again later."
@@ -249,19 +282,32 @@ async def read_chat(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Get a specific chat by ID.
+ Requires CHATS_READ permission for the search space.
+ """
try:
- result = await session.execute(
- select(Chat)
- .join(SearchSpace)
- .filter(Chat.id == chat_id, SearchSpace.user_id == user.id)
- )
+ result = await session.execute(select(Chat).filter(Chat.id == chat_id))
chat = result.scalars().first()
+
if not chat:
raise HTTPException(
status_code=404,
- detail="Chat not found or you don't have permission to access it",
+ detail="Chat not found",
)
+
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ chat.search_space_id,
+ Permission.CHATS_READ.value,
+ "You don't have permission to read chats in this search space",
+ )
+
return chat
+ except HTTPException:
+ raise
except OperationalError:
raise HTTPException(
status_code=503, detail="Database operation failed. Please try again later."
@@ -280,8 +326,26 @@ async def update_chat(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Update a chat.
+ Requires CHATS_UPDATE permission for the search space.
+ """
try:
- db_chat = await read_chat(chat_id, session, user)
+ result = await session.execute(select(Chat).filter(Chat.id == chat_id))
+ db_chat = result.scalars().first()
+
+ if not db_chat:
+ raise HTTPException(status_code=404, detail="Chat not found")
+
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ db_chat.search_space_id,
+ Permission.CHATS_UPDATE.value,
+ "You don't have permission to update chats in this search space",
+ )
+
update_data = chat_update.model_dump(exclude_unset=True)
for key, value in update_data.items():
if key == "messages":
@@ -318,8 +382,26 @@ async def delete_chat(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Delete a chat.
+ Requires CHATS_DELETE permission for the search space.
+ """
try:
- db_chat = await read_chat(chat_id, session, user)
+ result = await session.execute(select(Chat).filter(Chat.id == chat_id))
+ db_chat = result.scalars().first()
+
+ if not db_chat:
+ raise HTTPException(status_code=404, detail="Chat not found")
+
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ db_chat.search_space_id,
+ Permission.CHATS_DELETE.value,
+ "You don't have permission to delete chats in this search space",
+ )
+
await session.delete(db_chat)
await session.commit()
return {"message": "Chat deleted successfully"}
diff --git a/surfsense_backend/app/routes/documents_routes.py b/surfsense_backend/app/routes/documents_routes.py
index ae9df0cf4..67015243f 100644
--- a/surfsense_backend/app/routes/documents_routes.py
+++ b/surfsense_backend/app/routes/documents_routes.py
@@ -10,7 +10,9 @@ from app.db import (
Chunk,
Document,
DocumentType,
+ Permission,
SearchSpace,
+ SearchSpaceMembership,
User,
get_async_session,
)
@@ -22,7 +24,7 @@ from app.schemas import (
PaginatedResponse,
)
from app.users import current_active_user
-from app.utils.check_ownership import check_ownership
+from app.utils.rbac import check_permission
try:
asyncio.set_event_loop_policy(asyncio.DefaultEventLoopPolicy())
@@ -44,9 +46,19 @@ async def create_documents(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Create new documents.
+ Requires DOCUMENTS_CREATE permission.
+ """
try:
- # Check if the user owns the search space
- await check_ownership(session, SearchSpace, request.search_space_id, user)
+ # Check permission
+ await check_permission(
+ session,
+ user,
+ request.search_space_id,
+ Permission.DOCUMENTS_CREATE.value,
+ "You don't have permission to create documents in this search space",
+ )
if request.document_type == DocumentType.EXTENSION:
from app.tasks.celery_tasks.document_tasks import (
@@ -93,8 +105,19 @@ async def create_documents_file_upload(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Upload files as documents.
+ Requires DOCUMENTS_CREATE permission.
+ """
try:
- await check_ownership(session, SearchSpace, search_space_id, user)
+ # Check permission
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.DOCUMENTS_CREATE.value,
+ "You don't have permission to create documents in this search space",
+ )
if not files:
raise HTTPException(status_code=400, detail="No files provided")
@@ -151,7 +174,8 @@ async def read_documents(
user: User = Depends(current_active_user),
):
"""
- List documents owned by the current user, with optional filtering and pagination.
+ List documents the user has access to, with optional filtering and pagination.
+ Requires DOCUMENTS_READ permission for the search space(s).
Args:
skip: Absolute number of items to skip from the beginning. If provided, it takes precedence over 'page'.
@@ -167,40 +191,49 @@ async def read_documents(
Notes:
- If both 'skip' and 'page' are provided, 'skip' is used.
- - Results are scoped to documents owned by the current user.
+ - Results are scoped to documents in search spaces the user has membership in.
"""
try:
from sqlalchemy import func
- query = (
- select(Document).join(SearchSpace).filter(SearchSpace.user_id == user.id)
- )
-
- # Filter by search_space_id if provided
+ # If specific search_space_id, check permission
if search_space_id is not None:
- query = query.filter(Document.search_space_id == search_space_id)
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.DOCUMENTS_READ.value,
+ "You don't have permission to read documents in this search space",
+ )
+ query = select(Document).filter(Document.search_space_id == search_space_id)
+ count_query = (
+ select(func.count())
+ .select_from(Document)
+ .filter(Document.search_space_id == search_space_id)
+ )
+ else:
+ # Get documents from all search spaces user has membership in
+ query = (
+ select(Document)
+ .join(SearchSpace)
+ .join(SearchSpaceMembership)
+ .filter(SearchSpaceMembership.user_id == user.id)
+ )
+ count_query = (
+ select(func.count())
+ .select_from(Document)
+ .join(SearchSpace)
+ .join(SearchSpaceMembership)
+ .filter(SearchSpaceMembership.user_id == user.id)
+ )
# Filter by document_types if provided
if document_types is not None and document_types.strip():
type_list = [t.strip() for t in document_types.split(",") if t.strip()]
if type_list:
query = query.filter(Document.document_type.in_(type_list))
-
- # Get total count
- count_query = (
- select(func.count())
- .select_from(Document)
- .join(SearchSpace)
- .filter(SearchSpace.user_id == user.id)
- )
- if search_space_id is not None:
- count_query = count_query.filter(
- Document.search_space_id == search_space_id
- )
- if document_types is not None and document_types.strip():
- type_list = [t.strip() for t in document_types.split(",") if t.strip()]
- if type_list:
count_query = count_query.filter(Document.document_type.in_(type_list))
+
total_result = await session.execute(count_query)
total = total_result.scalar() or 0
@@ -235,6 +268,8 @@ async def read_documents(
)
return PaginatedResponse(items=api_documents, total=total)
+ except HTTPException:
+ raise
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to fetch documents: {e!s}"
@@ -254,6 +289,7 @@ async def search_documents(
):
"""
Search documents by title substring, optionally filtered by search_space_id and document_types.
+ Requires DOCUMENTS_READ permission for the search space(s).
Args:
title: Case-insensitive substring to match against document titles. Required.
@@ -275,37 +311,48 @@ async def search_documents(
try:
from sqlalchemy import func
- query = (
- select(Document).join(SearchSpace).filter(SearchSpace.user_id == user.id)
- )
+ # If specific search_space_id, check permission
if search_space_id is not None:
- query = query.filter(Document.search_space_id == search_space_id)
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.DOCUMENTS_READ.value,
+ "You don't have permission to read documents in this search space",
+ )
+ query = select(Document).filter(Document.search_space_id == search_space_id)
+ count_query = (
+ select(func.count())
+ .select_from(Document)
+ .filter(Document.search_space_id == search_space_id)
+ )
+ else:
+ # Get documents from all search spaces user has membership in
+ query = (
+ select(Document)
+ .join(SearchSpace)
+ .join(SearchSpaceMembership)
+ .filter(SearchSpaceMembership.user_id == user.id)
+ )
+ count_query = (
+ select(func.count())
+ .select_from(Document)
+ .join(SearchSpace)
+ .join(SearchSpaceMembership)
+ .filter(SearchSpaceMembership.user_id == user.id)
+ )
# Only search by title (case-insensitive)
query = query.filter(Document.title.ilike(f"%{title}%"))
+ count_query = count_query.filter(Document.title.ilike(f"%{title}%"))
# Filter by document_types if provided
if document_types is not None and document_types.strip():
type_list = [t.strip() for t in document_types.split(",") if t.strip()]
if type_list:
query = query.filter(Document.document_type.in_(type_list))
-
- # Get total count
- count_query = (
- select(func.count())
- .select_from(Document)
- .join(SearchSpace)
- .filter(SearchSpace.user_id == user.id)
- )
- if search_space_id is not None:
- count_query = count_query.filter(
- Document.search_space_id == search_space_id
- )
- count_query = count_query.filter(Document.title.ilike(f"%{title}%"))
- if document_types is not None and document_types.strip():
- type_list = [t.strip() for t in document_types.split(",") if t.strip()]
- if type_list:
count_query = count_query.filter(Document.document_type.in_(type_list))
+
total_result = await session.execute(count_query)
total = total_result.scalar() or 0
@@ -340,6 +387,8 @@ async def search_documents(
)
return PaginatedResponse(items=api_documents, total=total)
+ except HTTPException:
+ raise
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to search documents: {e!s}"
@@ -353,7 +402,8 @@ async def get_document_type_counts(
user: User = Depends(current_active_user),
):
"""
- Get counts of documents by type for the current user.
+ Get counts of documents by type for search spaces the user has access to.
+ Requires DOCUMENTS_READ permission for the search space(s).
Args:
search_space_id: If provided, restrict counts to a specific search space.
@@ -366,20 +416,36 @@ async def get_document_type_counts(
try:
from sqlalchemy import func
- query = (
- select(Document.document_type, func.count(Document.id))
- .join(SearchSpace)
- .filter(SearchSpace.user_id == user.id)
- .group_by(Document.document_type)
- )
-
if search_space_id is not None:
- query = query.filter(Document.search_space_id == search_space_id)
+ # Check permission for specific search space
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.DOCUMENTS_READ.value,
+ "You don't have permission to read documents in this search space",
+ )
+ query = (
+ select(Document.document_type, func.count(Document.id))
+ .filter(Document.search_space_id == search_space_id)
+ .group_by(Document.document_type)
+ )
+ else:
+ # Get counts from all search spaces user has membership in
+ query = (
+ select(Document.document_type, func.count(Document.id))
+ .join(SearchSpace)
+ .join(SearchSpaceMembership)
+ .filter(SearchSpaceMembership.user_id == user.id)
+ .group_by(Document.document_type)
+ )
result = await session.execute(query)
type_counts = dict(result.all())
return type_counts
+ except HTTPException:
+ raise
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to fetch document type counts: {e!s}"
@@ -394,6 +460,7 @@ async def get_document_by_chunk_id(
):
"""
Retrieves a document based on a chunk ID, including all its chunks ordered by creation time.
+ Requires DOCUMENTS_READ permission for the search space.
The document's embedding and chunk embeddings are excluded from the response.
"""
try:
@@ -406,21 +473,29 @@ async def get_document_by_chunk_id(
status_code=404, detail=f"Chunk with id {chunk_id} not found"
)
- # Get the associated document and verify ownership
+ # Get the associated document
document_result = await session.execute(
select(Document)
.options(selectinload(Document.chunks))
- .join(SearchSpace)
- .filter(Document.id == chunk.document_id, SearchSpace.user_id == user.id)
+ .filter(Document.id == chunk.document_id)
)
document = document_result.scalars().first()
if not document:
raise HTTPException(
status_code=404,
- detail="Document not found or you don't have access to it",
+ detail="Document not found",
)
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ document.search_space_id,
+ Permission.DOCUMENTS_READ.value,
+ "You don't have permission to read documents in this search space",
+ )
+
# Sort chunks by creation time
sorted_chunks = sorted(document.chunks, key=lambda x: x.created_at)
@@ -449,11 +524,13 @@ async def read_document(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Get a specific document by ID.
+ Requires DOCUMENTS_READ permission for the search space.
+ """
try:
result = await session.execute(
- select(Document)
- .join(SearchSpace)
- .filter(Document.id == document_id, SearchSpace.user_id == user.id)
+ select(Document).filter(Document.id == document_id)
)
document = result.scalars().first()
@@ -462,6 +539,15 @@ async def read_document(
status_code=404, detail=f"Document with id {document_id} not found"
)
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ document.search_space_id,
+ Permission.DOCUMENTS_READ.value,
+ "You don't have permission to read documents in this search space",
+ )
+
# Convert database object to API-friendly format
return DocumentRead(
id=document.id,
@@ -472,6 +558,8 @@ async def read_document(
created_at=document.created_at,
search_space_id=document.search_space_id,
)
+ except HTTPException:
+ raise
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to fetch document: {e!s}"
@@ -485,12 +573,13 @@ async def update_document(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Update a document.
+ Requires DOCUMENTS_UPDATE permission for the search space.
+ """
try:
- # Query the document directly instead of using read_document function
result = await session.execute(
- select(Document)
- .join(SearchSpace)
- .filter(Document.id == document_id, SearchSpace.user_id == user.id)
+ select(Document).filter(Document.id == document_id)
)
db_document = result.scalars().first()
@@ -499,6 +588,15 @@ async def update_document(
status_code=404, detail=f"Document with id {document_id} not found"
)
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ db_document.search_space_id,
+ Permission.DOCUMENTS_UPDATE.value,
+ "You don't have permission to update documents in this search space",
+ )
+
update_data = document_update.model_dump(exclude_unset=True)
for key, value in update_data.items():
setattr(db_document, key, value)
@@ -530,12 +628,13 @@ async def delete_document(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Delete a document.
+ Requires DOCUMENTS_DELETE permission for the search space.
+ """
try:
- # Query the document directly instead of using read_document function
result = await session.execute(
- select(Document)
- .join(SearchSpace)
- .filter(Document.id == document_id, SearchSpace.user_id == user.id)
+ select(Document).filter(Document.id == document_id)
)
document = result.scalars().first()
@@ -544,6 +643,15 @@ async def delete_document(
status_code=404, detail=f"Document with id {document_id} not found"
)
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ document.search_space_id,
+ Permission.DOCUMENTS_DELETE.value,
+ "You don't have permission to delete documents in this search space",
+ )
+
await session.delete(document)
await session.commit()
return {"message": "Document deleted successfully"}
diff --git a/surfsense_backend/app/routes/llm_config_routes.py b/surfsense_backend/app/routes/llm_config_routes.py
index 35c3ce574..31c7200f5 100644
--- a/surfsense_backend/app/routes/llm_config_routes.py
+++ b/surfsense_backend/app/routes/llm_config_routes.py
@@ -8,67 +8,22 @@ from sqlalchemy.future import select
from app.config import config
from app.db import (
LLMConfig,
+ Permission,
SearchSpace,
User,
- UserSearchSpacePreference,
get_async_session,
)
from app.schemas import LLMConfigCreate, LLMConfigRead, LLMConfigUpdate
from app.services.llm_service import validate_llm_config
from app.users import current_active_user
+from app.utils.rbac import check_permission
router = APIRouter()
logger = logging.getLogger(__name__)
-# Helper function to check search space access
-async def check_search_space_access(
- session: AsyncSession, search_space_id: int, user: User
-) -> SearchSpace:
- """Verify that the user has access to the search space"""
- result = await session.execute(
- select(SearchSpace).filter(
- SearchSpace.id == search_space_id, SearchSpace.user_id == user.id
- )
- )
- search_space = result.scalars().first()
- if not search_space:
- raise HTTPException(
- status_code=404,
- detail="Search space not found or you don't have permission to access it",
- )
- return search_space
-
-
-# Helper function to get or create user search space preference
-async def get_or_create_user_preference(
- session: AsyncSession, user_id, search_space_id: int
-) -> UserSearchSpacePreference:
- """Get or create user preference for a search space"""
- result = await session.execute(
- select(UserSearchSpacePreference).filter(
- UserSearchSpacePreference.user_id == user_id,
- UserSearchSpacePreference.search_space_id == search_space_id,
- )
- # Removed selectinload options since relationships no longer exist
- )
- preference = result.scalars().first()
-
- if not preference:
- # Create new preference entry
- preference = UserSearchSpacePreference(
- user_id=user_id,
- search_space_id=search_space_id,
- )
- session.add(preference)
- await session.commit()
- await session.refresh(preference)
-
- return preference
-
-
class LLMPreferencesUpdate(BaseModel):
- """Schema for updating user LLM preferences"""
+ """Schema for updating search space LLM preferences"""
long_context_llm_id: int | None = None
fast_llm_id: int | None = None
@@ -76,7 +31,7 @@ class LLMPreferencesUpdate(BaseModel):
class LLMPreferencesRead(BaseModel):
- """Schema for reading user LLM preferences"""
+ """Schema for reading search space LLM preferences"""
long_context_llm_id: int | None = None
fast_llm_id: int | None = None
@@ -144,10 +99,19 @@ async def create_llm_config(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Create a new LLM configuration for a search space"""
+ """
+ Create a new LLM configuration for a search space.
+ Requires LLM_CONFIGS_CREATE permission.
+ """
try:
- # Verify user has access to the search space
- await check_search_space_access(session, llm_config.search_space_id, user)
+ # Verify user has permission to create LLM configs
+ await check_permission(
+ session,
+ user,
+ llm_config.search_space_id,
+ Permission.LLM_CONFIGS_CREATE.value,
+ "You don't have permission to create LLM configurations in this search space",
+ )
# Validate the LLM configuration by making a test API call
is_valid, error_message = await validate_llm_config(
@@ -187,10 +151,19 @@ async def read_llm_configs(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Get all LLM configurations for a search space"""
+ """
+ Get all LLM configurations for a search space.
+ Requires LLM_CONFIGS_READ permission.
+ """
try:
- # Verify user has access to the search space
- await check_search_space_access(session, search_space_id, user)
+ # Verify user has permission to read LLM configs
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.LLM_CONFIGS_READ.value,
+ "You don't have permission to view LLM configurations in this search space",
+ )
result = await session.execute(
select(LLMConfig)
@@ -213,7 +186,10 @@ async def read_llm_config(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Get a specific LLM configuration by ID"""
+ """
+ Get a specific LLM configuration by ID.
+ Requires LLM_CONFIGS_READ permission.
+ """
try:
# Get the LLM config
result = await session.execute(
@@ -224,8 +200,14 @@ async def read_llm_config(
if not llm_config:
raise HTTPException(status_code=404, detail="LLM configuration not found")
- # Verify user has access to the search space
- await check_search_space_access(session, llm_config.search_space_id, user)
+ # Verify user has permission to read LLM configs
+ await check_permission(
+ session,
+ user,
+ llm_config.search_space_id,
+ Permission.LLM_CONFIGS_READ.value,
+ "You don't have permission to view LLM configurations in this search space",
+ )
return llm_config
except HTTPException:
@@ -243,7 +225,10 @@ async def update_llm_config(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Update an existing LLM configuration"""
+ """
+ Update an existing LLM configuration.
+ Requires LLM_CONFIGS_UPDATE permission.
+ """
try:
# Get the LLM config
result = await session.execute(
@@ -254,8 +239,14 @@ async def update_llm_config(
if not db_llm_config:
raise HTTPException(status_code=404, detail="LLM configuration not found")
- # Verify user has access to the search space
- await check_search_space_access(session, db_llm_config.search_space_id, user)
+ # Verify user has permission to update LLM configs
+ await check_permission(
+ session,
+ user,
+ db_llm_config.search_space_id,
+ Permission.LLM_CONFIGS_UPDATE.value,
+ "You don't have permission to update LLM configurations in this search space",
+ )
update_data = llm_config_update.model_dump(exclude_unset=True)
@@ -311,7 +302,10 @@ async def delete_llm_config(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Delete an LLM configuration"""
+ """
+ Delete an LLM configuration.
+ Requires LLM_CONFIGS_DELETE permission.
+ """
try:
# Get the LLM config
result = await session.execute(
@@ -322,8 +316,14 @@ async def delete_llm_config(
if not db_llm_config:
raise HTTPException(status_code=404, detail="LLM configuration not found")
- # Verify user has access to the search space
- await check_search_space_access(session, db_llm_config.search_space_id, user)
+ # Verify user has permission to delete LLM configs
+ await check_permission(
+ session,
+ user,
+ db_llm_config.search_space_id,
+ Permission.LLM_CONFIGS_DELETE.value,
+ "You don't have permission to delete LLM configurations in this search space",
+ )
await session.delete(db_llm_config)
await session.commit()
@@ -337,28 +337,42 @@ async def delete_llm_config(
) from e
-# User LLM Preferences endpoints
+# Search Space LLM Preferences endpoints
@router.get(
"/search-spaces/{search_space_id}/llm-preferences",
response_model=LLMPreferencesRead,
)
-async def get_user_llm_preferences(
+async def get_llm_preferences(
search_space_id: int,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Get the current user's LLM preferences for a specific search space"""
+ """
+ Get the LLM preferences for a specific search space.
+ LLM preferences are shared by all members of the search space.
+ Requires LLM_CONFIGS_READ permission.
+ """
try:
- # Verify user has access to the search space
- await check_search_space_access(session, search_space_id, user)
-
- # Get or create user preference for this search space
- preference = await get_or_create_user_preference(
- session, user.id, search_space_id
+ # Verify user has permission to read LLM configs
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.LLM_CONFIGS_READ.value,
+ "You don't have permission to view LLM preferences in this search space",
)
+ # Get the search space
+ result = await session.execute(
+ select(SearchSpace).filter(SearchSpace.id == search_space_id)
+ )
+ search_space = result.scalars().first()
+
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found")
+
# Helper function to get config (global or custom)
async def get_config_for_id(config_id):
if config_id is None:
@@ -391,14 +405,14 @@ async def get_user_llm_preferences(
return result.scalars().first()
# Get the configs (from DB for custom, or constructed for global)
- long_context_llm = await get_config_for_id(preference.long_context_llm_id)
- fast_llm = await get_config_for_id(preference.fast_llm_id)
- strategic_llm = await get_config_for_id(preference.strategic_llm_id)
+ long_context_llm = await get_config_for_id(search_space.long_context_llm_id)
+ fast_llm = await get_config_for_id(search_space.fast_llm_id)
+ strategic_llm = await get_config_for_id(search_space.strategic_llm_id)
return {
- "long_context_llm_id": preference.long_context_llm_id,
- "fast_llm_id": preference.fast_llm_id,
- "strategic_llm_id": preference.strategic_llm_id,
+ "long_context_llm_id": search_space.long_context_llm_id,
+ "fast_llm_id": search_space.fast_llm_id,
+ "strategic_llm_id": search_space.strategic_llm_id,
"long_context_llm": long_context_llm,
"fast_llm": fast_llm,
"strategic_llm": strategic_llm,
@@ -415,22 +429,37 @@ async def get_user_llm_preferences(
"/search-spaces/{search_space_id}/llm-preferences",
response_model=LLMPreferencesRead,
)
-async def update_user_llm_preferences(
+async def update_llm_preferences(
search_space_id: int,
preferences: LLMPreferencesUpdate,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Update the current user's LLM preferences for a specific search space"""
+ """
+ Update the LLM preferences for a specific search space.
+ LLM preferences are shared by all members of the search space.
+    Requires SETTINGS_UPDATE permission (only users with settings access can change them).
+ """
try:
- # Verify user has access to the search space
- await check_search_space_access(session, search_space_id, user)
-
- # Get or create user preference for this search space
- preference = await get_or_create_user_preference(
- session, user.id, search_space_id
+ # Verify user has permission to update settings (not just LLM configs)
+ # This ensures only users with settings access can change shared LLM preferences
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.SETTINGS_UPDATE.value,
+ "You don't have permission to update LLM preferences in this search space",
)
+ # Get the search space
+ result = await session.execute(
+ select(SearchSpace).filter(SearchSpace.id == search_space_id)
+ )
+ search_space = result.scalars().first()
+
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found")
+
# Validate that all provided LLM config IDs belong to the search space
update_data = preferences.model_dump(exclude_unset=True)
@@ -485,18 +514,13 @@ async def update_user_llm_preferences(
f"Multiple languages detected in LLM selection for search_space {search_space_id}: {languages}. "
"This may affect response quality."
)
- # Don't raise an exception - allow users to proceed
- # raise HTTPException(
- # status_code=400,
- # detail="All selected LLM configurations must have the same language setting",
- # )
- # Update user preferences
+ # Update search space LLM preferences
for key, value in update_data.items():
- setattr(preference, key, value)
+ setattr(search_space, key, value)
await session.commit()
- await session.refresh(preference)
+ await session.refresh(search_space)
# Helper function to get config (global or custom)
async def get_config_for_id(config_id):
@@ -530,15 +554,15 @@ async def update_user_llm_preferences(
return result.scalars().first()
# Get the configs (from DB for custom, or constructed for global)
- long_context_llm = await get_config_for_id(preference.long_context_llm_id)
- fast_llm = await get_config_for_id(preference.fast_llm_id)
- strategic_llm = await get_config_for_id(preference.strategic_llm_id)
+ long_context_llm = await get_config_for_id(search_space.long_context_llm_id)
+ fast_llm = await get_config_for_id(search_space.fast_llm_id)
+ strategic_llm = await get_config_for_id(search_space.strategic_llm_id)
# Return updated preferences
return {
- "long_context_llm_id": preference.long_context_llm_id,
- "fast_llm_id": preference.fast_llm_id,
- "strategic_llm_id": preference.strategic_llm_id,
+ "long_context_llm_id": search_space.long_context_llm_id,
+ "fast_llm_id": search_space.fast_llm_id,
+ "strategic_llm_id": search_space.strategic_llm_id,
"long_context_llm": long_context_llm,
"fast_llm": fast_llm,
"strategic_llm": strategic_llm,
diff --git a/surfsense_backend/app/routes/logs_routes.py b/surfsense_backend/app/routes/logs_routes.py
index d9dd997ce..98fd9141e 100644
--- a/surfsense_backend/app/routes/logs_routes.py
+++ b/surfsense_backend/app/routes/logs_routes.py
@@ -5,10 +5,19 @@ from sqlalchemy import and_, desc
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
-from app.db import Log, LogLevel, LogStatus, SearchSpace, User, get_async_session
+from app.db import (
+ Log,
+ LogLevel,
+ LogStatus,
+ Permission,
+ SearchSpace,
+ SearchSpaceMembership,
+ User,
+ get_async_session,
+)
from app.schemas import LogCreate, LogRead, LogUpdate
from app.users import current_active_user
-from app.utils.check_ownership import check_ownership
+from app.utils.rbac import check_permission
router = APIRouter()
@@ -19,10 +28,19 @@ async def create_log(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Create a new log entry."""
+ """
+ Create a new log entry.
+    Note: This endpoint is typically called internally; it requires LOGS_READ permission because logs are usually system-generated.
+ """
try:
- # Check if the user owns the search space
- await check_ownership(session, SearchSpace, log.search_space_id, user)
+ # Check if the user has access to the search space
+ await check_permission(
+ session,
+ user,
+ log.search_space_id,
+ Permission.LOGS_READ.value,
+ "You don't have permission to access logs in this search space",
+ )
db_log = Log(**log.model_dump())
session.add(db_log)
@@ -51,22 +69,38 @@ async def read_logs(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Get logs with optional filtering."""
+ """
+ Get logs with optional filtering.
+ Requires LOGS_READ permission for the search space(s).
+ """
try:
- # Build base query - only logs from user's search spaces
- query = (
- select(Log)
- .join(SearchSpace)
- .filter(SearchSpace.user_id == user.id)
- .order_by(desc(Log.created_at)) # Most recent first
- )
-
# Apply filters
filters = []
if search_space_id is not None:
- await check_ownership(session, SearchSpace, search_space_id, user)
- filters.append(Log.search_space_id == search_space_id)
+ # Check permission for specific search space
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.LOGS_READ.value,
+ "You don't have permission to read logs in this search space",
+ )
+ # Build query for specific search space
+ query = (
+ select(Log)
+ .filter(Log.search_space_id == search_space_id)
+ .order_by(desc(Log.created_at))
+ )
+ else:
+ # Build base query - logs from search spaces user has membership in
+ query = (
+ select(Log)
+ .join(SearchSpace)
+ .join(SearchSpaceMembership)
+ .filter(SearchSpaceMembership.user_id == user.id)
+ .order_by(desc(Log.created_at))
+ )
if level is not None:
filters.append(Log.level == level)
@@ -104,19 +138,26 @@ async def read_log(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Get a specific log by ID."""
+ """
+ Get a specific log by ID.
+ Requires LOGS_READ permission for the search space.
+ """
try:
- # Get log and verify user owns the search space
- result = await session.execute(
- select(Log)
- .join(SearchSpace)
- .filter(Log.id == log_id, SearchSpace.user_id == user.id)
- )
+ result = await session.execute(select(Log).filter(Log.id == log_id))
log = result.scalars().first()
if not log:
raise HTTPException(status_code=404, detail="Log not found")
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ log.search_space_id,
+ Permission.LOGS_READ.value,
+ "You don't have permission to read logs in this search space",
+ )
+
return log
except HTTPException:
raise
@@ -133,19 +174,26 @@ async def update_log(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Update a log entry."""
+ """
+ Update a log entry.
+    Requires LOGS_READ permission (logs are typically updated by the system).
+ """
try:
- # Get log and verify user owns the search space
- result = await session.execute(
- select(Log)
- .join(SearchSpace)
- .filter(Log.id == log_id, SearchSpace.user_id == user.id)
- )
+ result = await session.execute(select(Log).filter(Log.id == log_id))
db_log = result.scalars().first()
if not db_log:
raise HTTPException(status_code=404, detail="Log not found")
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ db_log.search_space_id,
+ Permission.LOGS_READ.value,
+ "You don't have permission to access logs in this search space",
+ )
+
# Update only provided fields
update_data = log_update.model_dump(exclude_unset=True)
for field, value in update_data.items():
@@ -169,19 +217,26 @@ async def delete_log(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Delete a log entry."""
+ """
+ Delete a log entry.
+ Requires LOGS_DELETE permission for the search space.
+ """
try:
- # Get log and verify user owns the search space
- result = await session.execute(
- select(Log)
- .join(SearchSpace)
- .filter(Log.id == log_id, SearchSpace.user_id == user.id)
- )
+ result = await session.execute(select(Log).filter(Log.id == log_id))
db_log = result.scalars().first()
if not db_log:
raise HTTPException(status_code=404, detail="Log not found")
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ db_log.search_space_id,
+ Permission.LOGS_DELETE.value,
+ "You don't have permission to delete logs in this search space",
+ )
+
await session.delete(db_log)
await session.commit()
return {"message": "Log deleted successfully"}
@@ -201,10 +256,19 @@ async def get_logs_summary(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Get a summary of logs for a search space in the last X hours."""
+ """
+ Get a summary of logs for a search space in the last X hours.
+ Requires LOGS_READ permission for the search space.
+ """
try:
- # Check ownership
- await check_ownership(session, SearchSpace, search_space_id, user)
+ # Check permission
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.LOGS_READ.value,
+ "You don't have permission to read logs in this search space",
+ )
# Calculate time window
since = datetime.utcnow().replace(microsecond=0) - timedelta(hours=hours)
diff --git a/surfsense_backend/app/routes/podcasts_routes.py b/surfsense_backend/app/routes/podcasts_routes.py
index ae1fdaeef..deb9d9744 100644
--- a/surfsense_backend/app/routes/podcasts_routes.py
+++ b/surfsense_backend/app/routes/podcasts_routes.py
@@ -7,7 +7,15 @@ from sqlalchemy.exc import IntegrityError, SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
-from app.db import Chat, Podcast, SearchSpace, User, get_async_session
+from app.db import (
+ Chat,
+ Permission,
+ Podcast,
+ SearchSpace,
+ SearchSpaceMembership,
+ User,
+ get_async_session,
+)
from app.schemas import (
PodcastCreate,
PodcastGenerateRequest,
@@ -16,7 +24,7 @@ from app.schemas import (
)
from app.tasks.podcast_tasks import generate_chat_podcast
from app.users import current_active_user
-from app.utils.check_ownership import check_ownership
+from app.utils.rbac import check_permission
router = APIRouter()
@@ -27,8 +35,18 @@ async def create_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Create a new podcast.
+ Requires PODCASTS_CREATE permission.
+ """
try:
- await check_ownership(session, SearchSpace, podcast.search_space_id, user)
+ await check_permission(
+ session,
+ user,
+ podcast.search_space_id,
+ Permission.PODCASTS_CREATE.value,
+ "You don't have permission to create podcasts in this search space",
+ )
db_podcast = Podcast(**podcast.model_dump())
session.add(db_podcast)
await session.commit()
@@ -58,20 +76,45 @@ async def create_podcast(
async def read_podcasts(
skip: int = 0,
limit: int = 100,
+ search_space_id: int | None = None,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ List podcasts the user has access to.
+ Requires PODCASTS_READ permission for the search space(s).
+ """
if skip < 0 or limit < 1:
raise HTTPException(status_code=400, detail="Invalid pagination parameters")
try:
- result = await session.execute(
- select(Podcast)
- .join(SearchSpace)
- .filter(SearchSpace.user_id == user.id)
- .offset(skip)
- .limit(limit)
- )
+ if search_space_id is not None:
+ # Check permission for specific search space
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.PODCASTS_READ.value,
+ "You don't have permission to read podcasts in this search space",
+ )
+ result = await session.execute(
+ select(Podcast)
+ .filter(Podcast.search_space_id == search_space_id)
+ .offset(skip)
+ .limit(limit)
+ )
+ else:
+ # Get podcasts from all search spaces user has membership in
+ result = await session.execute(
+ select(Podcast)
+ .join(SearchSpace)
+ .join(SearchSpaceMembership)
+ .filter(SearchSpaceMembership.user_id == user.id)
+ .offset(skip)
+ .limit(limit)
+ )
return result.scalars().all()
+ except HTTPException:
+ raise
except SQLAlchemyError:
raise HTTPException(
status_code=500, detail="Database error occurred while fetching podcasts"
@@ -84,18 +127,29 @@ async def read_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Get a specific podcast by ID.
+ Requires PODCASTS_READ permission for the search space.
+ """
try:
- result = await session.execute(
- select(Podcast)
- .join(SearchSpace)
- .filter(Podcast.id == podcast_id, SearchSpace.user_id == user.id)
- )
+ result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id))
podcast = result.scalars().first()
+
if not podcast:
raise HTTPException(
status_code=404,
- detail="Podcast not found or you don't have permission to access it",
+ detail="Podcast not found",
)
+
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ podcast.search_space_id,
+ Permission.PODCASTS_READ.value,
+ "You don't have permission to read podcasts in this search space",
+ )
+
return podcast
except HTTPException as he:
raise he
@@ -112,8 +166,26 @@ async def update_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Update a podcast.
+ Requires PODCASTS_UPDATE permission for the search space.
+ """
try:
- db_podcast = await read_podcast(podcast_id, session, user)
+ result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id))
+ db_podcast = result.scalars().first()
+
+ if not db_podcast:
+ raise HTTPException(status_code=404, detail="Podcast not found")
+
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ db_podcast.search_space_id,
+ Permission.PODCASTS_UPDATE.value,
+ "You don't have permission to update podcasts in this search space",
+ )
+
update_data = podcast_update.model_dump(exclude_unset=True)
for key, value in update_data.items():
setattr(db_podcast, key, value)
@@ -140,8 +212,26 @@ async def delete_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Delete a podcast.
+ Requires PODCASTS_DELETE permission for the search space.
+ """
try:
- db_podcast = await read_podcast(podcast_id, session, user)
+ result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id))
+ db_podcast = result.scalars().first()
+
+ if not db_podcast:
+ raise HTTPException(status_code=404, detail="Podcast not found")
+
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ db_podcast.search_space_id,
+ Permission.PODCASTS_DELETE.value,
+ "You don't have permission to delete podcasts in this search space",
+ )
+
await session.delete(db_podcast)
await session.commit()
return {"message": "Podcast deleted successfully"}
@@ -181,9 +271,19 @@ async def generate_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Generate a podcast from a chat or document.
+ Requires PODCASTS_CREATE permission.
+ """
try:
- # Check if the user owns the search space
- await check_ownership(session, SearchSpace, request.search_space_id, user)
+ # Check if the user has permission to create podcasts
+ await check_permission(
+ session,
+ user,
+ request.search_space_id,
+ Permission.PODCASTS_CREATE.value,
+ "You don't have permission to create podcasts in this search space",
+ )
if request.type == "CHAT":
# Verify that all chat IDs belong to this user and search space
@@ -251,22 +351,29 @@ async def stream_podcast(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Stream a podcast audio file."""
+ """
+ Stream a podcast audio file.
+ Requires PODCASTS_READ permission for the search space.
+ """
try:
- # Get the podcast and check if user has access
- result = await session.execute(
- select(Podcast)
- .join(SearchSpace)
- .filter(Podcast.id == podcast_id, SearchSpace.user_id == user.id)
- )
+ result = await session.execute(select(Podcast).filter(Podcast.id == podcast_id))
podcast = result.scalars().first()
if not podcast:
raise HTTPException(
status_code=404,
- detail="Podcast not found or you don't have permission to access it",
+ detail="Podcast not found",
)
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ podcast.search_space_id,
+ Permission.PODCASTS_READ.value,
+ "You don't have permission to access podcasts in this search space",
+ )
+
# Get the file path
file_path = podcast.file_location
@@ -303,12 +410,30 @@ async def get_podcast_by_chat_id(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Get a podcast by its associated chat ID.
+ Requires PODCASTS_READ permission for the search space.
+ """
try:
- # Get the podcast and check if user has access
+ # First get the chat to find its search space
+ chat_result = await session.execute(select(Chat).filter(Chat.id == chat_id))
+ chat = chat_result.scalars().first()
+
+ if not chat:
+ return None
+
+ # Check permission for the search space
+ await check_permission(
+ session,
+ user,
+ chat.search_space_id,
+ Permission.PODCASTS_READ.value,
+ "You don't have permission to read podcasts in this search space",
+ )
+
+ # Get the podcast
result = await session.execute(
- select(Podcast)
- .join(SearchSpace)
- .filter(Podcast.chat_id == chat_id, SearchSpace.user_id == user.id)
+ select(Podcast).filter(Podcast.chat_id == chat_id)
)
podcast = result.scalars().first()
diff --git a/surfsense_backend/app/routes/rbac_routes.py b/surfsense_backend/app/routes/rbac_routes.py
new file mode 100644
index 000000000..c5392f284
--- /dev/null
+++ b/surfsense_backend/app/routes/rbac_routes.py
@@ -0,0 +1,1084 @@
+"""
+RBAC (Role-Based Access Control) routes for managing roles, memberships, and invites.
+
+Endpoints:
+- /searchspaces/{search_space_id}/roles - CRUD for roles
+- /searchspaces/{search_space_id}/members - CRUD for memberships
+- /searchspaces/{search_space_id}/invites - CRUD for invites
+- /invites/{invite_code}/info - Get invite info (public)
+- /invites/accept - Accept an invite
+- /permissions - List all available permissions
+"""
+
+import logging
+from datetime import UTC, datetime
+
+from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.future import select
+from sqlalchemy.orm import selectinload
+
+from app.db import (
+ Permission,
+ SearchSpace,
+ SearchSpaceInvite,
+ SearchSpaceMembership,
+ SearchSpaceRole,
+ User,
+ get_async_session,
+)
+from app.schemas import (
+ InviteAcceptRequest,
+ InviteAcceptResponse,
+ InviteCreate,
+ InviteInfoResponse,
+ InviteRead,
+ InviteUpdate,
+ MembershipRead,
+ MembershipUpdate,
+ PermissionInfo,
+ PermissionsListResponse,
+ RoleCreate,
+ RoleRead,
+ RoleUpdate,
+ UserSearchSpaceAccess,
+)
+from app.users import current_active_user
+from app.utils.rbac import (
+ check_permission,
+ check_search_space_access,
+ generate_invite_code,
+ get_default_role,
+ get_user_permissions,
+)
+
+logger = logging.getLogger(__name__)
+
+router = APIRouter()
+
+
+# ============ Permissions Endpoints ============
+
+
+@router.get("/permissions", response_model=PermissionsListResponse)
+async def list_all_permissions(
+ user: User = Depends(current_active_user),
+):
+ """
+ List all available permissions that can be assigned to roles.
+ """
+ permissions = []
+ for perm in Permission:
+ # Extract category from permission value (e.g., "documents:read" -> "documents")
+ category = perm.value.split(":")[0] if ":" in perm.value else "general"
+
+ permissions.append(
+ PermissionInfo(
+ value=perm.value,
+ name=perm.name,
+ category=category,
+ )
+ )
+
+ return PermissionsListResponse(permissions=permissions)
+
+
+# ============ Role Endpoints ============
+
+
+@router.post(
+    "/searchspaces/{search_space_id}/roles",
+    response_model=RoleRead,
+)
+async def create_role(
+    search_space_id: int,
+    role_data: RoleCreate,
+    session: AsyncSession = Depends(get_async_session),
+    user: User = Depends(current_active_user),
+):
+    """
+    Create a new custom role in a search space.
+    Requires ROLES_CREATE permission.
+
+    Raises 409 when a role with the same name already exists, 400 when an
+    unknown permission value is supplied.
+    """
+    try:
+        await check_permission(
+            session,
+            user,
+            search_space_id,
+            Permission.ROLES_CREATE.value,
+            "You don't have permission to create roles",
+        )
+
+        # Check if role with same name already exists
+        result = await session.execute(
+            select(SearchSpaceRole).filter(
+                SearchSpaceRole.search_space_id == search_space_id,
+                SearchSpaceRole.name == role_data.name,
+            )
+        )
+        if result.scalars().first():
+            raise HTTPException(
+                status_code=409,
+                detail=f"A role with name '{role_data.name}' already exists in this search space",
+            )
+
+        # Validate requested permissions against the known Permission enum values
+        valid_permissions = {p.value for p in Permission}
+        for perm in role_data.permissions:
+            if perm not in valid_permissions:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid permission: {perm}",
+                )
+
+        # If setting is_default to True, unset any existing default.
+        # (Fix: a duplicate execution of this same query whose result was
+        # discarded has been removed.)
+        if role_data.is_default:
+            existing_defaults = await session.execute(
+                select(SearchSpaceRole).filter(
+                    SearchSpaceRole.search_space_id == search_space_id,
+                    SearchSpaceRole.is_default == True, # noqa: E712
+                )
+            )
+            for existing in existing_defaults.scalars().all():
+                existing.is_default = False
+
+        # New roles are always non-system; system roles are seeded elsewhere
+        db_role = SearchSpaceRole(
+            **role_data.model_dump(),
+            search_space_id=search_space_id,
+            is_system_role=False,
+        )
+        session.add(db_role)
+        await session.commit()
+        await session.refresh(db_role)
+        return db_role
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        await session.rollback()
+        logger.error(f"Failed to create role: {e!s}", exc_info=True)
+        raise HTTPException(
+            status_code=500, detail=f"Failed to create role: {e!s}"
+        ) from e
+
+
+@router.get(
+    "/searchspaces/{search_space_id}/roles",
+    response_model=list[RoleRead],
+)
+async def list_roles(
+    search_space_id: int,
+    session: AsyncSession = Depends(get_async_session),
+    user: User = Depends(current_active_user),
+):
+    """
+    List all roles in a search space.
+    Requires ROLES_READ permission.
+
+    Returns both system and custom roles; no pagination is applied.
+    """
+    try:
+        # Authorization gate; presumably raises HTTPException on denial -
+        # confirm in app.utils.rbac.check_permission
+        await check_permission(
+            session,
+            user,
+            search_space_id,
+            Permission.ROLES_READ.value,
+            "You don't have permission to view roles",
+        )
+
+        result = await session.execute(
+            select(SearchSpaceRole).filter(
+                SearchSpaceRole.search_space_id == search_space_id
+            )
+        )
+        return result.scalars().all()
+
+    except HTTPException:
+        # Re-raise untouched so permission/404 errors keep their status codes
+        raise
+    except Exception as e:
+        raise HTTPException(
+            status_code=500, detail=f"Failed to fetch roles: {e!s}"
+        ) from e
+
+
+@router.get(
+    "/searchspaces/{search_space_id}/roles/{role_id}",
+    response_model=RoleRead,
+)
+async def get_role(
+    search_space_id: int,
+    role_id: int,
+    session: AsyncSession = Depends(get_async_session),
+    user: User = Depends(current_active_user),
+):
+    """
+    Get a specific role by ID.
+    Requires ROLES_READ permission.
+
+    Raises 404 when the role does not exist in this search space.
+    """
+    try:
+        await check_permission(
+            session,
+            user,
+            search_space_id,
+            Permission.ROLES_READ.value,
+            "You don't have permission to view roles",
+        )
+
+        # Filter on both id and search_space_id so a role from another
+        # space cannot be fetched through this space's URL
+        result = await session.execute(
+            select(SearchSpaceRole).filter(
+                SearchSpaceRole.id == role_id,
+                SearchSpaceRole.search_space_id == search_space_id,
+            )
+        )
+        role = result.scalars().first()
+
+        if not role:
+            raise HTTPException(status_code=404, detail="Role not found")
+
+        return role
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(
+            status_code=500, detail=f"Failed to fetch role: {e!s}"
+        ) from e
+
+
+@router.put(
+    "/searchspaces/{search_space_id}/roles/{role_id}",
+    response_model=RoleRead,
+)
+async def update_role(
+    search_space_id: int,
+    role_id: int,
+    role_update: RoleUpdate,
+    session: AsyncSession = Depends(get_async_session),
+    user: User = Depends(current_active_user),
+):
+    """
+    Update a role.
+    Requires ROLES_UPDATE permission.
+    System roles can only have their permissions updated, not name/description.
+
+    Raises 404 (unknown role), 400 (system-role restriction or invalid
+    permission), 409 (duplicate name).
+    """
+    try:
+        await check_permission(
+            session,
+            user,
+            search_space_id,
+            Permission.ROLES_UPDATE.value,
+            "You don't have permission to update roles",
+        )
+
+        result = await session.execute(
+            select(SearchSpaceRole).filter(
+                SearchSpaceRole.id == role_id,
+                SearchSpaceRole.search_space_id == search_space_id,
+            )
+        )
+        db_role = result.scalars().first()
+
+        if not db_role:
+            raise HTTPException(status_code=404, detail="Role not found")
+
+        # Only fields the client explicitly sent are applied
+        update_data = role_update.model_dump(exclude_unset=True)
+
+        # System roles have restrictions on what can be updated
+        if db_role.is_system_role:
+            # Can only update permissions for system roles
+            restricted_fields = {"name", "description", "is_default"}
+            if any(field in update_data for field in restricted_fields):
+                raise HTTPException(
+                    status_code=400,
+                    detail="Cannot modify name, description, or default status of system roles",
+                )
+
+        # Check for name conflict if updating name
+        if "name" in update_data and update_data["name"] != db_role.name:
+            existing = await session.execute(
+                select(SearchSpaceRole).filter(
+                    SearchSpaceRole.search_space_id == search_space_id,
+                    SearchSpaceRole.name == update_data["name"],
+                )
+            )
+            if existing.scalars().first():
+                raise HTTPException(
+                    status_code=409,
+                    detail=f"A role with name '{update_data['name']}' already exists",
+                )
+
+        # Validate permissions if provided
+        if "permissions" in update_data:
+            valid_permissions = {p.value for p in Permission}
+            for perm in update_data["permissions"]:
+                if perm not in valid_permissions:
+                    raise HTTPException(
+                        status_code=400,
+                        detail=f"Invalid permission: {perm}",
+                    )
+
+        # Handle is_default change: only one default role per search space,
+        # so promoting this role demotes the current default(s) first
+        if update_data.get("is_default") and not db_role.is_default:
+            # Unset existing default
+            existing_defaults = await session.execute(
+                select(SearchSpaceRole).filter(
+                    SearchSpaceRole.search_space_id == search_space_id,
+                    SearchSpaceRole.is_default == True, # noqa: E712
+                )
+            )
+            for existing in existing_defaults.scalars().all():
+                existing.is_default = False
+
+        for key, value in update_data.items():
+            setattr(db_role, key, value)
+
+        await session.commit()
+        await session.refresh(db_role)
+        return db_role
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        await session.rollback()
+        logger.error(f"Failed to update role: {e!s}", exc_info=True)
+        raise HTTPException(
+            status_code=500, detail=f"Failed to update role: {e!s}"
+        ) from e
+
+
+@router.delete("/searchspaces/{search_space_id}/roles/{role_id}")
+async def delete_role(
+ search_space_id: int,
+ role_id: int,
+ session: AsyncSession = Depends(get_async_session),
+ user: User = Depends(current_active_user),
+):
+ """
+ Delete a custom role.
+ Requires ROLES_DELETE permission.
+ System roles cannot be deleted.
+ """
+ try:
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.ROLES_DELETE.value,
+ "You don't have permission to delete roles",
+ )
+
+ result = await session.execute(
+ select(SearchSpaceRole).filter(
+ SearchSpaceRole.id == role_id,
+ SearchSpaceRole.search_space_id == search_space_id,
+ )
+ )
+ db_role = result.scalars().first()
+
+ if not db_role:
+ raise HTTPException(status_code=404, detail="Role not found")
+
+ if db_role.is_system_role:
+ raise HTTPException(
+ status_code=400,
+ detail="System roles cannot be deleted",
+ )
+
+ await session.delete(db_role)
+ await session.commit()
+ return {"message": "Role deleted successfully"}
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ await session.rollback()
+ logger.error(f"Failed to delete role: {e!s}", exc_info=True)
+ raise HTTPException(
+ status_code=500, detail=f"Failed to delete role: {e!s}"
+ ) from e
+
+
+# ============ Membership Endpoints ============
+
+
+@router.get(
+    "/searchspaces/{search_space_id}/members",
+    response_model=list[MembershipRead],
+)
+async def list_members(
+    search_space_id: int,
+    session: AsyncSession = Depends(get_async_session),
+    user: User = Depends(current_active_user),
+):
+    """
+    List all members of a search space.
+    Requires MEMBERS_VIEW permission.
+
+    Each entry is the membership row enriched with the member's email.
+    """
+    try:
+        await check_permission(
+            session,
+            user,
+            search_space_id,
+            Permission.MEMBERS_VIEW.value,
+            "You don't have permission to view members",
+        )
+
+        result = await session.execute(
+            select(SearchSpaceMembership)
+            .options(selectinload(SearchSpaceMembership.role))
+            .filter(SearchSpaceMembership.search_space_id == search_space_id)
+        )
+        memberships = result.scalars().all()
+
+        # Fix: batch-fetch member emails in a single query instead of one
+        # query per membership (previous N+1 pattern)
+        user_ids = {membership.user_id for membership in memberships}
+        emails_by_id = {}
+        if user_ids:
+            users_result = await session.execute(
+                select(User).filter(User.id.in_(user_ids))
+            )
+            emails_by_id = {u.id: u.email for u in users_result.scalars().all()}
+
+        # Build response dicts manually so user_email can be included
+        response = []
+        for membership in memberships:
+            membership_dict = {
+                "id": membership.id,
+                "user_id": membership.user_id,
+                "search_space_id": membership.search_space_id,
+                "role_id": membership.role_id,
+                "is_owner": membership.is_owner,
+                "joined_at": membership.joined_at,
+                "created_at": membership.created_at,
+                "role": membership.role,
+                "user_email": emails_by_id.get(membership.user_id),
+            }
+            response.append(membership_dict)
+
+        return response
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(
+            status_code=500, detail=f"Failed to fetch members: {e!s}"
+        ) from e
+
+
+@router.put(
+    "/searchspaces/{search_space_id}/members/{membership_id}",
+    response_model=MembershipRead,
+)
+async def update_member_role(
+    search_space_id: int,
+    membership_id: int,
+    membership_update: MembershipUpdate,
+    session: AsyncSession = Depends(get_async_session),
+    user: User = Depends(current_active_user),
+):
+    """
+    Update a member's role.
+    Requires MEMBERS_MANAGE_ROLES permission.
+    Cannot change owner's role.
+
+    NOTE(review): the assignment below sits inside the truthiness guard, so
+    an update without a role_id commits no change and a role can never be
+    cleared (set to None) through this endpoint - confirm that is intended.
+    """
+    try:
+        await check_permission(
+            session,
+            user,
+            search_space_id,
+            Permission.MEMBERS_MANAGE_ROLES.value,
+            "You don't have permission to manage member roles",
+        )
+
+        result = await session.execute(
+            select(SearchSpaceMembership)
+            .options(selectinload(SearchSpaceMembership.role))
+            .filter(
+                SearchSpaceMembership.id == membership_id,
+                SearchSpaceMembership.search_space_id == search_space_id,
+            )
+        )
+        db_membership = result.scalars().first()
+
+        if not db_membership:
+            raise HTTPException(status_code=404, detail="Membership not found")
+
+        # Cannot change owner's role
+        if db_membership.is_owner:
+            raise HTTPException(
+                status_code=400,
+                detail="Cannot change the owner's role",
+            )
+
+        # Verify the new role exists in this search space
+        if membership_update.role_id:
+            role_result = await session.execute(
+                select(SearchSpaceRole).filter(
+                    SearchSpaceRole.id == membership_update.role_id,
+                    SearchSpaceRole.search_space_id == search_space_id,
+                )
+            )
+            if not role_result.scalars().first():
+                raise HTTPException(
+                    status_code=404,
+                    detail="Role not found in this search space",
+                )
+
+            db_membership.role_id = membership_update.role_id
+        await session.commit()
+        await session.refresh(db_membership)
+
+        # Fetch user email
+        user_result = await session.execute(
+            select(User).filter(User.id == db_membership.user_id)
+        )
+        member_user = user_result.scalars().first()
+
+        # Response is built manually so user_email can be included
+        return {
+            "id": db_membership.id,
+            "user_id": db_membership.user_id,
+            "search_space_id": db_membership.search_space_id,
+            "role_id": db_membership.role_id,
+            "is_owner": db_membership.is_owner,
+            "joined_at": db_membership.joined_at,
+            "created_at": db_membership.created_at,
+            "role": db_membership.role,
+            "user_email": member_user.email if member_user else None,
+        }
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        await session.rollback()
+        logger.error(f"Failed to update member role: {e!s}", exc_info=True)
+        raise HTTPException(
+            status_code=500, detail=f"Failed to update member role: {e!s}"
+        ) from e
+
+
+@router.delete("/searchspaces/{search_space_id}/members/{membership_id}")
+async def remove_member(
+ search_space_id: int,
+ membership_id: int,
+ session: AsyncSession = Depends(get_async_session),
+ user: User = Depends(current_active_user),
+):
+ """
+ Remove a member from a search space.
+ Requires MEMBERS_REMOVE permission.
+ Cannot remove the owner.
+ """
+ try:
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.MEMBERS_REMOVE.value,
+ "You don't have permission to remove members",
+ )
+
+ result = await session.execute(
+ select(SearchSpaceMembership).filter(
+ SearchSpaceMembership.id == membership_id,
+ SearchSpaceMembership.search_space_id == search_space_id,
+ )
+ )
+ db_membership = result.scalars().first()
+
+ if not db_membership:
+ raise HTTPException(status_code=404, detail="Membership not found")
+
+ if db_membership.is_owner:
+ raise HTTPException(
+ status_code=400,
+ detail="Cannot remove the owner from the search space",
+ )
+
+ await session.delete(db_membership)
+ await session.commit()
+ return {"message": "Member removed successfully"}
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ await session.rollback()
+ logger.error(f"Failed to remove member: {e!s}", exc_info=True)
+ raise HTTPException(
+ status_code=500, detail=f"Failed to remove member: {e!s}"
+ ) from e
+
+
+@router.delete("/searchspaces/{search_space_id}/members/me")
+async def leave_search_space(
+ search_space_id: int,
+ session: AsyncSession = Depends(get_async_session),
+ user: User = Depends(current_active_user),
+):
+ """
+ Leave a search space (remove own membership).
+ Owners cannot leave their search space.
+ """
+ try:
+ result = await session.execute(
+ select(SearchSpaceMembership).filter(
+ SearchSpaceMembership.user_id == user.id,
+ SearchSpaceMembership.search_space_id == search_space_id,
+ )
+ )
+ db_membership = result.scalars().first()
+
+ if not db_membership:
+ raise HTTPException(
+ status_code=404,
+ detail="You are not a member of this search space",
+ )
+
+ if db_membership.is_owner:
+ raise HTTPException(
+ status_code=400,
+ detail="Owners cannot leave their search space. Transfer ownership first or delete the search space.",
+ )
+
+ await session.delete(db_membership)
+ await session.commit()
+ return {"message": "Successfully left the search space"}
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ await session.rollback()
+ logger.error(f"Failed to leave search space: {e!s}", exc_info=True)
+ raise HTTPException(
+ status_code=500, detail=f"Failed to leave search space: {e!s}"
+ ) from e
+
+
+# ============ Invite Endpoints ============
+
+
+@router.post(
+    "/searchspaces/{search_space_id}/invites",
+    response_model=InviteRead,
+)
+async def create_invite(
+    search_space_id: int,
+    invite_data: InviteCreate,
+    session: AsyncSession = Depends(get_async_session),
+    user: User = Depends(current_active_user),
+):
+    """
+    Create a new invite link for a search space.
+    Requires MEMBERS_INVITE permission.
+
+    The invite code is generated server-side; uniqueness depends on
+    generate_invite_code() plus any DB constraint - a collision would
+    surface as a 500 here (confirm a unique index exists).
+    """
+    try:
+        await check_permission(
+            session,
+            user,
+            search_space_id,
+            Permission.MEMBERS_INVITE.value,
+            "You don't have permission to create invites",
+        )
+
+        # Verify role exists if specified
+        if invite_data.role_id:
+            role_result = await session.execute(
+                select(SearchSpaceRole).filter(
+                    SearchSpaceRole.id == invite_data.role_id,
+                    SearchSpaceRole.search_space_id == search_space_id,
+                )
+            )
+            if not role_result.scalars().first():
+                raise HTTPException(
+                    status_code=404,
+                    detail="Role not found in this search space",
+                )
+
+        db_invite = SearchSpaceInvite(
+            **invite_data.model_dump(),
+            invite_code=generate_invite_code(),
+            search_space_id=search_space_id,
+            created_by_id=user.id,
+        )
+        session.add(db_invite)
+        await session.commit()
+
+        # Reload with role eagerly loaded so the response model can serialize it
+        result = await session.execute(
+            select(SearchSpaceInvite)
+            .options(selectinload(SearchSpaceInvite.role))
+            .filter(SearchSpaceInvite.id == db_invite.id)
+        )
+        db_invite = result.scalars().first()
+
+        return db_invite
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        await session.rollback()
+        logger.error(f"Failed to create invite: {e!s}", exc_info=True)
+        raise HTTPException(
+            status_code=500, detail=f"Failed to create invite: {e!s}"
+        ) from e
+
+
+@router.get(
+    "/searchspaces/{search_space_id}/invites",
+    response_model=list[InviteRead],
+)
+async def list_invites(
+    search_space_id: int,
+    session: AsyncSession = Depends(get_async_session),
+    user: User = Depends(current_active_user),
+):
+    """
+    List all invites for a search space.
+    Requires MEMBERS_INVITE permission (the same permission gates creating
+    and viewing invites).
+    """
+    try:
+        await check_permission(
+            session,
+            user,
+            search_space_id,
+            Permission.MEMBERS_INVITE.value,
+            "You don't have permission to view invites",
+        )
+
+        # Eager-load the role so InviteRead serialization needs no lazy loads
+        result = await session.execute(
+            select(SearchSpaceInvite)
+            .options(selectinload(SearchSpaceInvite.role))
+            .filter(SearchSpaceInvite.search_space_id == search_space_id)
+        )
+        return result.scalars().all()
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(
+            status_code=500, detail=f"Failed to fetch invites: {e!s}"
+        ) from e
+
+
+@router.put(
+    "/searchspaces/{search_space_id}/invites/{invite_id}",
+    response_model=InviteRead,
+)
+async def update_invite(
+    search_space_id: int,
+    invite_id: int,
+    invite_update: InviteUpdate,
+    session: AsyncSession = Depends(get_async_session),
+    user: User = Depends(current_active_user),
+):
+    """
+    Update an invite.
+    Requires MEMBERS_INVITE permission.
+
+    Raises 404 when the invite or a referenced role is not in this space.
+    """
+    try:
+        await check_permission(
+            session,
+            user,
+            search_space_id,
+            Permission.MEMBERS_INVITE.value,
+            "You don't have permission to update invites",
+        )
+
+        result = await session.execute(
+            select(SearchSpaceInvite)
+            .options(selectinload(SearchSpaceInvite.role))
+            .filter(
+                SearchSpaceInvite.id == invite_id,
+                SearchSpaceInvite.search_space_id == search_space_id,
+            )
+        )
+        db_invite = result.scalars().first()
+
+        if not db_invite:
+            raise HTTPException(status_code=404, detail="Invite not found")
+
+        # Only fields the client explicitly sent are applied
+        update_data = invite_update.model_dump(exclude_unset=True)
+
+        # Verify role exists if updating role_id
+        if update_data.get("role_id"):
+            role_result = await session.execute(
+                select(SearchSpaceRole).filter(
+                    SearchSpaceRole.id == update_data["role_id"],
+                    SearchSpaceRole.search_space_id == search_space_id,
+                )
+            )
+            if not role_result.scalars().first():
+                raise HTTPException(
+                    status_code=404,
+                    detail="Role not found in this search space",
+                )
+
+        for key, value in update_data.items():
+            setattr(db_invite, key, value)
+
+        await session.commit()
+        await session.refresh(db_invite)
+        return db_invite
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        await session.rollback()
+        logger.error(f"Failed to update invite: {e!s}", exc_info=True)
+        raise HTTPException(
+            status_code=500, detail=f"Failed to update invite: {e!s}"
+        ) from e
+
+
+@router.delete("/searchspaces/{search_space_id}/invites/{invite_id}")
+async def revoke_invite(
+ search_space_id: int,
+ invite_id: int,
+ session: AsyncSession = Depends(get_async_session),
+ user: User = Depends(current_active_user),
+):
+ """
+ Revoke (delete) an invite.
+ Requires MEMBERS_INVITE permission.
+ """
+ try:
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.MEMBERS_INVITE.value,
+ "You don't have permission to revoke invites",
+ )
+
+ result = await session.execute(
+ select(SearchSpaceInvite).filter(
+ SearchSpaceInvite.id == invite_id,
+ SearchSpaceInvite.search_space_id == search_space_id,
+ )
+ )
+ db_invite = result.scalars().first()
+
+ if not db_invite:
+ raise HTTPException(status_code=404, detail="Invite not found")
+
+ await session.delete(db_invite)
+ await session.commit()
+ return {"message": "Invite revoked successfully"}
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ await session.rollback()
+ logger.error(f"Failed to revoke invite: {e!s}", exc_info=True)
+ raise HTTPException(
+ status_code=500, detail=f"Failed to revoke invite: {e!s}"
+ ) from e
+
+
+# ============ Public Invite Endpoints ============
+
+
@router.get("/invites/{invite_code}/info", response_model=InviteInfoResponse)
async def get_invite_info(
    invite_code: str,
    session: AsyncSession = Depends(get_async_session),
):
    """
    Get information about an invite (public endpoint, no auth required).

    Returns minimal info for displaying on the invite acceptance page.
    Invalid invites are reported via ``is_valid=False`` plus a message
    rather than an HTTP error, so the page can render a friendly state.
    """
    try:
        result = await session.execute(
            select(SearchSpaceInvite)
            .options(
                selectinload(SearchSpaceInvite.role),
                selectinload(SearchSpaceInvite.search_space),
            )
            .filter(SearchSpaceInvite.invite_code == invite_code)
        )
        invite = result.scalars().first()

        if not invite:
            return InviteInfoResponse(
                search_space_name="",
                role_name=None,
                is_valid=False,
                message="Invite not found",
            )

        space_name = invite.search_space.name if invite.search_space else ""
        role_name = invite.role.name if invite.role else None

        def _invalid(message: str) -> InviteInfoResponse:
            # Every "invalid" outcome shares the same shape; only the
            # message differs, so build them in one place.
            return InviteInfoResponse(
                search_space_name=space_name,
                role_name=role_name,
                is_valid=False,
                message=message,
            )

        if not invite.is_active:
            return _invalid("This invite is no longer active")

        # NOTE(review): assumes expires_at is stored timezone-aware (UTC);
        # comparing a naive datetime against datetime.now(UTC) would raise
        # TypeError — confirm against the column definition.
        if invite.expires_at and invite.expires_at < datetime.now(UTC):
            return _invalid("This invite has expired")

        if invite.max_uses and invite.uses_count >= invite.max_uses:
            return _invalid("This invite has reached its maximum uses")

        # Valid invites report "Default" when no explicit role is attached
        # (invalid responses report None), matching the original behavior.
        return InviteInfoResponse(
            search_space_name=space_name,
            role_name=invite.role.name if invite.role else "Default",
            is_valid=True,
        )

    except Exception as e:
        logger.error(f"Failed to get invite info: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to get invite info: {e!s}"
        ) from e
+
+
@router.post("/invites/accept", response_model=InviteAcceptResponse)
async def accept_invite(
    request: InviteAcceptRequest,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """
    Accept an invite and join a search space.

    Looks up the invite by code, validates it (active, not expired, under
    its use cap), rejects users who are already members, then creates a
    membership — using the invite's role, or falling back to the space's
    default role — and increments the invite's use counter, all committed
    in a single transaction.
    """
    try:
        # Eager-load role and search_space so their names can be read for
        # the response without lazy-load round trips.
        result = await session.execute(
            select(SearchSpaceInvite)
            .options(
                selectinload(SearchSpaceInvite.role),
                selectinload(SearchSpaceInvite.search_space),
            )
            .filter(SearchSpaceInvite.invite_code == request.invite_code)
        )
        invite = result.scalars().first()

        if not invite:
            raise HTTPException(status_code=404, detail="Invite not found")

        # Validate invite
        if not invite.is_active:
            raise HTTPException(
                status_code=400, detail="This invite is no longer active"
            )

        # NOTE(review): assumes expires_at is timezone-aware; a naive value
        # would make this comparison raise TypeError — confirm.
        if invite.expires_at and invite.expires_at < datetime.now(UTC):
            raise HTTPException(status_code=400, detail="This invite has expired")

        # NOTE(review): no row lock between this check and the increment
        # below; concurrent accepts could exceed max_uses — confirm this is
        # acceptable or add SELECT ... FOR UPDATE.
        if invite.max_uses and invite.uses_count >= invite.max_uses:
            raise HTTPException(
                status_code=400, detail="This invite has reached its maximum uses"
            )

        # Check if user is already a member
        existing_membership = await session.execute(
            select(SearchSpaceMembership).filter(
                SearchSpaceMembership.user_id == user.id,
                SearchSpaceMembership.search_space_id == invite.search_space_id,
            )
        )
        if existing_membership.scalars().first():
            raise HTTPException(
                status_code=400,
                detail="You are already a member of this search space",
            )

        # Determine role to assign
        role_id = invite.role_id
        if not role_id:
            # Use default role
            default_role = await get_default_role(session, invite.search_space_id)
            role_id = default_role.id if default_role else None

        # Create membership
        membership = SearchSpaceMembership(
            user_id=user.id,
            search_space_id=invite.search_space_id,
            role_id=role_id,
            is_owner=False,
            invited_by_invite_id=invite.id,
        )
        session.add(membership)

        # Increment invite usage
        invite.uses_count += 1

        await session.commit()

        # NOTE(review): attributes are read after commit; this relies on the
        # session being configured with expire_on_commit=False — confirm.
        role_name = invite.role.name if invite.role else "Default"
        search_space_name = invite.search_space.name if invite.search_space else ""

        return InviteAcceptResponse(
            message="Successfully joined the search space",
            search_space_id=invite.search_space_id,
            search_space_name=search_space_name,
            role_name=role_name,
        )

    except HTTPException:
        raise
    except Exception as e:
        await session.rollback()
        logger.error(f"Failed to accept invite: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to accept invite: {e!s}"
        ) from e
+
+
+# ============ User Access Info ============
+
+
@router.get(
    "/searchspaces/{search_space_id}/my-access",
    response_model=UserSearchSpaceAccess,
)
async def get_my_access(
    search_space_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """
    Get the current user's access info for a search space.

    Returns the space name, ownership flag, role name, and the user's
    effective permission list.

    Raises:
        HTTPException: propagated from check_search_space_access when the
            user is not a member; 500 on unexpected failure.
    """
    try:
        # Raises if the user has no membership in this search space.
        membership = await check_search_space_access(session, user, search_space_id)

        # Get search space name
        result = await session.execute(
            select(SearchSpace).filter(SearchSpace.id == search_space_id)
        )
        search_space = result.scalars().first()

        # Get effective permissions for this user in this space.
        permissions = await get_user_permissions(session, user.id, search_space_id)

        return UserSearchSpaceAccess(
            search_space_id=search_space_id,
            search_space_name=search_space.name if search_space else "",
            is_owner=membership.is_owner,
            role_name=membership.role.name if membership.role else None,
            permissions=permissions,
        )

    except HTTPException:
        raise
    except Exception as e:
        # Log before wrapping — consistent with every other endpoint in
        # this module (the original silently swallowed the traceback).
        logger.error(f"Failed to get access info: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to get access info: {e!s}"
        ) from e
diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py
index bf397a352..624353e19 100644
--- a/surfsense_backend/app/routes/search_source_connectors_routes.py
+++ b/surfsense_backend/app/routes/search_source_connectors_routes.py
@@ -22,9 +22,9 @@ from sqlalchemy.future import select
from app.connectors.github_connector import GitHubConnector
from app.db import (
+ Permission,
SearchSourceConnector,
SearchSourceConnectorType,
- SearchSpace,
User,
async_session_maker,
get_async_session,
@@ -52,12 +52,12 @@ from app.tasks.connector_indexers import (
index_slack_messages,
)
from app.users import current_active_user
-from app.utils.check_ownership import check_ownership
from app.utils.periodic_scheduler import (
create_periodic_schedule,
delete_periodic_schedule,
update_periodic_schedule,
)
+from app.utils.rbac import check_permission
# Set up logging
logger = logging.getLogger(__name__)
@@ -108,19 +108,25 @@ async def create_search_source_connector(
):
"""
Create a new search source connector.
+ Requires CONNECTORS_CREATE permission.
- Each search space can have only one connector of each type per user (based on search_space_id, user_id, and connector_type).
+ Each search space can have only one connector of each type (based on search_space_id and connector_type).
The config must contain the appropriate keys for the connector type.
"""
try:
- # Check if the search space belongs to the user
- await check_ownership(session, SearchSpace, search_space_id, user)
+ # Check if user has permission to create connectors
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.CONNECTORS_CREATE.value,
+ "You don't have permission to create connectors in this search space",
+ )
- # Check if a connector with the same type already exists for this search space and user
+ # Check if a connector with the same type already exists for this search space
result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,
- SearchSourceConnector.user_id == user.id,
SearchSourceConnector.connector_type == connector.connector_type,
)
)
@@ -128,7 +134,7 @@ async def create_search_source_connector(
if existing_connector:
raise HTTPException(
status_code=409,
- detail=f"A connector with type {connector.connector_type} already exists in this search space. Each search space can have only one connector of each type per user.",
+ detail=f"A connector with type {connector.connector_type} already exists in this search space.",
)
# Prepare connector data
@@ -198,22 +204,34 @@ async def read_search_source_connectors(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """List all search source connectors for the current user, optionally filtered by search space."""
+ """
+ List all search source connectors for a search space.
+ Requires CONNECTORS_READ permission.
+ """
try:
- query = select(SearchSourceConnector).filter(
- SearchSourceConnector.user_id == user.id
+ if search_space_id is None:
+ raise HTTPException(
+ status_code=400,
+ detail="search_space_id is required",
+ )
+
+ # Check if user has permission to read connectors
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.CONNECTORS_READ.value,
+ "You don't have permission to view connectors in this search space",
)
- # Filter by search_space_id if provided
- if search_space_id is not None:
- # Verify the search space belongs to the user
- await check_ownership(session, SearchSpace, search_space_id, user)
- query = query.filter(
- SearchSourceConnector.search_space_id == search_space_id
- )
+ query = select(SearchSourceConnector).filter(
+ SearchSourceConnector.search_space_id == search_space_id
+ )
result = await session.execute(query.offset(skip).limit(limit))
return result.scalars().all()
+ except HTTPException:
+ raise
except Exception as e:
raise HTTPException(
status_code=500,
@@ -229,9 +247,32 @@ async def read_search_source_connector(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Get a specific search source connector by ID."""
+ """
+ Get a specific search source connector by ID.
+ Requires CONNECTORS_READ permission.
+ """
try:
- return await check_ownership(session, SearchSourceConnector, connector_id, user)
+ # Get the connector first
+ result = await session.execute(
+ select(SearchSourceConnector).filter(
+ SearchSourceConnector.id == connector_id
+ )
+ )
+ connector = result.scalars().first()
+
+ if not connector:
+ raise HTTPException(status_code=404, detail="Connector not found")
+
+ # Check permission
+ await check_permission(
+ session,
+ user,
+ connector.search_space_id,
+ Permission.CONNECTORS_READ.value,
+ "You don't have permission to view this connector",
+ )
+
+ return connector
except HTTPException:
raise
except Exception as e:
@@ -251,10 +292,25 @@ async def update_search_source_connector(
):
"""
Update a search source connector.
+ Requires CONNECTORS_UPDATE permission.
Handles partial updates, including merging changes into the 'config' field.
"""
- db_connector = await check_ownership(
- session, SearchSourceConnector, connector_id, user
+ # Get the connector first
+ result = await session.execute(
+ select(SearchSourceConnector).filter(SearchSourceConnector.id == connector_id)
+ )
+ db_connector = result.scalars().first()
+
+ if not db_connector:
+ raise HTTPException(status_code=404, detail="Connector not found")
+
+ # Check permission
+ await check_permission(
+ session,
+ user,
+ db_connector.search_space_id,
+ Permission.CONNECTORS_UPDATE.value,
+ "You don't have permission to update this connector",
)
# Convert the sparse update data (only fields present in request) to a dict
@@ -349,20 +405,19 @@ async def update_search_source_connector(
for key, value in update_data.items():
# Prevent changing connector_type if it causes a duplicate (check moved here)
if key == "connector_type" and value != db_connector.connector_type:
- result = await session.execute(
+ check_result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id
== db_connector.search_space_id,
- SearchSourceConnector.user_id == user.id,
SearchSourceConnector.connector_type == value,
SearchSourceConnector.id != connector_id,
)
)
- existing_connector = result.scalars().first()
+ existing_connector = check_result.scalars().first()
if existing_connector:
raise HTTPException(
status_code=409,
- detail=f"A connector with type {value} already exists in this search space. Each search space can have only one connector of each type per user.",
+ detail=f"A connector with type {value} already exists in this search space.",
)
setattr(db_connector, key, value)
@@ -425,10 +480,29 @@ async def delete_search_source_connector(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
- """Delete a search source connector."""
+ """
+ Delete a search source connector.
+ Requires CONNECTORS_DELETE permission.
+ """
try:
- db_connector = await check_ownership(
- session, SearchSourceConnector, connector_id, user
+ # Get the connector first
+ result = await session.execute(
+ select(SearchSourceConnector).filter(
+ SearchSourceConnector.id == connector_id
+ )
+ )
+ db_connector = result.scalars().first()
+
+ if not db_connector:
+ raise HTTPException(status_code=404, detail="Connector not found")
+
+ # Check permission
+ await check_permission(
+ session,
+ user,
+ db_connector.search_space_id,
+ Permission.CONNECTORS_DELETE.value,
+ "You don't have permission to delete this connector",
)
# Delete any periodic schedule associated with this connector
@@ -473,6 +547,7 @@ async def index_connector_content(
):
"""
Index content from a connector to a search space.
+ Requires CONNECTORS_UPDATE permission (to trigger indexing).
Currently supports:
- SLACK_CONNECTOR: Indexes messages from all accessible Slack channels
@@ -488,20 +563,29 @@ async def index_connector_content(
Args:
connector_id: ID of the connector to use
search_space_id: ID of the search space to store indexed content
- background_tasks: FastAPI background tasks
Returns:
Dictionary with indexing status
"""
try:
- # Check if the connector belongs to the user
- connector = await check_ownership(
- session, SearchSourceConnector, connector_id, user
+ # Get the connector first
+ result = await session.execute(
+ select(SearchSourceConnector).filter(
+ SearchSourceConnector.id == connector_id
+ )
)
+ connector = result.scalars().first()
- # Check if the search space belongs to the user
- _search_space = await check_ownership(
- session, SearchSpace, search_space_id, user
+ if not connector:
+ raise HTTPException(status_code=404, detail="Connector not found")
+
+ # Check if user has permission to update connectors (indexing is an update operation)
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.CONNECTORS_UPDATE.value,
+ "You don't have permission to index content in this search space",
)
# Handle different connector types
diff --git a/surfsense_backend/app/routes/search_spaces_routes.py b/surfsense_backend/app/routes/search_spaces_routes.py
index 7a01f2171..d04cf11ce 100644
--- a/surfsense_backend/app/routes/search_spaces_routes.py
+++ b/surfsense_backend/app/routes/search_spaces_routes.py
@@ -1,18 +1,77 @@
+import logging
from pathlib import Path
import yaml
from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy import func
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
-from app.db import SearchSpace, User, get_async_session
-from app.schemas import SearchSpaceCreate, SearchSpaceRead, SearchSpaceUpdate
+from app.db import (
+ Permission,
+ SearchSpace,
+ SearchSpaceMembership,
+ SearchSpaceRole,
+ User,
+ get_async_session,
+ get_default_roles_config,
+)
+from app.schemas import (
+ SearchSpaceCreate,
+ SearchSpaceRead,
+ SearchSpaceUpdate,
+ SearchSpaceWithStats,
+)
from app.users import current_active_user
-from app.utils.check_ownership import check_ownership
+from app.utils.rbac import check_permission, check_search_space_access
+
+logger = logging.getLogger(__name__)
router = APIRouter()
async def create_default_roles_and_membership(
    session: AsyncSession,
    search_space_id: int,
    owner_user_id,
) -> None:
    """
    Create default system roles for a search space and add the owner as a member.

    Does not commit; the caller owns the transaction boundary.

    Args:
        session: Database session
        search_space_id: The ID of the newly created search space
        owner_user_id: The UUID of the user who created the search space
    """
    # Create default roles, remembering the "Owner" role object so its
    # generated ID can be read after the flush.
    owner_role = None
    for role_config in get_default_roles_config():
        db_role = SearchSpaceRole(
            name=role_config["name"],
            description=role_config["description"],
            permissions=role_config["permissions"],
            is_default=role_config["is_default"],
            is_system_role=role_config["is_system_role"],
            search_space_id=search_space_id,
        )
        session.add(db_role)
        if role_config["name"] == "Owner":
            owner_role = db_role

    # Single flush assigns IDs to all roles at once (the original flushed
    # once per role, costing one round trip each).
    await session.flush()

    # Create owner membership (role_id is None if the config defines no
    # "Owner" role, matching the original behavior).
    owner_membership = SearchSpaceMembership(
        user_id=owner_user_id,
        search_space_id=search_space_id,
        role_id=owner_role.id if owner_role is not None else None,
        is_owner=True,
    )
    session.add(owner_membership)
+
+
@router.post("/searchspaces", response_model=SearchSpaceRead)
async def create_search_space(
search_space: SearchSpaceCreate,
@@ -27,6 +86,11 @@ async def create_search_space(
db_search_space = SearchSpace(**search_space_data, user_id=user.id)
session.add(db_search_space)
+ await session.flush() # Get the search space ID
+
+ # Create default roles and owner membership
+ await create_default_roles_and_membership(session, db_search_space.id, user.id)
+
await session.commit()
await session.refresh(db_search_space)
return db_search_space
@@ -34,26 +98,86 @@ async def create_search_space(
raise
except Exception as e:
await session.rollback()
+ logger.error(f"Failed to create search space: {e!s}", exc_info=True)
raise HTTPException(
status_code=500, detail=f"Failed to create search space: {e!s}"
) from e
-@router.get("/searchspaces", response_model=list[SearchSpaceRead])
+@router.get("/searchspaces", response_model=list[SearchSpaceWithStats])
async def read_search_spaces(
skip: int = 0,
limit: int = 200,
+ owned_only: bool = False,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Get all search spaces the user has access to, with member count and ownership info.
+
+ Args:
+ skip: Number of items to skip
+ limit: Maximum number of items to return
+ owned_only: If True, only return search spaces owned by the user.
+ If False (default), return all search spaces the user has access to.
+ """
try:
- result = await session.execute(
- select(SearchSpace)
- .filter(SearchSpace.user_id == user.id)
- .offset(skip)
- .limit(limit)
- )
- return result.scalars().all()
+ if owned_only:
+ # Return only search spaces where user is the original creator (user_id)
+ result = await session.execute(
+ select(SearchSpace)
+ .filter(SearchSpace.user_id == user.id)
+ .offset(skip)
+ .limit(limit)
+ )
+ else:
+ # Return all search spaces the user has membership in
+ result = await session.execute(
+ select(SearchSpace)
+ .join(SearchSpaceMembership)
+ .filter(SearchSpaceMembership.user_id == user.id)
+ .offset(skip)
+ .limit(limit)
+ )
+
+ search_spaces = result.scalars().all()
+
+ # Get member counts and ownership info for each search space
+ search_spaces_with_stats = []
+ for space in search_spaces:
+ # Get member count
+ count_result = await session.execute(
+ select(func.count(SearchSpaceMembership.id)).filter(
+ SearchSpaceMembership.search_space_id == space.id
+ )
+ )
+ member_count = count_result.scalar() or 1
+
+ # Check if current user is owner
+ ownership_result = await session.execute(
+ select(SearchSpaceMembership).filter(
+ SearchSpaceMembership.search_space_id == space.id,
+ SearchSpaceMembership.user_id == user.id,
+ SearchSpaceMembership.is_owner == True, # noqa: E712
+ )
+ )
+ is_owner = ownership_result.scalars().first() is not None
+
+ search_spaces_with_stats.append(
+ SearchSpaceWithStats(
+ id=space.id,
+ name=space.name,
+ description=space.description,
+ created_at=space.created_at,
+ user_id=space.user_id,
+ citations_enabled=space.citations_enabled,
+ qna_custom_instructions=space.qna_custom_instructions,
+ member_count=member_count,
+ is_owner=is_owner,
+ )
+ )
+
+ return search_spaces_with_stats
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to fetch search spaces: {e!s}"
@@ -97,10 +221,22 @@ async def read_search_space(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Get a specific search space by ID.
+ Requires SETTINGS_VIEW permission or membership.
+ """
try:
- search_space = await check_ownership(
- session, SearchSpace, search_space_id, user
+ # Check if user has access (is a member)
+ await check_search_space_access(session, user, search_space_id)
+
+ result = await session.execute(
+ select(SearchSpace).filter(SearchSpace.id == search_space_id)
)
+ search_space = result.scalars().first()
+
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found")
+
return search_space
except HTTPException:
@@ -118,10 +254,28 @@ async def update_search_space(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Update a search space.
+ Requires SETTINGS_UPDATE permission.
+ """
try:
- db_search_space = await check_ownership(
- session, SearchSpace, search_space_id, user
+ # Check permission
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.SETTINGS_UPDATE.value,
+ "You don't have permission to update this search space",
)
+
+ result = await session.execute(
+ select(SearchSpace).filter(SearchSpace.id == search_space_id)
+ )
+ db_search_space = result.scalars().first()
+
+ if not db_search_space:
+ raise HTTPException(status_code=404, detail="Search space not found")
+
update_data = search_space_update.model_dump(exclude_unset=True)
for key, value in update_data.items():
setattr(db_search_space, key, value)
@@ -143,10 +297,28 @@ async def delete_search_space(
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
+ """
+ Delete a search space.
+ Requires SETTINGS_DELETE permission (only owners have this by default).
+ """
try:
- db_search_space = await check_ownership(
- session, SearchSpace, search_space_id, user
+ # Check permission - only those with SETTINGS_DELETE can delete
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.SETTINGS_DELETE.value,
+ "You don't have permission to delete this search space",
)
+
+ result = await session.execute(
+ select(SearchSpace).filter(SearchSpace.id == search_space_id)
+ )
+ db_search_space = result.scalars().first()
+
+ if not db_search_space:
+ raise HTTPException(status_code=404, detail="Search space not found")
+
await session.delete(db_search_space)
await session.commit()
return {"message": "Search space deleted successfully"}
diff --git a/surfsense_backend/app/schemas/__init__.py b/surfsense_backend/app/schemas/__init__.py
index 41b2ce23c..d48d1b7f3 100644
--- a/surfsense_backend/app/schemas/__init__.py
+++ b/surfsense_backend/app/schemas/__init__.py
@@ -27,6 +27,23 @@ from .podcasts import (
PodcastRead,
PodcastUpdate,
)
+from .rbac_schemas import (
+ InviteAcceptRequest,
+ InviteAcceptResponse,
+ InviteCreate,
+ InviteInfoResponse,
+ InviteRead,
+ InviteUpdate,
+ MembershipRead,
+ MembershipReadWithUser,
+ MembershipUpdate,
+ PermissionInfo,
+ PermissionsListResponse,
+ RoleCreate,
+ RoleRead,
+ RoleUpdate,
+ UserSearchSpaceAccess,
+)
from .search_source_connector import (
SearchSourceConnectorBase,
SearchSourceConnectorCreate,
@@ -38,6 +55,7 @@ from .search_space import (
SearchSpaceCreate,
SearchSpaceRead,
SearchSpaceUpdate,
+ SearchSpaceWithStats,
)
from .users import UserCreate, UserRead, UserUpdate
@@ -60,6 +78,13 @@ __all__ = [
"ExtensionDocumentContent",
"ExtensionDocumentMetadata",
"IDModel",
+ # RBAC schemas
+ "InviteAcceptRequest",
+ "InviteAcceptResponse",
+ "InviteCreate",
+ "InviteInfoResponse",
+ "InviteRead",
+ "InviteUpdate",
"LLMConfigBase",
"LLMConfigCreate",
"LLMConfigRead",
@@ -69,12 +94,20 @@ __all__ = [
"LogFilter",
"LogRead",
"LogUpdate",
+ "MembershipRead",
+ "MembershipReadWithUser",
+ "MembershipUpdate",
"PaginatedResponse",
+ "PermissionInfo",
+ "PermissionsListResponse",
"PodcastBase",
"PodcastCreate",
"PodcastGenerateRequest",
"PodcastRead",
"PodcastUpdate",
+ "RoleCreate",
+ "RoleRead",
+ "RoleUpdate",
"SearchSourceConnectorBase",
"SearchSourceConnectorCreate",
"SearchSourceConnectorRead",
@@ -83,8 +116,10 @@ __all__ = [
"SearchSpaceCreate",
"SearchSpaceRead",
"SearchSpaceUpdate",
+ "SearchSpaceWithStats",
"TimestampModel",
"UserCreate",
"UserRead",
+ "UserSearchSpaceAccess",
"UserUpdate",
]
diff --git a/surfsense_backend/app/schemas/rbac_schemas.py b/surfsense_backend/app/schemas/rbac_schemas.py
new file mode 100644
index 000000000..736d40807
--- /dev/null
+++ b/surfsense_backend/app/schemas/rbac_schemas.py
@@ -0,0 +1,186 @@
+"""
+Pydantic schemas for RBAC (Role-Based Access Control) endpoints.
+"""
+
from datetime import datetime
from uuid import UUID

from pydantic import BaseModel, ConfigDict, Field
+
+# ============ Role Schemas ============
+
+
class RoleBase(BaseModel):
    """Base schema for roles.

    Fields shared by the role create and read schemas.
    """

    # Human-readable role name (e.g. "Owner"); 1-100 characters.
    name: str = Field(..., min_length=1, max_length=100)
    # Optional free-form description; at most 500 characters.
    description: str | None = Field(None, max_length=500)
    # Permission value strings granted to this role (empty list = none).
    permissions: list[str] = Field(default_factory=list)
    # Whether this role is the space's default for new members.
    is_default: bool = False
+
+
class RoleCreate(RoleBase):
    """Schema for creating a new role.

    Identical to ``RoleBase``; kept as a distinct type so route signatures
    stay explicit. (The redundant ``pass`` is dropped — the docstring
    already forms the class body.)
    """
+
+
class RoleUpdate(BaseModel):
    """Schema for updating a role (partial update).

    All fields are optional; only fields present in the request body are
    applied by the route handler.
    """

    name: str | None = Field(None, min_length=1, max_length=100)
    description: str | None = Field(None, max_length=500)
    permissions: list[str] | None = None
    is_default: bool | None = None
+
+
class RoleRead(RoleBase):
    """Schema for reading a role, including server-assigned fields."""

    id: int
    search_space_id: int
    # True for roles created automatically with the search space;
    # presumably protected from deletion by the routes — confirm there.
    is_system_role: bool
    created_at: datetime

    # Pydantic v2 configuration style, consistent with the other schema
    # modules in this package (which use ConfigDict, not `class Config`).
    model_config = ConfigDict(from_attributes=True)
+
+
+# ============ Membership Schemas ============
+
+
class MembershipBase(BaseModel):
    """Base schema for memberships.

    Currently declares no fields; kept as a shared extension point. (The
    redundant ``pass`` is dropped — the docstring already forms the body.)
    """
+
+
class MembershipUpdate(BaseModel):
    """Schema for updating a membership (change role)."""

    # New role to assign; the effect of an explicit None depends on the
    # route's update logic — TODO confirm against the handler.
    role_id: int | None = None
+
+
class MembershipRead(BaseModel):
    """Schema for reading a membership."""

    id: int
    user_id: UUID
    search_space_id: int
    role_id: int | None
    is_owner: bool
    joined_at: datetime
    created_at: datetime
    # Nested role info
    role: RoleRead | None = None
    # User email (populated separately by the route, not via ORM mapping)
    user_email: str | None = None

    # Pydantic v2 configuration style, consistent with the other schema
    # modules in this package (which use ConfigDict, not `class Config`).
    model_config = ConfigDict(from_attributes=True)
+
+
class MembershipReadWithUser(MembershipRead):
    """Schema for reading a membership with user details."""

    # user_email is already declared on MembershipRead with the same type
    # and default, so only the extra user field is added here.
    user_is_active: bool | None = None
+
+
+# ============ Invite Schemas ============
+
+
class InviteBase(BaseModel):
    """Base schema for invites.

    Fields shared by the invite create and read schemas.
    """

    # Optional label for the invite link; at most 100 characters.
    name: str | None = Field(None, max_length=100)
    # Role granted on acceptance; None means the space's default role.
    role_id: int | None = None
    # Expiry timestamp; None means the invite never expires.
    expires_at: datetime | None = None
    # Maximum number of accepts (>= 1); None means unlimited.
    max_uses: int | None = Field(None, ge=1)
+
+
class InviteCreate(InviteBase):
    """Schema for creating a new invite.

    Identical to ``InviteBase``; kept as a distinct type so route
    signatures stay explicit. (The redundant ``pass`` is dropped — the
    docstring already forms the class body.)
    """
+
+
class InviteUpdate(BaseModel):
    """Schema for updating an invite (partial update).

    All fields are optional; only fields present in the request body are
    applied by the route handler.
    """

    name: str | None = Field(None, max_length=100)
    role_id: int | None = None
    expires_at: datetime | None = None
    max_uses: int | None = Field(None, ge=1)
    # Allows deactivating an invite without deleting it.
    is_active: bool | None = None
+
+
class InviteRead(InviteBase):
    """Schema for reading an invite, including server-assigned fields."""

    id: int
    # Opaque code used in the public accept/info endpoints.
    invite_code: str
    search_space_id: int
    created_by_id: UUID | None
    uses_count: int
    is_active: bool
    created_at: datetime
    # Nested role info
    role: RoleRead | None = None

    # Pydantic v2 configuration style, consistent with the other schema
    # modules in this package (which use ConfigDict, not `class Config`).
    model_config = ConfigDict(from_attributes=True)
+
+
class InviteAcceptRequest(BaseModel):
    """Schema for accepting an invite."""

    # The invite's opaque code; must be non-empty.
    invite_code: str = Field(..., min_length=1)
+
+
class InviteAcceptResponse(BaseModel):
    """Response schema for accepting an invite."""

    # Human-readable success message.
    message: str
    search_space_id: int
    search_space_name: str
    # Name of the role granted; None if no role could be resolved.
    role_name: str | None
+
+
class InviteInfoResponse(BaseModel):
    """Response schema for getting invite info (public endpoint).

    Invalid invites are expressed with ``is_valid=False`` and a message
    instead of an HTTP error.
    """

    search_space_name: str
    role_name: str | None
    is_valid: bool
    # Explanation when is_valid is False; None for valid invites.
    message: str | None = None
+
+
+# ============ Permission Schemas ============
+
+
class PermissionInfo(BaseModel):
    """Schema for permission information."""

    # Machine-readable permission value (as stored on roles).
    value: str
    # Human-readable display name.
    name: str
    # Grouping label for the UI (e.g. which feature area it belongs to).
    category: str
+
+
class PermissionsListResponse(BaseModel):
    """Response schema for listing all available permissions."""

    permissions: list[PermissionInfo]
+
+
+# ============ User Access Info ============
+
+
class UserSearchSpaceAccess(BaseModel):
    """Schema for user's access info in a search space."""

    search_space_id: int
    search_space_name: str
    # True when the user's membership carries the owner flag.
    is_owner: bool
    # Name of the user's role; None when no role is assigned.
    role_name: str | None
    # Effective permission value strings for this user in this space.
    permissions: list[str]
diff --git a/surfsense_backend/app/schemas/search_space.py b/surfsense_backend/app/schemas/search_space.py
index 49cc0791f..729ff4e7d 100644
--- a/surfsense_backend/app/schemas/search_space.py
+++ b/surfsense_backend/app/schemas/search_space.py
@@ -34,3 +34,10 @@ class SearchSpaceRead(SearchSpaceBase, IDModel, TimestampModel):
qna_custom_instructions: str | None = None
model_config = ConfigDict(from_attributes=True)
+
+
class SearchSpaceWithStats(SearchSpaceRead):
    """Extended search space info with member count and ownership status."""

    # Number of memberships in the space; defaults to 1 (the owner).
    member_count: int = 1
    # Whether the requesting user holds the owner flag for this space.
    is_owner: bool = False
diff --git a/surfsense_backend/app/services/connector_service.py b/surfsense_backend/app/services/connector_service.py
index 3445d69f7..20a9ffa32 100644
--- a/surfsense_backend/app/services/connector_service.py
+++ b/surfsense_backend/app/services/connector_service.py
@@ -15,18 +15,17 @@ from app.db import (
Document,
SearchSourceConnector,
SearchSourceConnectorType,
- SearchSpace,
)
from app.retriver.chunks_hybrid_search import ChucksHybridSearchRetriever
from app.retriver.documents_hybrid_search import DocumentHybridSearchRetriever
class ConnectorService:
- def __init__(self, session: AsyncSession, user_id: str | None = None):
+ def __init__(self, session: AsyncSession, search_space_id: int | None = None):
self.session = session
self.chunk_retriever = ChucksHybridSearchRetriever(session)
self.document_retriever = DocumentHybridSearchRetriever(session)
- self.user_id = user_id
+ self.search_space_id = search_space_id
self.source_id_counter = (
100000 # High starting value to avoid collisions with existing IDs
)
@@ -36,23 +35,22 @@ class ConnectorService:
async def initialize_counter(self):
"""
- Initialize the source_id_counter based on the total number of chunks for the user.
+ Initialize the source_id_counter based on the total number of chunks for the search space.
This ensures unique IDs across different sessions.
"""
- if self.user_id:
+ if self.search_space_id:
try:
- # Count total chunks for documents belonging to this user
+ # Count total chunks for documents belonging to this search space
result = await self.session.execute(
select(func.count(Chunk.id))
.join(Document)
- .join(SearchSpace)
- .filter(SearchSpace.user_id == self.user_id)
+ .filter(Document.search_space_id == self.search_space_id)
)
chunk_count = result.scalar() or 0
self.source_id_counter = chunk_count + 1
print(
- f"Initialized source_id_counter to {self.source_id_counter} for user {self.user_id}"
+ f"Initialized source_id_counter to {self.source_id_counter} for search space {self.search_space_id}"
)
except Exception as e:
print(f"Error initializing source_id_counter: {e!s}")
@@ -62,7 +60,6 @@ class ConnectorService:
async def search_crawled_urls(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -72,7 +69,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@@ -84,7 +80,6 @@ class ConnectorService:
crawled_urls_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="CRAWLED_URL",
)
@@ -92,7 +87,6 @@ class ConnectorService:
crawled_urls_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="CRAWLED_URL",
)
@@ -171,7 +165,6 @@ class ConnectorService:
async def search_files(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -186,7 +179,6 @@ class ConnectorService:
files_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="FILE",
)
@@ -194,7 +186,6 @@ class ConnectorService:
files_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="FILE",
)
@@ -274,43 +265,35 @@ class ConnectorService:
async def get_connector_by_type(
self,
- user_id: str,
connector_type: SearchSourceConnectorType,
- search_space_id: int | None = None,
+ search_space_id: int,
) -> SearchSourceConnector | None:
"""
- Get a connector by type for a specific user and optionally a search space
+ Get a connector by type for a specific search space
Args:
- user_id: The user's ID
connector_type: The connector type to retrieve
- search_space_id: Optional search space ID to filter by
+ search_space_id: The search space ID to filter by
Returns:
Optional[SearchSourceConnector]: The connector if found, None otherwise
"""
query = select(SearchSourceConnector).filter(
- SearchSourceConnector.user_id == user_id,
+ SearchSourceConnector.search_space_id == search_space_id,
SearchSourceConnector.connector_type == connector_type,
)
- if search_space_id is not None:
- query = query.filter(
- SearchSourceConnector.search_space_id == search_space_id
- )
-
result = await self.session.execute(query)
return result.scalars().first()
async def search_tavily(
- self, user_query: str, user_id: str, search_space_id: int, top_k: int = 20
+ self, user_query: str, search_space_id: int, top_k: int = 20
) -> tuple:
"""
Search using Tavily API and return both the source information and documents
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID
top_k: Maximum number of results to return
@@ -319,7 +302,7 @@ class ConnectorService:
"""
# Get Tavily connector configuration
tavily_connector = await self.get_connector_by_type(
- user_id, SearchSourceConnectorType.TAVILY_API, search_space_id
+ SearchSourceConnectorType.TAVILY_API, search_space_id
)
if not tavily_connector:
@@ -412,7 +395,6 @@ class ConnectorService:
async def search_searxng(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
) -> tuple:
@@ -420,7 +402,7 @@ class ConnectorService:
Search using a configured SearxNG instance and return both sources and documents.
"""
searx_connector = await self.get_connector_by_type(
- user_id, SearchSourceConnectorType.SEARXNG_API, search_space_id
+ SearchSourceConnectorType.SEARXNG_API, search_space_id
)
if not searx_connector:
@@ -598,7 +580,6 @@ class ConnectorService:
async def search_baidu(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
) -> tuple:
@@ -610,7 +591,6 @@ class ConnectorService:
Args:
user_query: User's search query
- user_id: User ID
search_space_id: Search space ID
top_k: Maximum number of results to return
@@ -619,7 +599,7 @@ class ConnectorService:
"""
# Get Baidu connector configuration
baidu_connector = await self.get_connector_by_type(
- user_id, SearchSourceConnectorType.BAIDU_SEARCH_API, search_space_id
+ SearchSourceConnectorType.BAIDU_SEARCH_API, search_space_id
)
if not baidu_connector:
@@ -824,7 +804,6 @@ class ConnectorService:
async def search_slack(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -839,7 +818,6 @@ class ConnectorService:
slack_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="SLACK_CONNECTOR",
)
@@ -847,7 +825,6 @@ class ConnectorService:
slack_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="SLACK_CONNECTOR",
)
@@ -912,7 +889,6 @@ class ConnectorService:
async def search_notion(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -922,7 +898,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
@@ -933,7 +908,6 @@ class ConnectorService:
notion_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="NOTION_CONNECTOR",
)
@@ -941,7 +915,6 @@ class ConnectorService:
notion_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="NOTION_CONNECTOR",
)
@@ -1009,7 +982,6 @@ class ConnectorService:
async def search_extension(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -1019,7 +991,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
@@ -1030,7 +1001,6 @@ class ConnectorService:
extension_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="EXTENSION",
)
@@ -1038,7 +1008,6 @@ class ConnectorService:
extension_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="EXTENSION",
)
@@ -1130,7 +1099,6 @@ class ConnectorService:
async def search_youtube(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -1140,7 +1108,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
@@ -1151,7 +1118,6 @@ class ConnectorService:
youtube_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="YOUTUBE_VIDEO",
)
@@ -1159,7 +1125,6 @@ class ConnectorService:
youtube_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="YOUTUBE_VIDEO",
)
@@ -1227,7 +1192,6 @@ class ConnectorService:
async def search_github(
self,
user_query: str,
- user_id: int,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -1242,7 +1206,6 @@ class ConnectorService:
github_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="GITHUB_CONNECTOR",
)
@@ -1250,7 +1213,6 @@ class ConnectorService:
github_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="GITHUB_CONNECTOR",
)
@@ -1302,7 +1264,6 @@ class ConnectorService:
async def search_linear(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -1312,7 +1273,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
@@ -1323,7 +1283,6 @@ class ConnectorService:
linear_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="LINEAR_CONNECTOR",
)
@@ -1331,7 +1290,6 @@ class ConnectorService:
linear_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="LINEAR_CONNECTOR",
)
@@ -1411,7 +1369,6 @@ class ConnectorService:
async def search_jira(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -1421,7 +1378,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@@ -1433,7 +1389,6 @@ class ConnectorService:
jira_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="JIRA_CONNECTOR",
)
@@ -1441,7 +1396,6 @@ class ConnectorService:
jira_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="JIRA_CONNECTOR",
)
@@ -1532,7 +1486,6 @@ class ConnectorService:
async def search_google_calendar(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -1542,7 +1495,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@@ -1554,7 +1506,6 @@ class ConnectorService:
calendar_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="GOOGLE_CALENDAR_CONNECTOR",
)
@@ -1562,7 +1513,6 @@ class ConnectorService:
calendar_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="GOOGLE_CALENDAR_CONNECTOR",
)
@@ -1665,7 +1615,6 @@ class ConnectorService:
async def search_airtable(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -1675,7 +1624,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@@ -1687,7 +1635,6 @@ class ConnectorService:
airtable_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="AIRTABLE_CONNECTOR",
)
@@ -1695,7 +1642,6 @@ class ConnectorService:
airtable_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="AIRTABLE_CONNECTOR",
)
@@ -1753,7 +1699,6 @@ class ConnectorService:
async def search_google_gmail(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -1763,7 +1708,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@@ -1775,7 +1719,6 @@ class ConnectorService:
gmail_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="GOOGLE_GMAIL_CONNECTOR",
)
@@ -1783,7 +1726,6 @@ class ConnectorService:
gmail_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="GOOGLE_GMAIL_CONNECTOR",
)
@@ -1877,7 +1819,6 @@ class ConnectorService:
async def search_confluence(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -1887,7 +1828,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@@ -1899,7 +1839,6 @@ class ConnectorService:
confluence_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="CONFLUENCE_CONNECTOR",
)
@@ -1907,7 +1846,6 @@ class ConnectorService:
confluence_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="CONFLUENCE_CONNECTOR",
)
@@ -1972,7 +1910,6 @@ class ConnectorService:
async def search_clickup(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -1982,7 +1919,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@@ -1994,7 +1930,6 @@ class ConnectorService:
clickup_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="CLICKUP_CONNECTOR",
)
@@ -2002,7 +1937,6 @@ class ConnectorService:
clickup_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="CLICKUP_CONNECTOR",
)
@@ -2088,7 +2022,6 @@ class ConnectorService:
async def search_linkup(
self,
user_query: str,
- user_id: str,
search_space_id: int,
mode: str = "standard",
) -> tuple:
@@ -2097,7 +2030,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID
mode: Search depth mode, can be "standard" or "deep"
@@ -2106,7 +2038,7 @@ class ConnectorService:
"""
# Get Linkup connector configuration
linkup_connector = await self.get_connector_by_type(
- user_id, SearchSourceConnectorType.LINKUP_API, search_space_id
+ SearchSourceConnectorType.LINKUP_API, search_space_id
)
if not linkup_connector:
@@ -2211,7 +2143,6 @@ class ConnectorService:
async def search_discord(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -2221,7 +2152,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
@@ -2232,7 +2162,6 @@ class ConnectorService:
discord_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="DISCORD_CONNECTOR",
)
@@ -2240,7 +2169,6 @@ class ConnectorService:
discord_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="DISCORD_CONNECTOR",
)
@@ -2308,7 +2236,6 @@ class ConnectorService:
async def search_luma(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -2318,7 +2245,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@@ -2330,7 +2256,6 @@ class ConnectorService:
luma_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="LUMA_CONNECTOR",
)
@@ -2338,7 +2263,6 @@ class ConnectorService:
luma_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="LUMA_CONNECTOR",
)
@@ -2466,7 +2390,6 @@ class ConnectorService:
async def search_elasticsearch(
self,
user_query: str,
- user_id: str,
search_space_id: int,
top_k: int = 20,
search_mode: SearchMode = SearchMode.CHUNKS,
@@ -2476,7 +2399,6 @@ class ConnectorService:
Args:
user_query: The user's query
- user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
search_mode: Search mode (CHUNKS or DOCUMENTS)
@@ -2488,7 +2410,6 @@ class ConnectorService:
elasticsearch_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="ELASTICSEARCH_CONNECTOR",
)
@@ -2496,7 +2417,6 @@ class ConnectorService:
elasticsearch_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
- user_id=user_id,
search_space_id=search_space_id,
document_type="ELASTICSEARCH_CONNECTOR",
)
diff --git a/surfsense_backend/app/services/llm_service.py b/surfsense_backend/app/services/llm_service.py
index ea9140f8e..c3270b59e 100644
--- a/surfsense_backend/app/services/llm_service.py
+++ b/surfsense_backend/app/services/llm_service.py
@@ -7,7 +7,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
-from app.db import LLMConfig, UserSearchSpacePreference
+from app.db import LLMConfig, SearchSpace
# Configure litellm to automatically drop unsupported parameters
litellm.drop_params = True
@@ -144,15 +144,16 @@ async def validate_llm_config(
return False, error_msg
-async def get_user_llm_instance(
- session: AsyncSession, user_id: str, search_space_id: int, role: str
+async def get_search_space_llm_instance(
+ session: AsyncSession, search_space_id: int, role: str
) -> ChatLiteLLM | None:
"""
- Get a ChatLiteLLM instance for a specific user, search space, and role.
+ Get a ChatLiteLLM instance for a specific search space and role.
+
+ LLM preferences are stored at the search space level and shared by all members.
Args:
session: Database session
- user_id: User ID
search_space_id: Search Space ID
role: LLM role ('long_context', 'fast', or 'strategic')
@@ -160,37 +161,30 @@ async def get_user_llm_instance(
ChatLiteLLM instance or None if not found
"""
try:
- # Get user's LLM preferences for this search space
+ # Get the search space with its LLM preferences
result = await session.execute(
- select(UserSearchSpacePreference).where(
- UserSearchSpacePreference.user_id == user_id,
- UserSearchSpacePreference.search_space_id == search_space_id,
- )
+ select(SearchSpace).where(SearchSpace.id == search_space_id)
)
- preference = result.scalars().first()
+ search_space = result.scalars().first()
- if not preference:
- logger.error(
- f"No LLM preferences found for user {user_id} in search space {search_space_id}"
- )
+ if not search_space:
+ logger.error(f"Search space {search_space_id} not found")
return None
# Get the appropriate LLM config ID based on role
llm_config_id = None
if role == LLMRole.LONG_CONTEXT:
- llm_config_id = preference.long_context_llm_id
+ llm_config_id = search_space.long_context_llm_id
elif role == LLMRole.FAST:
- llm_config_id = preference.fast_llm_id
+ llm_config_id = search_space.fast_llm_id
elif role == LLMRole.STRATEGIC:
- llm_config_id = preference.strategic_llm_id
+ llm_config_id = search_space.strategic_llm_id
else:
logger.error(f"Invalid LLM role: {role}")
return None
if not llm_config_id:
- logger.error(
- f"No {role} LLM configured for user {user_id} in search space {search_space_id}"
- )
+ logger.error(f"No {role} LLM configured for search space {search_space_id}")
return None
# Check if this is a global config (negative ID)
@@ -331,31 +325,63 @@ async def get_user_llm_instance(
except Exception as e:
logger.error(
- f"Error getting LLM instance for user {user_id}, role {role}: {e!s}"
+ f"Error getting LLM instance for search space {search_space_id}, role {role}: {e!s}"
)
return None
+async def get_long_context_llm(
+ session: AsyncSession, search_space_id: int
+) -> ChatLiteLLM | None:
+ """Get the search space's long context LLM instance."""
+ return await get_search_space_llm_instance(
+ session, search_space_id, LLMRole.LONG_CONTEXT
+ )
+
+
+async def get_fast_llm(
+ session: AsyncSession, search_space_id: int
+) -> ChatLiteLLM | None:
+ """Get the search space's fast LLM instance."""
+ return await get_search_space_llm_instance(session, search_space_id, LLMRole.FAST)
+
+
+async def get_strategic_llm(
+ session: AsyncSession, search_space_id: int
+) -> ChatLiteLLM | None:
+ """Get the search space's strategic LLM instance."""
+ return await get_search_space_llm_instance(
+ session, search_space_id, LLMRole.STRATEGIC
+ )
+
+
+# Backward-compatible aliases (deprecated - will be removed in future versions)
+async def get_user_llm_instance(
+ session: AsyncSession, user_id: str, search_space_id: int, role: str
+) -> ChatLiteLLM | None:
+ """
+ Deprecated: Use get_search_space_llm_instance instead.
+ LLM preferences are now stored at the search space level, not per-user.
+ """
+ return await get_search_space_llm_instance(session, search_space_id, role)
+
+
async def get_user_long_context_llm(
session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
- """Get user's long context LLM instance for a specific search space."""
- return await get_user_llm_instance(
- session, user_id, search_space_id, LLMRole.LONG_CONTEXT
- )
+ """Deprecated: Use get_long_context_llm instead."""
+ return await get_long_context_llm(session, search_space_id)
async def get_user_fast_llm(
session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
- """Get user's fast LLM instance for a specific search space."""
- return await get_user_llm_instance(session, user_id, search_space_id, LLMRole.FAST)
+ """Deprecated: Use get_fast_llm instead."""
+ return await get_fast_llm(session, search_space_id)
async def get_user_strategic_llm(
session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
- """Get user's strategic LLM instance for a specific search space."""
- return await get_user_llm_instance(
- session, user_id, search_space_id, LLMRole.STRATEGIC
- )
+ """Deprecated: Use get_strategic_llm instead."""
+ return await get_strategic_llm(session, search_space_id)
diff --git a/surfsense_backend/app/services/query_service.py b/surfsense_backend/app/services/query_service.py
index d2759ab27..0521dc942 100644
--- a/surfsense_backend/app/services/query_service.py
+++ b/surfsense_backend/app/services/query_service.py
@@ -4,7 +4,7 @@ from typing import Any
from langchain.schema import AIMessage, HumanMessage, SystemMessage
from sqlalchemy.ext.asyncio import AsyncSession
-from app.services.llm_service import get_user_strategic_llm
+from app.services.llm_service import get_strategic_llm
class QueryService:
@@ -16,19 +16,17 @@ class QueryService:
async def reformulate_query_with_chat_history(
user_query: str,
session: AsyncSession,
- user_id: str,
search_space_id: int,
chat_history_str: str | None = None,
) -> str:
"""
- Reformulate the user query using the user's strategic LLM to make it more
+ Reformulate the user query using the search space's strategic LLM to make it more
effective for information retrieval and research purposes.
Args:
user_query: The original user query
- session: Database session for accessing user LLM configs
- user_id: User ID to get their specific LLM configuration
- search_space_id: Search Space ID to get user's LLM preferences
+ session: Database session for accessing LLM configs
+ search_space_id: Search Space ID to get LLM preferences
chat_history_str: Optional chat history string
Returns:
@@ -38,11 +36,11 @@ class QueryService:
return user_query
try:
- # Get the user's strategic LLM instance
- llm = await get_user_strategic_llm(session, user_id, search_space_id)
+ # Get the search space's strategic LLM instance
+ llm = await get_strategic_llm(session, search_space_id)
if not llm:
print(
- f"Warning: No strategic LLM configured for user {user_id} in search space {search_space_id}. Using original query."
+ f"Warning: No strategic LLM configured for search space {search_space_id}. Using original query."
)
return user_query
diff --git a/surfsense_backend/app/utils/check_ownership.py b/surfsense_backend/app/utils/check_ownership.py
deleted file mode 100644
index 0bd290ff3..000000000
--- a/surfsense_backend/app/utils/check_ownership.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from fastapi import HTTPException
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select
-
-from app.db import User
-
-
-# Helper function to check user ownership
-async def check_ownership(session: AsyncSession, model, item_id: int, user: User):
- item = await session.execute(
- select(model).filter(model.id == item_id, model.user_id == user.id)
- )
- item = item.scalars().first()
- if not item:
- raise HTTPException(
- status_code=404,
- detail="Item not found or you don't have permission to access it",
- )
- return item
diff --git a/surfsense_backend/app/utils/rbac.py b/surfsense_backend/app/utils/rbac.py
new file mode 100644
index 000000000..6cb180d80
--- /dev/null
+++ b/surfsense_backend/app/utils/rbac.py
@@ -0,0 +1,274 @@
+"""
+RBAC (Role-Based Access Control) utility functions.
+Provides helpers for checking user permissions in search spaces.
+"""
+
+import secrets
+from uuid import UUID
+
+from fastapi import HTTPException
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.future import select
+from sqlalchemy.orm import selectinload
+
+from app.db import (
+ Permission,
+ SearchSpace,
+ SearchSpaceMembership,
+ SearchSpaceRole,
+ User,
+ has_permission,
+)
+
+
+async def get_user_membership(
+ session: AsyncSession,
+ user_id: UUID,
+ search_space_id: int,
+) -> SearchSpaceMembership | None:
+ """
+ Get the user's membership in a search space.
+
+ Args:
+ session: Database session
+ user_id: User UUID
+ search_space_id: Search space ID
+
+ Returns:
+ SearchSpaceMembership if found, None otherwise
+ """
+ result = await session.execute(
+ select(SearchSpaceMembership)
+ .options(selectinload(SearchSpaceMembership.role))
+ .filter(
+ SearchSpaceMembership.user_id == user_id,
+ SearchSpaceMembership.search_space_id == search_space_id,
+ )
+ )
+ return result.scalars().first()
+
+
+async def get_user_permissions(
+ session: AsyncSession,
+ user_id: UUID,
+ search_space_id: int,
+) -> list[str]:
+ """
+ Get the user's permissions in a search space.
+
+ Args:
+ session: Database session
+ user_id: User UUID
+ search_space_id: Search space ID
+
+ Returns:
+ List of permission strings
+ """
+ membership = await get_user_membership(session, user_id, search_space_id)
+
+ if not membership:
+ return []
+
+ # Owners always have full access
+ if membership.is_owner:
+ return [Permission.FULL_ACCESS.value]
+
+ # Get permissions from role
+ if membership.role:
+ return membership.role.permissions or []
+
+ return []
+
+
+async def check_permission(
+ session: AsyncSession,
+ user: User,
+ search_space_id: int,
+ required_permission: str,
+ error_message: str = "You don't have permission to perform this action",
+) -> SearchSpaceMembership:
+ """
+ Check if a user has a specific permission in a search space.
+ Raises HTTPException if permission is denied.
+
+ Args:
+ session: Database session
+ user: User object
+ search_space_id: Search space ID
+ required_permission: Permission string to check
+ error_message: Custom error message for permission denied
+
+ Returns:
+ SearchSpaceMembership if permission granted
+
+ Raises:
+ HTTPException: If user doesn't have access or permission
+ """
+ membership = await get_user_membership(session, user.id, search_space_id)
+
+ if not membership:
+ raise HTTPException(
+ status_code=403,
+ detail="You don't have access to this search space",
+ )
+
+ # Get user's permissions
+ if membership.is_owner:
+ permissions = [Permission.FULL_ACCESS.value]
+ elif membership.role:
+ permissions = membership.role.permissions or []
+ else:
+ permissions = []
+
+ if not has_permission(permissions, required_permission):
+ raise HTTPException(status_code=403, detail=error_message)
+
+ return membership
+
+
+async def check_search_space_access(
+ session: AsyncSession,
+ user: User,
+ search_space_id: int,
+) -> SearchSpaceMembership:
+ """
+ Check if a user has any access to a search space.
+ This is used for basic access control (user is a member).
+
+ Args:
+ session: Database session
+ user: User object
+ search_space_id: Search space ID
+
+ Returns:
+ SearchSpaceMembership if user has access
+
+ Raises:
+ HTTPException: If user doesn't have access
+ """
+ membership = await get_user_membership(session, user.id, search_space_id)
+
+ if not membership:
+ raise HTTPException(
+ status_code=403,
+ detail="You don't have access to this search space",
+ )
+
+ return membership
+
+
+async def is_search_space_owner(
+ session: AsyncSession,
+ user_id: UUID,
+ search_space_id: int,
+) -> bool:
+ """
+ Check if a user is the owner of a search space.
+
+ Args:
+ session: Database session
+ user_id: User UUID
+ search_space_id: Search space ID
+
+ Returns:
+ True if user is the owner, False otherwise
+ """
+ membership = await get_user_membership(session, user_id, search_space_id)
+ return membership is not None and membership.is_owner
+
+
+async def get_search_space_with_access_check(
+ session: AsyncSession,
+ user: User,
+ search_space_id: int,
+ required_permission: str | None = None,
+) -> tuple[SearchSpace, SearchSpaceMembership]:
+ """
+ Get a search space with access and optional permission check.
+
+ Args:
+ session: Database session
+ user: User object
+ search_space_id: Search space ID
+ required_permission: Optional permission to check
+
+ Returns:
+ Tuple of (SearchSpace, SearchSpaceMembership)
+
+ Raises:
+ HTTPException: If search space not found or user lacks access/permission
+ """
+ # Get the search space
+ result = await session.execute(
+ select(SearchSpace).filter(SearchSpace.id == search_space_id)
+ )
+ search_space = result.scalars().first()
+
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found")
+
+ # Check access
+ if required_permission:
+ membership = await check_permission(
+ session, user, search_space_id, required_permission
+ )
+ else:
+ membership = await check_search_space_access(session, user, search_space_id)
+
+ return search_space, membership
+
+
+def generate_invite_code() -> str:
+ """
+    Generate a cryptographically random invite code for search space invites.
+
+ Returns:
+ A 32-character URL-safe invite code
+ """
+ return secrets.token_urlsafe(24)
+
+
+async def get_default_role(
+ session: AsyncSession,
+ search_space_id: int,
+) -> SearchSpaceRole | None:
+ """
+ Get the default role for a search space (used when accepting invites without a specific role).
+
+ Args:
+ session: Database session
+ search_space_id: Search space ID
+
+ Returns:
+ Default SearchSpaceRole or None
+ """
+ result = await session.execute(
+ select(SearchSpaceRole).filter(
+ SearchSpaceRole.search_space_id == search_space_id,
+ SearchSpaceRole.is_default == True, # noqa: E712
+ )
+ )
+ return result.scalars().first()
+
+
+async def get_owner_role(
+ session: AsyncSession,
+ search_space_id: int,
+) -> SearchSpaceRole | None:
+ """
+ Get the Owner role for a search space.
+
+ Args:
+ session: Database session
+ search_space_id: Search space ID
+
+ Returns:
+ Owner SearchSpaceRole or None
+ """
+ result = await session.execute(
+ select(SearchSpaceRole).filter(
+ SearchSpaceRole.search_space_id == search_space_id,
+ SearchSpaceRole.name == "Owner",
+ )
+ )
+ return result.scalars().first()
diff --git a/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx b/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx
index 4ec8046a4..105c21e26 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx
@@ -18,6 +18,7 @@ import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/com
import { Separator } from "@/components/ui/separator";
import { SidebarInset, SidebarProvider, SidebarTrigger } from "@/components/ui/sidebar";
import { useLLMPreferences } from "@/hooks/use-llm-configs";
+import { useUserAccess } from "@/hooks/use-rbac";
import { cn } from "@/lib/utils";
export function DashboardClientLayout({
@@ -60,11 +61,15 @@ export function DashboardClientLayout({
}, [activeChatId, isChatPannelOpen]);
const { loading, error, isOnboardingComplete } = useLLMPreferences(searchSpaceIdNum);
+ const { access, loading: accessLoading } = useUserAccess(searchSpaceIdNum);
const [hasCheckedOnboarding, setHasCheckedOnboarding] = useState(false);
// Skip onboarding check if we're already on the onboarding page
const isOnboardingPage = pathname?.includes("/onboard");
+ // Only owners should see onboarding - invited members use existing config
+ const isOwner = access?.is_owner ?? false;
+
// Translate navigation items
const tNavMenu = useTranslations("nav_menu");
const translatedNavMain = useMemo(() => {
@@ -102,11 +107,13 @@ export function DashboardClientLayout({
return;
}
- // Only check once after preferences have loaded
- if (!loading && !hasCheckedOnboarding) {
+ // Wait for both preferences and access data to load
+ if (!loading && !accessLoading && !hasCheckedOnboarding) {
const onboardingComplete = isOnboardingComplete();
- if (!onboardingComplete) {
+ // Only redirect to onboarding if user is the owner and onboarding is not complete
+ // Invited members (non-owners) should skip onboarding and use existing config
+ if (!onboardingComplete && isOwner) {
router.push(`/dashboard/${searchSpaceId}/onboard`);
}
@@ -114,8 +121,10 @@ export function DashboardClientLayout({
}
}, [
loading,
+ accessLoading,
isOnboardingComplete,
isOnboardingPage,
+ isOwner,
router,
searchSpaceId,
hasCheckedOnboarding,
@@ -145,7 +154,7 @@ export function DashboardClientLayout({
}, [chat_id, search_space_id]);
// Show loading screen while checking onboarding status (only on first load)
- if (!hasCheckedOnboarding && loading && !isOnboardingPage) {
+ if (!hasCheckedOnboarding && (loading || accessLoading) && !isOnboardingPage) {
return (
diff --git a/surfsense_web/app/dashboard/[search_space_id]/layout.tsx b/surfsense_web/app/dashboard/[search_space_id]/layout.tsx
index d09eaea94..5625d7450 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/layout.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/layout.tsx
@@ -52,6 +52,12 @@ export default function DashboardLayout({
},
],
},
+ {
+ title: "Team",
+ url: `/dashboard/${search_space_id}/team`,
+ icon: "Users",
+ items: [],
+ },
{
title: "Settings",
url: `/dashboard/${search_space_id}/settings`,
diff --git a/surfsense_web/app/dashboard/[search_space_id]/logs/(manage)/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/logs/(manage)/page.tsx
index 95839d10d..40a3e46a1 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/logs/(manage)/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/logs/(manage)/page.tsx
@@ -1126,7 +1126,7 @@ function LogRowActions({ row, t }: { row: Row; t: (key: string) => string }
setIsDeleting(true);
try {
await deleteLog(log.id);
- toast.success(t("log_deleted_success"));
+ // toast.success(t("log_deleted_success"));
await refreshLogs();
} catch (error) {
console.error("Error deleting log:", error);
diff --git a/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx
new file mode 100644
index 000000000..5f9143a83
--- /dev/null
+++ b/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx
@@ -0,0 +1,1325 @@
+"use client";
+
+import {
+ type ColumnDef,
+ type ColumnFiltersState,
+ flexRender,
+ getCoreRowModel,
+ getFilteredRowModel,
+ getPaginationRowModel,
+ getSortedRowModel,
+ type SortingState,
+ useReactTable,
+} from "@tanstack/react-table";
+import {
+ ArrowLeft,
+ Calendar,
+ Check,
+ ChevronDown,
+ ChevronUp,
+ Clock,
+ Copy,
+ Crown,
+ Edit2,
+ ExternalLink,
+ Hash,
+ Link2,
+ LinkIcon,
+ Loader2,
+ MoreHorizontal,
+ Plus,
+ RefreshCw,
+ Search,
+ Settings,
+ Shield,
+ ShieldCheck,
+ Trash2,
+ User,
+ UserMinus,
+ UserPlus,
+ Users,
+ X,
+} from "lucide-react";
+import { motion } from "motion/react";
+import { useParams, useRouter } from "next/navigation";
+import { useCallback, useMemo, useState } from "react";
+import { toast } from "sonner";
+import {
+ AlertDialog,
+ AlertDialogAction,
+ AlertDialogCancel,
+ AlertDialogContent,
+ AlertDialogDescription,
+ AlertDialogFooter,
+ AlertDialogHeader,
+ AlertDialogTitle,
+ AlertDialogTrigger,
+} from "@/components/ui/alert-dialog";
+import { Badge } from "@/components/ui/badge";
+import { Button } from "@/components/ui/button";
+import { Calendar as CalendarComponent } from "@/components/ui/calendar";
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
+import { Checkbox } from "@/components/ui/checkbox";
+import {
+ Dialog,
+ DialogContent,
+ DialogDescription,
+ DialogFooter,
+ DialogHeader,
+ DialogTitle,
+ DialogTrigger,
+} from "@/components/ui/dialog";
+import {
+ DropdownMenu,
+ DropdownMenuContent,
+ DropdownMenuItem,
+ DropdownMenuLabel,
+ DropdownMenuSeparator,
+ DropdownMenuTrigger,
+} from "@/components/ui/dropdown-menu";
+import { Input } from "@/components/ui/input";
+import { Label } from "@/components/ui/label";
+import { Popover, PopoverContent, PopoverTrigger } from "@/components/ui/popover";
+import { ScrollArea } from "@/components/ui/scroll-area";
+import {
+ Select,
+ SelectContent,
+ SelectItem,
+ SelectTrigger,
+ SelectValue,
+} from "@/components/ui/select";
+import { Separator } from "@/components/ui/separator";
+import {
+ Table,
+ TableBody,
+ TableCell,
+ TableHead,
+ TableHeader,
+ TableRow,
+} from "@/components/ui/table";
+import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
+import { Textarea } from "@/components/ui/textarea";
+import {
+ type Invite,
+ type InviteCreate,
+ type Member,
+ type Role,
+ type RoleCreate,
+ useInvites,
+ useMembers,
+ usePermissions,
+ useRoles,
+ useUserAccess,
+} from "@/hooks/use-rbac";
+import { cn } from "@/lib/utils";
+
+// Animation variants
+const fadeInUp = {
+ hidden: { opacity: 0, y: 20 },
+ visible: { opacity: 1, y: 0, transition: { duration: 0.4, ease: "easeOut" } },
+};
+
+const staggerContainer = {
+ hidden: { opacity: 0 },
+ visible: {
+ opacity: 1,
+ transition: { staggerChildren: 0.1 },
+ },
+};
+
+const cardVariants = {
+ hidden: { opacity: 0, scale: 0.95 },
+ visible: {
+ opacity: 1,
+ scale: 1,
+ transition: { type: "spring", stiffness: 300, damping: 30 },
+ },
+};
+
+export default function TeamManagementPage() {
+ const router = useRouter();
+ const params = useParams();
+ const searchSpaceId = Number(params.search_space_id);
+ const [activeTab, setActiveTab] = useState("members");
+
+ const { access, loading: accessLoading, hasPermission } = useUserAccess(searchSpaceId);
+ const {
+ members,
+ loading: membersLoading,
+ fetchMembers,
+ updateMemberRole,
+ removeMember,
+ } = useMembers(searchSpaceId);
+ const {
+ roles,
+ loading: rolesLoading,
+ fetchRoles,
+ createRole,
+ updateRole,
+ deleteRole,
+ } = useRoles(searchSpaceId);
+ const {
+ invites,
+ loading: invitesLoading,
+ fetchInvites,
+ createInvite,
+ revokeInvite,
+ } = useInvites(searchSpaceId);
+ const { groupedPermissions, loading: permissionsLoading } = usePermissions();
+
+ const canManageMembers = hasPermission("members:view");
+ const canManageRoles = hasPermission("roles:read");
+ const canInvite = hasPermission("members:invite");
+
+ const handleRefresh = useCallback(async () => {
+ await Promise.all([fetchMembers(), fetchRoles(), fetchInvites()]);
+ toast.success("Team data refreshed");
+ }, [fetchMembers, fetchRoles, fetchInvites]);
+
+ if (accessLoading) {
+ return (
+
+
+
+ Loading team data...
+
+
+ );
+ }
+
+ return (
+
+
+
+ {/* Header */}
+
+
+
+
router.push(`/dashboard/${searchSpaceId}`)}
+ className="flex items-center justify-center h-10 w-10 rounded-lg bg-primary/10 hover:bg-primary/20 transition-colors"
+ aria-label="Back to Dashboard"
+ type="button"
+ >
+
+
+
+
+
+
+
+ Team Management
+
+
+ Manage members, roles, and invite links for your search space
+
+
+
+
+
+
+ Refresh
+
+
+
+
+
+
+ {/* Summary Cards */}
+
+
+
+
+
+ Total Members
+
+
+
+
+ {members.length}
+
+
+ {members.filter((m) => m.is_owner).length} owner
+ {members.filter((m) => m.is_owner).length !== 1 ? "s" : ""}
+
+
+
+
+
+
+
+
+
+ Active Roles
+
+
+
+
+ {roles.length}
+
+
+ {roles.filter((r) => r.is_system_role).length} system roles
+
+
+
+
+
+
+
+
+
+ Active Invites
+
+
+
+
+ {invites.filter((i) => i.is_active).length}
+
+
+ {invites.reduce((acc, i) => acc + i.uses_count, 0)} total uses
+
+
+
+
+
+
+ {/* Tabs Content */}
+
+
+
+
+
+ Members
+
+ {members.length}
+
+
+
+
+ Roles
+
+ {roles.length}
+
+
+
+
+ Invites
+
+ {invites.filter((i) => i.is_active).length}
+
+
+
+
+ {activeTab === "invites" && canInvite && (
+
+ )}
+ {activeTab === "roles" && hasPermission("roles:create") && (
+
+ )}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+}
+
+// ============ Members Tab ============
+
+function MembersTab({
+ members,
+ roles,
+ loading,
+ onUpdateRole,
+ onRemoveMember,
+ canManageRoles,
+ canRemove,
+}: {
+ members: Member[];
+ roles: Role[];
+ loading: boolean;
+ onUpdateRole: (membershipId: number, roleId: number | null) => Promise;
+ onRemoveMember: (membershipId: number) => Promise;
+ canManageRoles: boolean;
+ canRemove: boolean;
+}) {
+ const [sorting, setSorting] = useState([]);
+ const [columnFilters, setColumnFilters] = useState([]);
+ const [searchQuery, setSearchQuery] = useState("");
+
+ const filteredMembers = useMemo(() => {
+ if (!searchQuery) return members;
+ const query = searchQuery.toLowerCase();
+ return members.filter(
+ (m) =>
+ m.user_email?.toLowerCase().includes(query) || m.role?.name.toLowerCase().includes(query)
+ );
+ }, [members, searchQuery]);
+
+ if (loading) {
+ return (
+
+
+
+ );
+ }
+
+ return (
+
+ {/* Search */}
+
+
+
+ setSearchQuery(e.target.value)}
+ className="pl-9"
+ />
+
+
+
+ {/* Members List */}
+
+
+
+
+ Member
+ Role
+ Joined
+ Actions
+
+
+
+ {filteredMembers.length === 0 ? (
+
+
+
+
+
+ ) : (
+ filteredMembers.map((member, index) => (
+
+
+
+
+
+
+
+ {member.is_owner && (
+
+
+
+ )}
+
+
+
{member.user_email || "Unknown"}
+ {member.is_owner && (
+
+ Owner
+
+ )}
+
+
+
+
+ {canManageRoles && !member.is_owner ? (
+
+ onUpdateRole(member.id, value === "none" ? null : Number(value))
+ }
+ >
+
+
+
+
+ No role
+ {roles.map((role) => (
+
+
+
+ {role.name}
+
+
+ ))}
+
+
+ ) : (
+
+
+ {member.role?.name || "No role"}
+
+ )}
+
+
+
+
+ {new Date(member.joined_at).toLocaleDateString()}
+
+
+
+ {canRemove && !member.is_owner && (
+
+
+
+
+
+
+
+
+ Remove member?
+
+ This will remove{" "}
+ {member.user_email} from this
+ search space. They will lose access to all resources.
+
+
+
+ Cancel
+ onRemoveMember(member.id)}
+ className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
+ >
+ Remove
+
+
+
+
+ )}
+
+
+ ))
+ )}
+
+
+
+
+ );
+}
+
+// ============ Roles Tab ============
+
+function RolesTab({
+ roles,
+ groupedPermissions,
+ loading,
+ onUpdateRole,
+ onDeleteRole,
+ canUpdate,
+ canDelete,
+}: {
+ roles: Role[];
+ groupedPermissions: Record;
+ loading: boolean;
+ onUpdateRole: (roleId: number, data: { permissions?: string[] }) => Promise;
+ onDeleteRole: (roleId: number) => Promise;
+ canUpdate: boolean;
+ canDelete: boolean;
+}) {
+ if (loading) {
+ return (
+
+
+
+ );
+ }
+
+ return (
+
+ {roles.map((role, index) => (
+
+
+ {role.is_system_role && (
+
+ System Role
+
+ )}
+
+
+
+
+
+
+
+ {role.name}
+ {role.is_default && (
+
+ Default
+
+ )}
+
+
+ {!role.is_system_role && (
+
+
+
+
+
+
+
+ {canUpdate && (
+
+
+ Edit Role
+
+ )}
+ {canDelete && (
+ <>
+
+
+
+ e.preventDefault()}
+ >
+
+ Delete Role
+
+
+
+
+ Delete role?
+
+ This will permanently delete the "{role.name}" role. Members with
+ this role will lose their permissions.
+
+
+
+ Cancel
+ onDeleteRole(role.id)}
+ className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
+ >
+ Delete
+
+
+
+
+ >
+ )}
+
+
+ )}
+
+ {role.description && (
+ {role.description}
+ )}
+
+
+
+
+ Permissions ({role.permissions.includes("*") ? "All" : role.permissions.length})
+
+
+ {role.permissions.includes("*") ? (
+
+ Full Access
+
+ ) : (
+ role.permissions.slice(0, 5).map((perm) => (
+
+ {perm.replace(":", " ")}
+
+ ))
+ )}
+ {!role.permissions.includes("*") && role.permissions.length > 5 && (
+
+ +{role.permissions.length - 5} more
+
+ )}
+
+
+
+
+
+ ))}
+
+ );
+}
+
+// ============ Invites Tab ============
+
+function InvitesTab({
+ invites,
+ loading,
+ onRevokeInvite,
+ canRevoke,
+}: {
+ invites: Invite[];
+ loading: boolean;
+ onRevokeInvite: (inviteId: number) => Promise;
+ canRevoke: boolean;
+}) {
+ const [copiedId, setCopiedId] = useState(null);
+
+ const copyInviteLink = useCallback((invite: Invite) => {
+ const link = `${window.location.origin}/invite/${invite.invite_code}`;
+ navigator.clipboard.writeText(link);
+ setCopiedId(invite.id);
+ toast.success("Invite link copied to clipboard");
+ setTimeout(() => setCopiedId(null), 2000);
+ }, []);
+
+ if (loading) {
+ return (
+
+
+
+ );
+ }
+
+ if (invites.length === 0) {
+ return (
+
+
+
+
+ No invite links
+
+ Create an invite link to allow others to join your search space with specific roles.
+
+
+ );
+ }
+
+ return (
+
+ {invites.map((invite, index) => {
+ const isExpired = invite.expires_at && new Date(invite.expires_at) < new Date();
+ const isMaxedOut = invite.max_uses && invite.uses_count >= invite.max_uses;
+ const isInactive = !invite.is_active || isExpired || isMaxedOut;
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
{invite.name || "Unnamed Invite"}
+ {isExpired && (
+
+ Expired
+
+ )}
+ {isMaxedOut && (
+
+ Max uses reached
+
+ )}
+ {!invite.is_active && !isExpired && !isMaxedOut && (
+
+ Inactive
+
+ )}
+
+
+
+
+ {invite.role?.name || "Default role"}
+
+
+
+ {invite.uses_count} uses
+ {invite.max_uses && ` / ${invite.max_uses}`}
+
+ {invite.expires_at && (
+
+
+ {isExpired
+ ? "Expired"
+ : `Expires ${new Date(invite.expires_at).toLocaleDateString()}`}
+
+ )}
+
+
+
+
+
copyInviteLink(invite)}
+ disabled={isInactive}
+ >
+ {copiedId === invite.id ? (
+ <>
+
+ Copied!
+ >
+ ) : (
+ <>
+
+ Copy Link
+ >
+ )}
+
+ {canRevoke && (
+
+
+
+
+
+
+
+
+ Revoke invite?
+
+ This will permanently delete this invite link. Anyone with this link
+ will no longer be able to join.
+
+
+
+ Cancel
+ onRevokeInvite(invite.id)}
+ className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
+ >
+ Revoke
+
+
+
+
+ )}
+
+
+
+
+
+ );
+ })}
+
+ );
+}
+
+// ============ Create Invite Dialog ============
+
+function CreateInviteDialog({
+ roles,
+ onCreateInvite,
+ searchSpaceId,
+}: {
+ roles: Role[];
+ onCreateInvite: (data: InviteCreate) => Promise;
+ searchSpaceId: number;
+}) {
+ const [open, setOpen] = useState(false);
+ const [creating, setCreating] = useState(false);
+ const [name, setName] = useState("");
+ const [roleId, setRoleId] = useState("");
+ const [maxUses, setMaxUses] = useState("");
+ const [expiresAt, setExpiresAt] = useState(undefined);
+ const [createdInvite, setCreatedInvite] = useState(null);
+ const [copiedLink, setCopiedLink] = useState(false);
+
+ const handleCreate = async () => {
+ setCreating(true);
+ try {
+ const data: InviteCreate = {};
+ if (name) data.name = name;
+ if (roleId && roleId !== "default") data.role_id = Number(roleId);
+ if (maxUses) data.max_uses = Number(maxUses);
+ if (expiresAt) data.expires_at = expiresAt.toISOString();
+
+ const invite = await onCreateInvite(data);
+ setCreatedInvite(invite);
+ } catch (error) {
+ console.error("Failed to create invite:", error);
+ } finally {
+ setCreating(false);
+ }
+ };
+
+ const handleClose = () => {
+ setOpen(false);
+ setName("");
+ setRoleId("");
+ setMaxUses("");
+ setExpiresAt(undefined);
+ setCreatedInvite(null);
+ setCopiedLink(false);
+ };
+
+ const copyLink = () => {
+ if (!createdInvite) return;
+ const link = `${window.location.origin}/invite/${createdInvite.invite_code}`;
+ navigator.clipboard.writeText(link);
+ setCopiedLink(true);
+ toast.success("Invite link copied to clipboard");
+ };
+
+ return (
+ (v ? setOpen(true) : handleClose())}>
+
+
+
+ Create Invite
+
+
+
+ {createdInvite ? (
+ <>
+
+
+
+ Invite Created!
+
+
+ Share this link to invite people to your search space.
+
+
+
+
+
+ {window.location.origin}/invite/{createdInvite.invite_code}
+
+
+ {copiedLink ? (
+
+ ) : (
+
+ )}
+
+
+
+
+
+ {createdInvite.role?.name || "Default role"}
+
+ {createdInvite.max_uses && (
+
+
+ Max {createdInvite.max_uses} uses
+
+ )}
+ {createdInvite.expires_at && (
+
+
+ Expires {new Date(createdInvite.expires_at).toLocaleDateString()}
+
+ )}
+
+
+
+ Done
+
+ >
+ ) : (
+ <>
+
+ Create Invite Link
+
+ Create a link to invite people to this search space.
+
+
+
+
+ Name (optional)
+ setName(e.target.value)}
+ />
+
+
+
Role
+
+
+
+
+
+ Default role (Viewer)
+ {roles
+ .filter((r) => r.name !== "Owner")
+ .map((role) => (
+
+
+
+ {role.name}
+
+
+ ))}
+
+
+
+
+
+ Max uses (optional)
+ setMaxUses(e.target.value)}
+ />
+
+
+
Expires on (optional)
+
+
+
+
+ {expiresAt ? expiresAt.toLocaleDateString() : "Never"}
+
+
+
+ date < new Date()}
+ initialFocus
+ />
+
+
+
+
+
+
+
+ Cancel
+
+
+ {creating ? (
+ <>
+
+ Creating...
+ >
+ ) : (
+ "Create Invite"
+ )}
+
+
+ >
+ )}
+
+
+ );
+}
+
+// ============ Create Role Dialog ============
+
+function CreateRoleDialog({
+ groupedPermissions,
+ onCreateRole,
+}: {
+ groupedPermissions: Record;
+ onCreateRole: (data: RoleCreate) => Promise;
+}) {
+ const [open, setOpen] = useState(false);
+ const [creating, setCreating] = useState(false);
+ const [name, setName] = useState("");
+ const [description, setDescription] = useState("");
+ const [selectedPermissions, setSelectedPermissions] = useState([]);
+ const [isDefault, setIsDefault] = useState(false);
+
+ const handleCreate = async () => {
+ if (!name.trim()) {
+ toast.error("Please enter a role name");
+ return;
+ }
+
+ setCreating(true);
+ try {
+ await onCreateRole({
+ name: name.trim(),
+ description: description.trim() || undefined,
+ permissions: selectedPermissions,
+ is_default: isDefault,
+ });
+ setOpen(false);
+ setName("");
+ setDescription("");
+ setSelectedPermissions([]);
+ setIsDefault(false);
+ } catch (error) {
+ console.error("Failed to create role:", error);
+ } finally {
+ setCreating(false);
+ }
+ };
+
+ const togglePermission = (perm: string) => {
+ setSelectedPermissions((prev) =>
+ prev.includes(perm) ? prev.filter((p) => p !== perm) : [...prev, perm]
+ );
+ };
+
+ const toggleCategory = (category: string) => {
+ const categoryPerms = groupedPermissions[category]?.map((p) => p.value) || [];
+ const allSelected = categoryPerms.every((p) => selectedPermissions.includes(p));
+
+ if (allSelected) {
+ setSelectedPermissions((prev) => prev.filter((p) => !categoryPerms.includes(p)));
+ } else {
+ setSelectedPermissions((prev) => [...new Set([...prev, ...categoryPerms])]);
+ }
+ };
+
+ return (
+
+
+
+
+ Create Role
+
+
+
+
+ Create Custom Role
+
+ Define a new role with specific permissions for this search space.
+
+
+
+
+
+ Role Name *
+ setName(e.target.value)}
+ />
+
+
+
+ setIsDefault(!!v)} />
+ Set as default role
+
+
+ New invites without a role will use this
+
+
+
+
+ Description
+
+
+
Permissions ({selectedPermissions.length} selected)
+
+
+ {Object.entries(groupedPermissions).map(([category, perms]) => {
+ const categorySelected = perms.filter((p) =>
+ selectedPermissions.includes(p.value)
+ ).length;
+ const allSelected = categorySelected === perms.length;
+
+ return (
+
+
toggleCategory(category)}
+ >
+ toggleCategory(category)}
+ />
+
+ {category} ({categorySelected}/{perms.length})
+
+
+
+ {perms.map((perm) => (
+
togglePermission(perm.value)}
+ >
+ togglePermission(perm.value)}
+ />
+
+ {perm.value.split(":")[1]}
+
+
+ ))}
+
+
+ );
+ })}
+
+
+
+
+
+ setOpen(false)}>
+ Cancel
+
+
+ {creating ? (
+ <>
+
+ Creating...
+ >
+ ) : (
+ "Create Role"
+ )}
+
+
+
+
+ );
+}
diff --git a/surfsense_web/app/dashboard/page.tsx b/surfsense_web/app/dashboard/page.tsx
index d61e714c6..0910d0b44 100644
--- a/surfsense_web/app/dashboard/page.tsx
+++ b/surfsense_web/app/dashboard/page.tsx
@@ -1,6 +1,6 @@
"use client";
-import { AlertCircle, Loader2, Plus, Search, Trash2 } from "lucide-react";
+import { AlertCircle, Loader2, Plus, Search, Trash2, UserCheck, Users } from "lucide-react";
import { motion, type Variants } from "motion/react";
import Image from "next/image";
import Link from "next/link";
@@ -22,6 +22,7 @@ import {
AlertDialogTitle,
AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
+import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import {
Card,
@@ -308,16 +309,30 @@ const DashboardPage = () => {
>
-
{space.name}
+
+
{space.name}
+ {!space.is_owner && (
+
+ {t("shared")}
+
+ )}
+
{space.description}
-
- {/*
{space.title} */}
+
{t("created")} {formatDate(space.created_at)}
+
+ {space.is_owner ? (
+
+ ) : (
+
+ )}
+ {space.member_count}
+
diff --git a/surfsense_web/app/invite/[invite_code]/page.tsx b/surfsense_web/app/invite/[invite_code]/page.tsx
new file mode 100644
index 000000000..1a4600483
--- /dev/null
+++ b/surfsense_web/app/invite/[invite_code]/page.tsx
@@ -0,0 +1,336 @@
+"use client";
+
+import {
+ AlertCircle,
+ ArrowRight,
+ CheckCircle2,
+ Clock,
+ Loader2,
+ LogIn,
+ Shield,
+ Sparkles,
+ Users,
+ XCircle,
+} from "lucide-react";
+import { motion } from "motion/react";
+import Image from "next/image";
+import Link from "next/link";
+import { useParams, useRouter } from "next/navigation";
+import { use, useEffect, useState } from "react";
+import { Button } from "@/components/ui/button";
+import {
+ Card,
+ CardContent,
+ CardDescription,
+ CardFooter,
+ CardHeader,
+ CardTitle,
+} from "@/components/ui/card";
+import { useInviteInfo } from "@/hooks/use-rbac";
+
+export default function InviteAcceptPage() {
+ const params = useParams();
+ const router = useRouter();
+ const inviteCode = params.invite_code as string;
+
+ const { inviteInfo, loading, acceptInvite } = useInviteInfo(inviteCode);
+ const [accepting, setAccepting] = useState(false);
+ const [accepted, setAccepted] = useState(false);
+ const [acceptedData, setAcceptedData] = useState<{
+ search_space_id: number;
+ search_space_name: string;
+ role_name: string;
+ } | null>(null);
+ const [error, setError] = useState
(null);
+ const [isLoggedIn, setIsLoggedIn] = useState(null);
+
+ // Check if user is logged in
+ useEffect(() => {
+ if (typeof window !== "undefined") {
+ const token = localStorage.getItem("surfsense_bearer_token");
+ setIsLoggedIn(!!token);
+ }
+ }, []);
+
+ const handleAccept = async () => {
+ setAccepting(true);
+ setError(null);
+ try {
+ const result = await acceptInvite();
+ if (result) {
+ setAccepted(true);
+ setAcceptedData(result);
+ }
+ } catch (err: any) {
+ setError(err.message || "Failed to accept invite");
+ } finally {
+ setAccepting(false);
+ }
+ };
+
+ const handleLoginRedirect = () => {
+ // Store the invite code to redirect back after login
+ localStorage.setItem("pending_invite_code", inviteCode);
+ router.push("/auth");
+ };
+
+ // Check for pending invite after login
+ useEffect(() => {
+ if (isLoggedIn && typeof window !== "undefined") {
+ const pendingInvite = localStorage.getItem("pending_invite_code");
+ if (pendingInvite === inviteCode) {
+ localStorage.removeItem("pending_invite_code");
+ // Auto-accept the invite after redirect
+ handleAccept();
+ }
+ }
+ }, [isLoggedIn, inviteCode]);
+
+ return (
+
+ {/* Background decoration */}
+
+
+
+
+ {loading || isLoggedIn === null ? (
+
+
+
+
+ Loading invite details...
+
+ ) : accepted && acceptedData ? (
+ <>
+
+
+
+
+ Welcome to the team!
+
+ You've successfully joined {acceptedData.search_space_name}
+
+
+
+
+
+
+
+
+
+
{acceptedData.search_space_name}
+
Search Space
+
+
+
+
+
+
+
+
{acceptedData.role_name}
+
Your Role
+
+
+
+
+
+ router.push(`/dashboard/${acceptedData.search_space_id}`)}
+ >
+ Go to Search Space
+
+
+
+ >
+ ) : !inviteInfo?.is_valid ? (
+ <>
+
+
+
+
+ Invalid Invite
+
+ {inviteInfo?.message || "This invite link is no longer valid"}
+
+
+
+
+ The invite may have expired, reached its maximum uses, or been revoked by the
+ owner.
+
+
+
+ router.push("/dashboard")}
+ >
+ Go to Dashboard
+
+
+ >
+ ) : !isLoggedIn ? (
+ <>
+
+
+
+
+ You're Invited!
+
+ Sign in to join {inviteInfo?.search_space_name || "this search space"}
+
+
+
+
+
+
+
+
+
+
{inviteInfo?.search_space_name}
+
Search Space
+
+
+ {inviteInfo?.role_name && (
+
+
+
+
+
+
{inviteInfo.role_name}
+
Role you'll receive
+
+
+ )}
+
+
+
+
+
+ Sign in to Accept
+
+
+ >
+ ) : (
+ <>
+
+
+
+
+ You're Invited!
+
+ Accept this invite to join {inviteInfo?.search_space_name || "this search space"}
+
+
+
+
+
+
+
+
+
+
{inviteInfo?.search_space_name}
+
Search Space
+
+
+ {inviteInfo?.role_name && (
+
+
+
+
+
+
{inviteInfo.role_name}
+
Role you'll receive
+
+
+ )}
+
+
+ {error && (
+
+
+ {error}
+
+ )}
+
+
+ router.push("/dashboard")}
+ >
+ Cancel
+
+
+ {accepting ? (
+ <>
+
+ Accepting...
+ >
+ ) : (
+ <>
+
+ Accept Invite
+ >
+ )}
+
+
+ >
+ )}
+
+
+ {/* Branding */}
+
+
+
+ SurfSense
+
+
+
+
+ );
+}
diff --git a/surfsense_web/components/sidebar/app-sidebar.tsx b/surfsense_web/components/sidebar/app-sidebar.tsx
index c06a166ec..e15b83ec6 100644
--- a/surfsense_web/components/sidebar/app-sidebar.tsx
+++ b/surfsense_web/components/sidebar/app-sidebar.tsx
@@ -17,6 +17,7 @@ import {
SquareTerminal,
Trash2,
Undo2,
+ Users,
} from "lucide-react";
import Image from "next/image";
import Link from "next/link";
@@ -54,6 +55,7 @@ export const iconMap: Record = {
Trash2,
Podcast,
FileText,
+ Users,
};
const defaultData = {
diff --git a/surfsense_web/components/sidebar/nav-main.tsx b/surfsense_web/components/sidebar/nav-main.tsx
index 27d6d9fb7..274d77b33 100644
--- a/surfsense_web/components/sidebar/nav-main.tsx
+++ b/surfsense_web/components/sidebar/nav-main.tsx
@@ -43,6 +43,7 @@ export function NavMain({ items }: { items: NavItem[] }) {
Podcasts: "podcasts",
Logs: "logs",
Platform: "platform",
+ Team: "team",
};
const key = titleMap[title];
diff --git a/surfsense_web/hooks/index.ts b/surfsense_web/hooks/index.ts
index 546df5407..a244609a2 100644
--- a/surfsense_web/hooks/index.ts
+++ b/surfsense_web/hooks/index.ts
@@ -1,5 +1,6 @@
export * from "./use-document-by-chunk";
export * from "./use-logs";
+export * from "./use-rbac";
export * from "./use-search-source-connectors";
export * from "./use-search-space";
export * from "./use-user";
diff --git a/surfsense_web/hooks/use-rbac.ts b/surfsense_web/hooks/use-rbac.ts
new file mode 100644
index 000000000..6033f887f
--- /dev/null
+++ b/surfsense_web/hooks/use-rbac.ts
@@ -0,0 +1,773 @@
+"use client";
+
+import { useCallback, useEffect, useMemo, useState } from "react";
+import { toast } from "sonner";
+
+// ============ Types ============
+
+export interface Role {
+ id: number;
+ name: string;
+ description: string | null;
+ permissions: string[];
+ is_default: boolean;
+ is_system_role: boolean;
+ search_space_id: number;
+ created_at: string;
+}
+
+export interface Member {
+ id: number;
+ user_id: string;
+ search_space_id: number;
+ role_id: number | null;
+ is_owner: boolean;
+ joined_at: string;
+ created_at: string;
+ role: Role | null;
+ user_email: string | null;
+}
+
+export interface Invite {
+ id: number;
+ invite_code: string;
+ search_space_id: number;
+ role_id: number | null;
+ created_by_id: string | null;
+ expires_at: string | null;
+ max_uses: number | null;
+ uses_count: number;
+ is_active: boolean;
+ name: string | null;
+ created_at: string;
+ role: Role | null;
+}
+
+export interface InviteCreate {
+ name?: string;
+ role_id?: number;
+ expires_at?: string;
+ max_uses?: number;
+}
+
+export interface InviteUpdate {
+ name?: string;
+ role_id?: number;
+ expires_at?: string;
+ max_uses?: number;
+ is_active?: boolean;
+}
+
+export interface RoleCreate {
+ name: string;
+ description?: string;
+ permissions: string[];
+ is_default?: boolean;
+}
+
+export interface RoleUpdate {
+ name?: string;
+ description?: string;
+ permissions?: string[];
+ is_default?: boolean;
+}
+
+export interface PermissionInfo {
+ value: string;
+ name: string;
+ category: string;
+}
+
+export interface UserAccess {
+ search_space_id: number;
+ search_space_name: string;
+ is_owner: boolean;
+ role_name: string | null;
+ permissions: string[];
+}
+
+export interface InviteInfo {
+ search_space_name: string;
+ role_name: string | null;
+ is_valid: boolean;
+ message: string | null;
+}
+
+// ============ Members Hook ============
+
+export function useMembers(searchSpaceId: number) {
+ const [members, setMembers] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ const fetchMembers = useCallback(async () => {
+ if (!searchSpaceId) return;
+
+ try {
+ setLoading(true);
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members`,
+ {
+ headers: {
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "GET",
+ }
+ );
+
+ if (response.status === 401) {
+ localStorage.removeItem("surfsense_bearer_token");
+ window.location.href = "/";
+ throw new Error("Unauthorized");
+ }
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to fetch members");
+ }
+
+ const data = await response.json();
+ setMembers(data);
+ setError(null);
+ return data;
+ } catch (err: any) {
+ setError(err.message || "Failed to fetch members");
+ console.error("Error fetching members:", err);
+ } finally {
+ setLoading(false);
+ }
+ }, [searchSpaceId]);
+
+ useEffect(() => {
+ fetchMembers();
+ }, [fetchMembers]);
+
+ const updateMemberRole = useCallback(
+ async (membershipId: number, roleId: number | null) => {
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members/${membershipId}`,
+ {
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "PUT",
+ body: JSON.stringify({ role_id: roleId }),
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to update member role");
+ }
+
+ const updatedMember = await response.json();
+ setMembers((prev) => prev.map((m) => (m.id === membershipId ? updatedMember : m)));
+ toast.success("Member role updated successfully");
+ return updatedMember;
+ } catch (err: any) {
+ toast.error(err.message || "Failed to update member role");
+ throw err;
+ }
+ },
+ [searchSpaceId]
+ );
+
+ const removeMember = useCallback(
+ async (membershipId: number) => {
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members/${membershipId}`,
+ {
+ headers: {
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "DELETE",
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to remove member");
+ }
+
+ setMembers((prev) => prev.filter((m) => m.id !== membershipId));
+ toast.success("Member removed successfully");
+ return true;
+ } catch (err: any) {
+ toast.error(err.message || "Failed to remove member");
+ return false;
+ }
+ },
+ [searchSpaceId]
+ );
+
+ const leaveSearchSpace = useCallback(async () => {
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members/me`,
+ {
+ headers: {
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "DELETE",
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to leave search space");
+ }
+
+ toast.success("Successfully left the search space");
+ return true;
+ } catch (err: any) {
+ toast.error(err.message || "Failed to leave search space");
+ return false;
+ }
+ }, [searchSpaceId]);
+
+ return {
+ members,
+ loading,
+ error,
+ fetchMembers,
+ updateMemberRole,
+ removeMember,
+ leaveSearchSpace,
+ };
+}
+
+// ============ Roles Hook ============
+
+// Hook for CRUD management of roles in a search space.
+// Fetches once on mount (and when searchSpaceId changes), keeps the role list
+// in local state, and updates that state optimistically after each mutation.
+// NOTE(review): the useState() calls appear to have lost their generic type
+// parameters during extraction (e.g. useState<Role[]>([])) — confirm against
+// the original file.
+export function useRoles(searchSpaceId: number) {
+ const [roles, setRoles] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ // GET /searchspaces/{id}/roles — loads all roles for the space.
+ const fetchRoles = useCallback(async () => {
+ if (!searchSpaceId) return;
+
+ try {
+ setLoading(true);
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles`,
+ {
+ headers: {
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "GET",
+ }
+ );
+
+ // Expired/invalid token: clear it and redirect to the login page.
+ if (response.status === 401) {
+ localStorage.removeItem("surfsense_bearer_token");
+ window.location.href = "/";
+ throw new Error("Unauthorized");
+ }
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to fetch roles");
+ }
+
+ const data = await response.json();
+ setRoles(data);
+ setError(null);
+ return data;
+ } catch (err: any) {
+ setError(err.message || "Failed to fetch roles");
+ console.error("Error fetching roles:", err);
+ } finally {
+ setLoading(false);
+ }
+ }, [searchSpaceId]);
+
+ useEffect(() => {
+ fetchRoles();
+ }, [fetchRoles]);
+
+ // POST /searchspaces/{id}/roles — creates a role, appends it to local state.
+ // Re-throws on failure so callers (e.g. dialogs) can keep their form open.
+ const createRole = useCallback(
+ async (roleData: RoleCreate) => {
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles`,
+ {
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "POST",
+ body: JSON.stringify(roleData),
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to create role");
+ }
+
+ const newRole = await response.json();
+ setRoles((prev) => [...prev, newRole]);
+ toast.success("Role created successfully");
+ return newRole;
+ } catch (err: any) {
+ toast.error(err.message || "Failed to create role");
+ throw err;
+ }
+ },
+ [searchSpaceId]
+ );
+
+ // PUT /searchspaces/{id}/roles/{roleId} — replaces the matching role in
+ // local state with the server's updated copy. Re-throws on failure.
+ const updateRole = useCallback(
+ async (roleId: number, roleData: RoleUpdate) => {
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles/${roleId}`,
+ {
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "PUT",
+ body: JSON.stringify(roleData),
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to update role");
+ }
+
+ const updatedRole = await response.json();
+ setRoles((prev) => prev.map((r) => (r.id === roleId ? updatedRole : r)));
+ toast.success("Role updated successfully");
+ return updatedRole;
+ } catch (err: any) {
+ toast.error(err.message || "Failed to update role");
+ throw err;
+ }
+ },
+ [searchSpaceId]
+ );
+
+ // DELETE /searchspaces/{id}/roles/{roleId} — removes the role from local
+ // state. Unlike create/update, failures are swallowed and signalled by the
+ // boolean return value rather than a thrown error.
+ const deleteRole = useCallback(
+ async (roleId: number) => {
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles/${roleId}`,
+ {
+ headers: {
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "DELETE",
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to delete role");
+ }
+
+ setRoles((prev) => prev.filter((r) => r.id !== roleId));
+ toast.success("Role deleted successfully");
+ return true;
+ } catch (err: any) {
+ toast.error(err.message || "Failed to delete role");
+ return false;
+ }
+ },
+ [searchSpaceId]
+ );
+
+ return {
+ roles,
+ loading,
+ error,
+ fetchRoles,
+ createRole,
+ updateRole,
+ deleteRole,
+ };
+}
+
+// ============ Invites Hook ============
+
+// Hook for managing invites to a search space: list, create, update, revoke.
+// Mirrors useRoles: fetch on mount, optimistic local-state updates after
+// each mutation, toast feedback on success/failure.
+export function useInvites(searchSpaceId: number) {
+ const [invites, setInvites] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ // GET /searchspaces/{id}/invites — loads all invites for the space.
+ const fetchInvites = useCallback(async () => {
+ if (!searchSpaceId) return;
+
+ try {
+ setLoading(true);
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites`,
+ {
+ headers: {
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "GET",
+ }
+ );
+
+ // Expired/invalid token: clear it and redirect to the login page.
+ if (response.status === 401) {
+ localStorage.removeItem("surfsense_bearer_token");
+ window.location.href = "/";
+ throw new Error("Unauthorized");
+ }
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to fetch invites");
+ }
+
+ const data = await response.json();
+ setInvites(data);
+ setError(null);
+ return data;
+ } catch (err: any) {
+ setError(err.message || "Failed to fetch invites");
+ console.error("Error fetching invites:", err);
+ } finally {
+ setLoading(false);
+ }
+ }, [searchSpaceId]);
+
+ useEffect(() => {
+ fetchInvites();
+ }, [fetchInvites]);
+
+ // POST /searchspaces/{id}/invites — creates an invite and appends it to
+ // local state. Re-throws so calling forms can react to failure.
+ const createInvite = useCallback(
+ async (inviteData: InviteCreate) => {
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites`,
+ {
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "POST",
+ body: JSON.stringify(inviteData),
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to create invite");
+ }
+
+ const newInvite = await response.json();
+ setInvites((prev) => [...prev, newInvite]);
+ toast.success("Invite created successfully");
+ return newInvite;
+ } catch (err: any) {
+ toast.error(err.message || "Failed to create invite");
+ throw err;
+ }
+ },
+ [searchSpaceId]
+ );
+
+ // PUT /searchspaces/{id}/invites/{inviteId} — replaces the matching invite
+ // in local state with the server's updated copy. Re-throws on failure.
+ const updateInvite = useCallback(
+ async (inviteId: number, inviteData: InviteUpdate) => {
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites/${inviteId}`,
+ {
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "PUT",
+ body: JSON.stringify(inviteData),
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to update invite");
+ }
+
+ const updatedInvite = await response.json();
+ setInvites((prev) => prev.map((i) => (i.id === inviteId ? updatedInvite : i)));
+ toast.success("Invite updated successfully");
+ return updatedInvite;
+ } catch (err: any) {
+ toast.error(err.message || "Failed to update invite");
+ throw err;
+ }
+ },
+ [searchSpaceId]
+ );
+
+ // DELETE /searchspaces/{id}/invites/{inviteId} — removes the invite from
+ // local state; returns false (instead of throwing) on failure.
+ const revokeInvite = useCallback(
+ async (inviteId: number) => {
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites/${inviteId}`,
+ {
+ headers: {
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "DELETE",
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to revoke invite");
+ }
+
+ setInvites((prev) => prev.filter((i) => i.id !== inviteId));
+ toast.success("Invite revoked successfully");
+ return true;
+ } catch (err: any) {
+ toast.error(err.message || "Failed to revoke invite");
+ return false;
+ }
+ },
+ [searchSpaceId]
+ );
+
+ return {
+ invites,
+ loading,
+ error,
+ fetchInvites,
+ createInvite,
+ updateInvite,
+ revokeInvite,
+ };
+}
+
+// ============ Permissions Hook ============
+
+// Hook that loads the global catalogue of available permissions (not scoped
+// to a search space) and exposes them both flat and grouped by category.
+export function usePermissions() {
+ const [permissions, setPermissions] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ // GET /api/v1/permissions — response shape is { permissions: [...] }.
+ const fetchPermissions = useCallback(async () => {
+ try {
+ setLoading(true);
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/permissions`,
+ {
+ headers: {
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "GET",
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to fetch permissions");
+ }
+
+ const data = await response.json();
+ setPermissions(data.permissions);
+ setError(null);
+ return data.permissions;
+ } catch (err: any) {
+ setError(err.message || "Failed to fetch permissions");
+ console.error("Error fetching permissions:", err);
+ } finally {
+ setLoading(false);
+ }
+ }, []);
+
+ useEffect(() => {
+ fetchPermissions();
+ }, [fetchPermissions]);
+
+ // Group permissions by category
+ // NOTE(review): `Record` below looks like it lost its type arguments during
+ // extraction (presumably Record<string, Permission[]>) — confirm upstream.
+ const groupedPermissions = useMemo(() => {
+ const groups: Record = {};
+ for (const perm of permissions) {
+ if (!groups[perm.category]) {
+ groups[perm.category] = [];
+ }
+ groups[perm.category].push(perm);
+ }
+ return groups;
+ }, [permissions]);
+
+ return {
+ permissions,
+ groupedPermissions,
+ loading,
+ error,
+ fetchPermissions,
+ };
+}
+
+// ============ User Access Hook ============
+
+// Hook that loads the current user's access record for a search space and
+// exposes permission-check helpers. The special permission "*" grants
+// everything (owner / full access).
+export function useUserAccess(searchSpaceId: number) {
+ const [access, setAccess] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ // GET /searchspaces/{id}/my-access — the caller's role/permissions info.
+ const fetchAccess = useCallback(async () => {
+ if (!searchSpaceId) return;
+
+ try {
+ setLoading(true);
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/my-access`,
+ {
+ headers: {
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "GET",
+ }
+ );
+
+ // Expired/invalid token: clear it and redirect to the login page.
+ if (response.status === 401) {
+ localStorage.removeItem("surfsense_bearer_token");
+ window.location.href = "/";
+ throw new Error("Unauthorized");
+ }
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to fetch access info");
+ }
+
+ const data = await response.json();
+ setAccess(data);
+ setError(null);
+ return data;
+ } catch (err: any) {
+ setError(err.message || "Failed to fetch access info");
+ console.error("Error fetching access:", err);
+ } finally {
+ setLoading(false);
+ }
+ }, [searchSpaceId]);
+
+ useEffect(() => {
+ fetchAccess();
+ }, [fetchAccess]);
+
+ // Helper function to check if user has a specific permission
+ // (returns false while access info has not loaded yet).
+ const hasPermission = useCallback(
+ (permission: string) => {
+ if (!access) return false;
+ // Owner/full access check
+ if (access.permissions.includes("*")) return true;
+ return access.permissions.includes(permission);
+ },
+ [access]
+ );
+
+ // Helper function to check if user has any of the given permissions
+ const hasAnyPermission = useCallback(
+ (permissions: string[]) => {
+ if (!access) return false;
+ if (access.permissions.includes("*")) return true;
+ return permissions.some((p) => access.permissions.includes(p));
+ },
+ [access]
+ );
+
+ return {
+ access,
+ loading,
+ error,
+ fetchAccess,
+ hasPermission,
+ hasAnyPermission,
+ };
+}
+
+// ============ Invite Info Hook (Public) ============
+
+// Hook for the invite-acceptance flow. Fetching the invite info is a PUBLIC,
+// unauthenticated call (no Authorization header) so it works on landing pages;
+// accepting the invite requires the bearer token.
+export function useInviteInfo(inviteCode: string | null) {
+ const [inviteInfo, setInviteInfo] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ // GET /api/v1/invites/{code}/info — public invite metadata.
+ const fetchInviteInfo = useCallback(async () => {
+ // No code means nothing to load; clear the loading flag and bail.
+ if (!inviteCode) {
+ setLoading(false);
+ return;
+ }
+
+ try {
+ setLoading(true);
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/invites/${inviteCode}/info`,
+ {
+ method: "GET",
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to fetch invite info");
+ }
+
+ const data = await response.json();
+ setInviteInfo(data);
+ setError(null);
+ return data;
+ } catch (err: any) {
+ setError(err.message || "Failed to fetch invite info");
+ console.error("Error fetching invite info:", err);
+ } finally {
+ setLoading(false);
+ }
+ }, [inviteCode]);
+
+ useEffect(() => {
+ fetchInviteInfo();
+ }, [fetchInviteInfo]);
+
+ // POST /api/v1/invites/accept — authenticated; joins the current user to
+ // the invite's search space. Re-throws so callers can stay on the page.
+ const acceptInvite = useCallback(async () => {
+ if (!inviteCode) {
+ toast.error("No invite code provided");
+ return null;
+ }
+
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/invites/accept`,
+ {
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
+ },
+ method: "POST",
+ body: JSON.stringify({ invite_code: inviteCode }),
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(errorData.detail || "Failed to accept invite");
+ }
+
+ const data = await response.json();
+ toast.success(data.message || "Successfully joined the search space");
+ return data;
+ } catch (err: any) {
+ toast.error(err.message || "Failed to accept invite");
+ throw err;
+ }
+ }, [inviteCode]);
+
+ return {
+ inviteInfo,
+ loading,
+ error,
+ fetchInviteInfo,
+ acceptInvite,
+ };
+}
diff --git a/surfsense_web/hooks/use-search-spaces.ts b/surfsense_web/hooks/use-search-spaces.ts
index 7d9819d23..f69144081 100644
--- a/surfsense_web/hooks/use-search-spaces.ts
+++ b/surfsense_web/hooks/use-search-spaces.ts
@@ -10,6 +10,8 @@ interface SearchSpace {
created_at: string;
citations_enabled: boolean;
qna_custom_instructions: string | null;
+ member_count: number;
+ is_owner: boolean;
}
export function useSearchSpaces() {
diff --git a/surfsense_web/messages/en.json b/surfsense_web/messages/en.json
index 758e558b3..45663859d 100644
--- a/surfsense_web/messages/en.json
+++ b/surfsense_web/messages/en.json
@@ -103,6 +103,7 @@
"surfsense_dashboard": "SurfSense Dashboard",
"welcome_message": "Welcome to your SurfSense dashboard.",
"your_search_spaces": "Your Search Spaces",
+ "shared": "Shared",
"create_search_space": "Create Search Space",
"add_new_search_space": "Add New Search Space",
"loading": "Loading",
@@ -149,7 +150,8 @@
"podcasts": "Podcasts",
"logs": "Logs",
"all_search_spaces": "All Search Spaces",
- "chat": "Chat"
+ "chat": "Chat",
+ "team": "Team"
},
"pricing": {
"title": "SurfSense Pricing",
diff --git a/surfsense_web/messages/zh.json b/surfsense_web/messages/zh.json
index 857649b80..809b654a5 100644
--- a/surfsense_web/messages/zh.json
+++ b/surfsense_web/messages/zh.json
@@ -103,6 +103,7 @@
"surfsense_dashboard": "SurfSense 仪表盘",
"welcome_message": "欢迎来到您的 SurfSense 仪表盘。",
"your_search_spaces": "您的搜索空间",
+ "shared": "共享",
"create_search_space": "创建搜索空间",
"add_new_search_space": "添加新的搜索空间",
"loading": "加载中",
@@ -149,7 +150,8 @@
"podcasts": "播客",
"logs": "日志",
"all_search_spaces": "所有搜索空间",
- "chat": "聊天"
+ "chat": "聊天",
+ "team": "团队"
},
"pricing": {
"title": "SurfSense 定价",
From 7e94413814cb434f39874c36269f919e41e3a17c Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Thu, 27 Nov 2025 23:01:14 -0800
Subject: [PATCH 06/36] chore: biome fixes
---
.../dashboard/[search_space_id]/team/page.tsx | 22 +++++++++----------
1 file changed, 11 insertions(+), 11 deletions(-)
diff --git a/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx
index 5f9143a83..dd3f25218 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx
@@ -1268,33 +1268,33 @@ function CreateRoleDialog({
return (
-
toggleCategory(category)}
>
toggleCategory(category)}
/>
-
+
{category} ({categorySelected}/{perms.length})
-
-
+
+
{perms.map((perm) => (
-
togglePermission(perm.value)}
>
togglePermission(perm.value)}
/>
-
- {perm.value.split(":")[1]}
-
-
+
{perm.value.split(":")[1]}
+
))}
From 55982a543908304459eec126933b00ec7e3fd50c Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Thu, 27 Nov 2025 23:25:43 -0800
Subject: [PATCH 07/36] fix: address biome-related issues and improve stability
---
...backfill_rbac_for_existing_searchspaces.py | 179 ++++++++++++++++++
1 file changed, 179 insertions(+)
create mode 100644 surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py
diff --git a/surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py b/surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py
new file mode 100644
index 000000000..970f4b256
--- /dev/null
+++ b/surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py
@@ -0,0 +1,179 @@
+"""Backfill RBAC data for existing search spaces
+
+Revision ID: 41
+Revises: 40
+Create Date: 2025-11-28
+
+This migration creates default roles and owner memberships for all existing
+search spaces that were created before the RBAC system was implemented.
+"""
+
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "41"
+down_revision = "40"
+branch_labels = None
+depends_on = None
+
+# Default role permissions (must match DEFAULT_ROLE_PERMISSIONS in db.py)
+# Each entry is created per search space by upgrade() below. "*" is the
+# wildcard meaning all permissions; is_default marks the role assigned to
+# new members when none is chosen explicitly (only Editor has it).
+DEFAULT_ROLES = [
+ {
+ "name": "Owner",
+ "description": "Full access to all resources",
+ "permissions": ["*"],
+ "is_system_role": True,
+ "is_default": False,
+ },
+ {
+ "name": "Admin",
+ "description": "Can manage members, roles, and all content",
+ "permissions": [
+ "documents:create", "documents:read", "documents:update", "documents:delete",
+ "chats:create", "chats:read", "chats:update", "chats:delete",
+ "llm_configs:create", "llm_configs:read", "llm_configs:update", "llm_configs:delete",
+ "logs:read", "logs:delete",
+ "podcasts:create", "podcasts:read", "podcasts:update", "podcasts:delete",
+ "connectors:create", "connectors:read", "connectors:update", "connectors:delete",
+ "members:read", "members:update", "members:delete",
+ "roles:create", "roles:read", "roles:update", "roles:delete",
+ "invites:create", "invites:read", "invites:delete",
+ "settings:read", "settings:update",
+ ],
+ "is_system_role": True,
+ "is_default": False,
+ },
+ {
+ "name": "Editor",
+ "description": "Can create and edit content",
+ "permissions": [
+ "documents:create", "documents:read", "documents:update",
+ "chats:create", "chats:read", "chats:update",
+ "llm_configs:read",
+ "logs:read",
+ "podcasts:create", "podcasts:read", "podcasts:update",
+ "connectors:create", "connectors:read", "connectors:update",
+ "members:read",
+ "roles:read",
+ ],
+ "is_system_role": True,
+ "is_default": True,
+ },
+ {
+ "name": "Viewer",
+ "description": "Read-only access to content",
+ "permissions": [
+ "documents:read",
+ "chats:read",
+ "llm_configs:read",
+ "logs:read",
+ "podcasts:read",
+ "connectors:read",
+ "members:read",
+ "roles:read",
+ ],
+ "is_system_role": True,
+ "is_default": False,
+ },
+]
+
+
+def upgrade():
+ """Backfill default roles and an Owner membership for every search space
+ that predates the RBAC tables (i.e. has no rows in search_space_roles)."""
+ connection = op.get_bind()
+
+ # Get all existing search spaces that don't have roles yet
+ search_spaces = connection.execute(
+ sa.text("""
+ SELECT ss.id, ss.user_id
+ FROM searchspaces ss
+ WHERE NOT EXISTS (
+ SELECT 1 FROM search_space_roles ssr
+ WHERE ssr.search_space_id = ss.id
+ )
+ """)
+ ).fetchall()
+
+ for ss_id, owner_user_id in search_spaces:
+ owner_role_id = None
+
+ # Create default roles for each search space
+ for role in DEFAULT_ROLES:
+ # Convert permissions list to PostgreSQL array literal format for raw SQL
+ # NOTE(review): f-string interpolation into SQL is safe ONLY because the
+ # values come from the module-level DEFAULT_ROLES constant above, never
+ # from user input. Do not reuse this pattern with untrusted data.
+ perms_literal = "ARRAY[" + ",".join(f"'{p}'" for p in role["permissions"]) + "]::TEXT[]"
+
+ result = connection.execute(
+ sa.text(f"""
+ INSERT INTO search_space_roles
+ (name, description, permissions, is_default, is_system_role, search_space_id)
+ VALUES (:name, :description, {perms_literal}, :is_default, :is_system_role, :search_space_id)
+ RETURNING id
+ """),
+ {
+ "name": role["name"],
+ "description": role["description"],
+ "is_default": role["is_default"],
+ "is_system_role": role["is_system_role"],
+ "search_space_id": ss_id,
+ }
+ )
+ role_id = result.fetchone()[0]
+
+ # Keep track of Owner role ID
+ if role["name"] == "Owner":
+ owner_role_id = role_id
+
+ # Create owner membership for the search space creator
+ # (skipped when the space has no user_id recorded)
+ if owner_user_id and owner_role_id:
+ # Check if membership already exists
+ existing = connection.execute(
+ sa.text("""
+ SELECT 1 FROM search_space_memberships
+ WHERE user_id = :user_id AND search_space_id = :search_space_id
+ """),
+ {"user_id": owner_user_id, "search_space_id": ss_id}
+ ).fetchone()
+
+ if not existing:
+ connection.execute(
+ sa.text("""
+ INSERT INTO search_space_memberships
+ (user_id, search_space_id, role_id, is_owner)
+ VALUES (:user_id, :search_space_id, :role_id, TRUE)
+ """),
+ {
+ "user_id": owner_user_id,
+ "search_space_id": ss_id,
+ "role_id": owner_role_id,
+ }
+ )
+
+
+def downgrade():
+ # This migration only adds data, not schema changes
+ # Downgrade would remove all roles and memberships created by this migration
+ # However, this is destructive and may affect manually created data
+ # So we only remove system roles and owner memberships that were auto-created
+ connection = op.get_bind()
+
+ # Remove memberships where user is owner and role is system Owner role
+ connection.execute(
+ sa.text("""
+ DELETE FROM search_space_memberships ssm
+ USING search_space_roles ssr
+ WHERE ssm.role_id = ssr.id
+ AND ssm.is_owner = TRUE
+ AND ssr.is_system_role = TRUE
+ AND ssr.name = 'Owner'
+ """)
+ )
+
+ # Remove system roles
+ # NOTE(review): this DELETE has no search_space filter, so it removes system
+ # roles from ALL search spaces — not only those backfilled by upgrade().
+ # Confirm that is intended before rolling this migration back in production.
+ connection.execute(
+ sa.text("""
+ DELETE FROM search_space_roles
+ WHERE is_system_role = TRUE
+ """)
+ )
+
From 111323b3b32a66d2664d922879918d94acfc4aa4 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Fri, 28 Nov 2025 23:39:07 -0800
Subject: [PATCH 08/36] feat: UX improvement
- Added "Add Sources" button in ChatInputGroup for easier document management.
- Introduced a "Manage Team" card in CompletionStep to facilitate team collaboration.
- Updated UI elements for better visual feedback and interaction.
- Removed unused role examples in LLMRoleManager for cleaner code.
---
.../components/chat/ChatInputGroup.tsx | 17 +++-
.../components/onboard/completion-step.tsx | 97 ++++++++++++++-----
.../components/settings/llm-role-manager.tsx | 13 ---
3 files changed, 86 insertions(+), 41 deletions(-)
diff --git a/surfsense_web/components/chat/ChatInputGroup.tsx b/surfsense_web/components/chat/ChatInputGroup.tsx
index 2995a30b8..7a76c4d56 100644
--- a/surfsense_web/components/chat/ChatInputGroup.tsx
+++ b/surfsense_web/components/chat/ChatInputGroup.tsx
@@ -1,8 +1,8 @@
"use client";
import { ChatInput } from "@llamaindex/chat-ui";
-import { Brain, Check, FolderOpen, Minus, Plus, Zap } from "lucide-react";
-import { useParams } from "next/navigation";
+import { Brain, Check, FolderOpen, Minus, Plus, PlusCircle, Zap } from "lucide-react";
+import { useParams, useRouter } from "next/navigation";
import React, { Suspense, useCallback, useState } from "react";
import { DocumentsDataTable } from "@/components/chat/DocumentsDataTable";
import { Badge } from "@/components/ui/badge";
@@ -115,6 +115,7 @@ const ConnectorSelector = React.memo(
selectedConnectors?: string[];
}) => {
const { search_space_id } = useParams();
+ const router = useRouter();
const [isOpen, setIsOpen] = useState(false);
// Fetch immediately (not lazy) so the button can show the correct count
@@ -247,9 +248,19 @@ const ConnectorSelector = React.memo(
No sources found
-
+
Add documents or configure search connectors for this search space
+ {
+ setIsOpen(false);
+ router.push(`/dashboard/${search_space_id}/sources/add`);
+ }}
+ className="gap-2"
+ >
+
+ Add Sources
+
) : (
<>
diff --git a/surfsense_web/components/onboard/completion-step.tsx b/surfsense_web/components/onboard/completion-step.tsx
index 68aa77568..a8dbbd76c 100644
--- a/surfsense_web/components/onboard/completion-step.tsx
+++ b/surfsense_web/components/onboard/completion-step.tsx
@@ -8,6 +8,8 @@ import {
FileText,
MessageSquare,
Sparkles,
+ UserPlus,
+ Users,
Zap,
} from "lucide-react";
import { motion } from "motion/react";
@@ -50,16 +52,60 @@ export function CompletionStep({ searchSpaceId }: CompletionStepProps) {
Choose an option to continue
-
+
+ {/* Manage Team Card */}
+
+
+
+
+
+
+
+ Manage Team
+
+ Invite team members and collaborate on your search space
+
+
+
+
+
+
+ Invite team members
+
+
+
+ Assign roles & permissions
+
+
+
+ Collaborate together
+
+
+ router.push(`/dashboard/${searchSpaceId}/team`)}
+ >
+ Manage Team
+
+
+
+
+
+
{/* Add Sources Card */}
-
-
-
+
+
+
+
Add Sources
@@ -67,27 +113,27 @@ export function CompletionStep({ searchSpaceId }: CompletionStepProps) {
Connect your data sources to start building your knowledge base
-
+
-
+
Connect documents and files
-
+
Import from various sources
-
+
Build your knowledge base
router.push(`/dashboard/${searchSpaceId}/sources/add`)}
>
Add Sources
-
+
@@ -95,13 +141,14 @@ export function CompletionStep({ searchSpaceId }: CompletionStepProps) {
{/* Start Chatting Card */}
-
-
-
+
+
+
+
Start Chatting
@@ -109,27 +156,27 @@ export function CompletionStep({ searchSpaceId }: CompletionStepProps) {
Jump right into the AI researcher and start asking questions
-
+
-
+
AI-powered conversations
-
+
Research and explore topics
-
+
Get instant insights
router.push(`/dashboard/${searchSpaceId}/researcher`)}
>
Start Chatting
-
+
diff --git a/surfsense_web/components/settings/llm-role-manager.tsx b/surfsense_web/components/settings/llm-role-manager.tsx
index e1166bda5..06b56b24f 100644
--- a/surfsense_web/components/settings/llm-role-manager.tsx
+++ b/surfsense_web/components/settings/llm-role-manager.tsx
@@ -413,19 +413,6 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
-
-
- Use cases: {role.examples}
-
-
- {role.characteristics.map((char, idx) => (
-
- {char}
-
- ))}
-
-
-
Assign LLM Configuration:
Date: Fri, 28 Nov 2025 23:43:03 -0800
Subject: [PATCH 09/36] chore: updated pricing section
---
surfsense_web/components/pricing/pricing-section.tsx | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/surfsense_web/components/pricing/pricing-section.tsx b/surfsense_web/components/pricing/pricing-section.tsx
index 1223d0934..9823245f6 100644
--- a/surfsense_web/components/pricing/pricing-section.tsx
+++ b/surfsense_web/components/pricing/pricing-section.tsx
@@ -16,7 +16,8 @@ const demoPlans = [
"Podcasts support with local TTS providers.",
"Connects with 15+ external sources.",
"Cross-Browser Extension for dynamic webpages including authenticated content",
- "Upcoming: Mergeable MindMaps",
+ "Role-based access permissions",
+ "Collaboration and multiplayer features",
"Upcoming: Note Management",
],
description: "Open source version with powerful features",
@@ -32,9 +33,10 @@ const demoPlans = [
features: [
"Everything in Community",
"Priority Support",
- "Role-based access permissions",
- "Collaboration and multiplayer features",
"Advanced security features",
+ "Audit logs and compliance",
+ "SSO, OIDC & SAML",
+ "SLA guarantee",
],
description: "For large organizations with specific needs",
buttonText: "Contact Sales",
From 72236f65f7793c7934a2cb7def4013e6ca530178 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Sat, 29 Nov 2025 00:12:30 -0800
Subject: [PATCH 10/36] refactor: remove UserSearchSpacePreference model and
related relationships
- Deleted UserSearchSpacePreference class and its relationships from SearchSpace and User models.
- Cleaned up unused code to streamline database interactions.
---
.../42_drop_user_search_space_preferences.py | 41 +++++++++++++++++
surfsense_backend/app/db.py | 45 -------------------
2 files changed, 41 insertions(+), 45 deletions(-)
create mode 100644 surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py
diff --git a/surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py b/surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py
new file mode 100644
index 000000000..3ce491cfc
--- /dev/null
+++ b/surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py
@@ -0,0 +1,41 @@
+"""Drop user_search_space_preferences table
+
+Revision ID: 42
+Revises: 41
+Create Date: 2025-11-28
+
+This table is no longer needed after RBAC implementation:
+- LLM preferences are now stored on SearchSpace directly
+- User-SearchSpace relationships are handled by SearchSpaceMembership
+"""
+
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "42"
+down_revision = "41"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ """Remove the legacy per-user preference table, superseded by RBAC
+ memberships and space-level LLM settings (see module docstring)."""
+ # Drop the user_search_space_preferences table
+ op.drop_table("user_search_space_preferences")
+
+
+def downgrade():
+ """Restore the dropped table's schema (data is NOT recoverable)."""
+ # Recreate the table if rolling back
+ # Note: the three *_llm_id columns deliberately have no foreign keys — they
+ # are plain integers here, matching the dropped schema.
+ op.create_table(
+ "user_search_space_preferences",
+ sa.Column("id", sa.Integer(), primary_key=True),
+ sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now()),
+ sa.Column("user_id", sa.UUID(), sa.ForeignKey("user.id", ondelete="CASCADE"), nullable=False),
+ sa.Column("search_space_id", sa.Integer(), sa.ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False),
+ sa.Column("long_context_llm_id", sa.Integer(), nullable=True),
+ sa.Column("fast_llm_id", sa.Integer(), nullable=True),
+ sa.Column("strategic_llm_id", sa.Integer(), nullable=True),
+ sa.UniqueConstraint("user_id", "search_space_id", name="uq_user_searchspace"),
+ )
+
diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py
index 6195bec87..f5a8fd20f 100644
--- a/surfsense_backend/app/db.py
+++ b/surfsense_backend/app/db.py
@@ -441,11 +441,6 @@ class SearchSpace(BaseModel, TimestampMixin):
order_by="LLMConfig.id",
cascade="all, delete-orphan",
)
- user_preferences = relationship(
- "UserSearchSpacePreference",
- back_populates="search_space",
- cascade="all, delete-orphan",
- )
# RBAC relationships
roles = relationship(
@@ -527,36 +522,6 @@ class LLMConfig(BaseModel, TimestampMixin):
search_space = relationship("SearchSpace", back_populates="llm_configs")
-class UserSearchSpacePreference(BaseModel, TimestampMixin):
- __tablename__ = "user_search_space_preferences"
- __table_args__ = (
- UniqueConstraint(
- "user_id",
- "search_space_id",
- name="uq_user_searchspace",
- ),
- )
-
- user_id = Column(
- UUID(as_uuid=True), ForeignKey("user.id", ondelete="CASCADE"), nullable=False
- )
- search_space_id = Column(
- Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
- )
-
- # User-specific LLM preferences for this search space
- # Note: These can be negative IDs for global configs (from YAML) or positive IDs for custom configs (from DB)
- # Foreign keys removed to support global configs with negative IDs
- long_context_llm_id = Column(Integer, nullable=True)
- fast_llm_id = Column(Integer, nullable=True)
- strategic_llm_id = Column(Integer, nullable=True)
-
- # Future RBAC fields can be added here
- # role = Column(String(50), nullable=True) # e.g., 'owner', 'editor', 'viewer'
- # permissions = Column(JSON, nullable=True)
-
- user = relationship("User", back_populates="search_space_preferences")
- search_space = relationship("SearchSpace", back_populates="user_preferences")
class Log(BaseModel, TimestampMixin):
@@ -720,11 +685,6 @@ if config.AUTH_TYPE == "GOOGLE":
"OAuthAccount", lazy="joined"
)
search_spaces = relationship("SearchSpace", back_populates="user")
- search_space_preferences = relationship(
- "UserSearchSpacePreference",
- back_populates="user",
- cascade="all, delete-orphan",
- )
# RBAC relationships
search_space_memberships = relationship(
@@ -746,11 +706,6 @@ else:
class User(SQLAlchemyBaseUserTableUUID, Base):
search_spaces = relationship("SearchSpace", back_populates="user")
- search_space_preferences = relationship(
- "UserSearchSpacePreference",
- back_populates="user",
- cascade="all, delete-orphan",
- )
# RBAC relationships
search_space_memberships = relationship(
From 07234d7849aab9d26ed96a5f3c7c91c8f239537b Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Sat, 29 Nov 2025 00:16:41 -0800
Subject: [PATCH 11/36] ruff format
---
...backfill_rbac_for_existing_searchspaces.py | 79 +++++++++++++------
.../42_drop_user_search_space_preferences.py | 19 ++++-
surfsense_backend/app/db.py | 2 -
3 files changed, 71 insertions(+), 29 deletions(-)
diff --git a/surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py b/surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py
index 970f4b256..3a9b3e698 100644
--- a/surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py
+++ b/surfsense_backend/alembic/versions/41_backfill_rbac_for_existing_searchspaces.py
@@ -31,16 +31,40 @@ DEFAULT_ROLES = [
"name": "Admin",
"description": "Can manage members, roles, and all content",
"permissions": [
- "documents:create", "documents:read", "documents:update", "documents:delete",
- "chats:create", "chats:read", "chats:update", "chats:delete",
- "llm_configs:create", "llm_configs:read", "llm_configs:update", "llm_configs:delete",
- "logs:read", "logs:delete",
- "podcasts:create", "podcasts:read", "podcasts:update", "podcasts:delete",
- "connectors:create", "connectors:read", "connectors:update", "connectors:delete",
- "members:read", "members:update", "members:delete",
- "roles:create", "roles:read", "roles:update", "roles:delete",
- "invites:create", "invites:read", "invites:delete",
- "settings:read", "settings:update",
+ "documents:create",
+ "documents:read",
+ "documents:update",
+ "documents:delete",
+ "chats:create",
+ "chats:read",
+ "chats:update",
+ "chats:delete",
+ "llm_configs:create",
+ "llm_configs:read",
+ "llm_configs:update",
+ "llm_configs:delete",
+ "logs:read",
+ "logs:delete",
+ "podcasts:create",
+ "podcasts:read",
+ "podcasts:update",
+ "podcasts:delete",
+ "connectors:create",
+ "connectors:read",
+ "connectors:update",
+ "connectors:delete",
+ "members:read",
+ "members:update",
+ "members:delete",
+ "roles:create",
+ "roles:read",
+ "roles:update",
+ "roles:delete",
+ "invites:create",
+ "invites:read",
+ "invites:delete",
+ "settings:read",
+ "settings:update",
],
"is_system_role": True,
"is_default": False,
@@ -49,12 +73,20 @@ DEFAULT_ROLES = [
"name": "Editor",
"description": "Can create and edit content",
"permissions": [
- "documents:create", "documents:read", "documents:update",
- "chats:create", "chats:read", "chats:update",
+ "documents:create",
+ "documents:read",
+ "documents:update",
+ "chats:create",
+ "chats:read",
+ "chats:update",
"llm_configs:read",
"logs:read",
- "podcasts:create", "podcasts:read", "podcasts:update",
- "connectors:create", "connectors:read", "connectors:update",
+ "podcasts:create",
+ "podcasts:read",
+ "podcasts:update",
+ "connectors:create",
+ "connectors:read",
+ "connectors:update",
"members:read",
"roles:read",
],
@@ -101,8 +133,10 @@ def upgrade():
# Create default roles for each search space
for role in DEFAULT_ROLES:
# Convert permissions list to PostgreSQL array literal format for raw SQL
- perms_literal = "ARRAY[" + ",".join(f"'{p}'" for p in role["permissions"]) + "]::TEXT[]"
-
+ perms_literal = (
+ "ARRAY[" + ",".join(f"'{p}'" for p in role["permissions"]) + "]::TEXT[]"
+ )
+
result = connection.execute(
sa.text(f"""
INSERT INTO search_space_roles
@@ -116,10 +150,10 @@ def upgrade():
"is_default": role["is_default"],
"is_system_role": role["is_system_role"],
"search_space_id": ss_id,
- }
+ },
)
role_id = result.fetchone()[0]
-
+
# Keep track of Owner role ID
if role["name"] == "Owner":
owner_role_id = role_id
@@ -132,7 +166,7 @@ def upgrade():
SELECT 1 FROM search_space_memberships
WHERE user_id = :user_id AND search_space_id = :search_space_id
"""),
- {"user_id": owner_user_id, "search_space_id": ss_id}
+ {"user_id": owner_user_id, "search_space_id": ss_id},
).fetchone()
if not existing:
@@ -146,7 +180,7 @@ def upgrade():
"user_id": owner_user_id,
"search_space_id": ss_id,
"role_id": owner_role_id,
- }
+ },
)
@@ -156,7 +190,7 @@ def downgrade():
# However, this is destructive and may affect manually created data
# So we only remove system roles and owner memberships that were auto-created
connection = op.get_bind()
-
+
# Remove memberships where user is owner and role is system Owner role
connection.execute(
sa.text("""
@@ -168,7 +202,7 @@ def downgrade():
AND ssr.name = 'Owner'
""")
)
-
+
# Remove system roles
connection.execute(
sa.text("""
@@ -176,4 +210,3 @@ def downgrade():
WHERE is_system_role = TRUE
""")
)
-
diff --git a/surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py b/surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py
index 3ce491cfc..9144421d8 100644
--- a/surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py
+++ b/surfsense_backend/alembic/versions/42_drop_user_search_space_preferences.py
@@ -30,12 +30,23 @@ def downgrade():
op.create_table(
"user_search_space_preferences",
sa.Column("id", sa.Integer(), primary_key=True),
- sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now()),
- sa.Column("user_id", sa.UUID(), sa.ForeignKey("user.id", ondelete="CASCADE"), nullable=False),
- sa.Column("search_space_id", sa.Integer(), sa.ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False),
+ sa.Column(
+ "created_at", sa.DateTime(timezone=True), server_default=sa.func.now()
+ ),
+ sa.Column(
+ "user_id",
+ sa.UUID(),
+ sa.ForeignKey("user.id", ondelete="CASCADE"),
+ nullable=False,
+ ),
+ sa.Column(
+ "search_space_id",
+ sa.Integer(),
+ sa.ForeignKey("searchspaces.id", ondelete="CASCADE"),
+ nullable=False,
+ ),
sa.Column("long_context_llm_id", sa.Integer(), nullable=True),
sa.Column("fast_llm_id", sa.Integer(), nullable=True),
sa.Column("strategic_llm_id", sa.Integer(), nullable=True),
sa.UniqueConstraint("user_id", "search_space_id", name="uq_user_searchspace"),
)
-
diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py
index f5a8fd20f..f3147a42b 100644
--- a/surfsense_backend/app/db.py
+++ b/surfsense_backend/app/db.py
@@ -522,8 +522,6 @@ class LLMConfig(BaseModel, TimestampMixin):
search_space = relationship("SearchSpace", back_populates="llm_configs")
-
-
class Log(BaseModel, TimestampMixin):
__tablename__ = "logs"
From 73dc337c1de70569d35572c1039548db838574d4 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Sat, 29 Nov 2025 00:32:03 -0800
Subject: [PATCH 12/36] docs: update README files
---
README.md | 19 ++++++++++++-------
README.zh-CN.md | 20 +++++++++++++-------
2 files changed, 25 insertions(+), 14 deletions(-)
diff --git a/README.md b/README.md
index c0725a44f..ee2848ced 100644
--- a/README.md
+++ b/README.md
@@ -38,19 +38,24 @@ https://github.com/user-attachments/assets/a0a16566-6967-4374-ac51-9b3e07fbecd7
## Key Features
### 💡 **Idea**:
-Have your own highly customizable private NotebookLM and Perplexity integrated with external sources.
+- Have your own highly customizable private NotebookLM and Perplexity integrated with external sources.
### 📁 **Multiple File Format Uploading Support**
-Save content from your own personal files *(Documents, images, videos and supports **50+ file extensions**)* to your own personal knowledge base .
+- Save content from your own personal files *(Documents, images, videos and supports **50+ file extensions**)* to your own personal knowledge base.
### 🔍 **Powerful Search**
-Quickly research or find anything in your saved content .
+- Quickly research or find anything in your saved content.
### 💬 **Chat with your Saved Content**
- Interact in Natural Language and get cited answers.
+- Interact in Natural Language and get cited answers.
### 📄 **Cited Answers**
-Get Cited answers just like Perplexity.
+- Get Cited answers just like Perplexity.
### 🔔 **Privacy & Local LLM Support**
-Works Flawlessly with Ollama local LLMs.
+- Works Flawlessly with Ollama local LLMs.
### 🏠 **Self Hostable**
-Open source and easy to deploy locally.
+- Open source and easy to deploy locally.
+### 👥 **Team Collaboration with RBAC**
+- Role-Based Access Control for Search Spaces
+- Invite team members with customizable roles (Owner, Admin, Editor, Viewer)
+- Granular permissions for documents, chats, connectors, and settings
+- Share knowledge bases securely within your organization
### 🎙️ Podcasts
- Blazingly fast podcast generation agent. (Creates a 3-minute podcast in under 20 seconds.)
- Convert your chat conversations into engaging audio content
diff --git a/README.zh-CN.md b/README.zh-CN.md
index 84bf8a133..464242a4d 100644
--- a/README.zh-CN.md
+++ b/README.zh-CN.md
@@ -39,25 +39,31 @@ https://github.com/user-attachments/assets/a0a16566-6967-4374-ac51-9b3e07fbecd7
## 核心功能
### 💡 **理念**:
-拥有您自己的高度可定制的私有 NotebookLM 和 Perplexity,并与外部数据源集成。
+- 拥有您自己的高度可定制的私有 NotebookLM 和 Perplexity,并与外部数据源集成。
### 📁 **支持多种文件格式上传**
-将您个人文件中的内容(文档、图像、视频,支持 **50+ 种文件扩展名**)保存到您自己的个人知识库。
+- 将您个人文件中的内容(文档、图像、视频,支持 **50+ 种文件扩展名**)保存到您自己的个人知识库。
### 🔍 **强大的搜索功能**
-快速研究或查找已保存内容中的任何信息。
+- 快速研究或查找已保存内容中的任何信息。
### 💬 **与已保存内容对话**
-使用自然语言交互并获得引用答案。
+- 使用自然语言交互并获得引用答案。
### 📄 **引用答案**
-像 Perplexity 一样获得带引用的答案。
+- 像 Perplexity 一样获得带引用的答案。
### 🔔 **隐私保护与本地 LLM 支持**
-完美支持 Ollama 本地大语言模型。
+- 完美支持 Ollama 本地大语言模型。
### 🏠 **可自托管**
-开源且易于本地部署。
+- 开源且易于本地部署。
+
+### 👥 **团队协作与 RBAC**
+- 搜索空间的基于角色的访问控制
+- 使用可自定义的角色(所有者、管理员、编辑者、查看者)邀请团队成员
+- 对文档、聊天、连接器和设置的细粒度权限控制
+- 在组织内安全共享知识库
### 🎙️ **播客功能**
- 超快速播客生成代理(在 20 秒内创建 3 分钟播客)
From e33b42f9a593bfd88d04cdb399da6092601b281f Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 30 Nov 2025 02:37:13 +0530
Subject: [PATCH 13/36] refactor: removed auto-save functionality in editor
page
---
.../editor/[documentId]/page.tsx | 31 +------------------
1 file changed, 1 insertion(+), 30 deletions(-)
diff --git a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
index d408de44c..5a0dae2d6 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
@@ -9,7 +9,6 @@ import { BlockNoteEditor } from "@/components/DynamicBlockNoteEditor";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Separator } from "@/components/ui/separator";
-import { cn } from "@/lib/utils";
interface EditorContent {
document_id: number;
@@ -97,35 +96,7 @@ export default function EditorPage() {
}
}, [editorContent, document]);
- // Auto-save every 30 seconds - DIRECT CALL TO FASTAPI
- useEffect(() => {
- if (!editorContent || !token || !hasUnsavedChanges) return;
-
- const interval = setInterval(async () => {
- try {
- const response = await fetch(
- `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/blocknote-content`,
- {
- method: "PUT",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
- body: JSON.stringify({ blocknote_document: editorContent }),
- }
- );
-
- if (response.ok) {
- setHasUnsavedChanges(false);
- toast.success("Auto-saved", { duration: 2000 });
- }
- } catch (error) {
- console.error("Auto-save failed:", error);
- }
- }, 30000); // 30 seconds
-
- return () => clearInterval(interval);
- }, [editorContent, documentId, token, hasUnsavedChanges]);
+ // TODO: Auto-save every 30 seconds - DIRECT CALL TO FASTAPI
// Save and exit - DIRECT CALL TO FASTAPI
const handleSave = async () => {
From 91bc344b56c5572e7d614815e0da6f7e11162eac Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 30 Nov 2025 03:49:43 +0530
Subject: [PATCH 14/36] feat: Added celery tasks to populate
`blocknote_document` for existing documents
---
.../38_add_blocknote_fields_to_documents.py | 18 +-
surfsense_backend/app/celery_app.py | 1 +
surfsense_backend/app/routes/editor_routes.py | 53 +++++-
.../celery_tasks/blocknote_migration_tasks.py | 161 ++++++++++++++++++
.../document_processors/file_processors.py | 8 +-
.../editor/[documentId]/page.tsx | 2 +-
6 files changed, 231 insertions(+), 12 deletions(-)
create mode 100644 surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
diff --git a/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py b/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
index 742771322..474a96d23 100644
--- a/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
+++ b/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
@@ -20,8 +20,9 @@ depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
- """Upgrade schema - Add BlockNote fields only."""
+ """Upgrade schema - Add BlockNote fields and trigger population task."""
+ # Add the columns
op.add_column(
"documents",
sa.Column(
@@ -42,6 +43,21 @@ def upgrade() -> None:
sa.Column("last_edited_at", sa.TIMESTAMP(timezone=True), nullable=True),
)
+ # Trigger the Celery task to populate blocknote_document for existing documents
+ try:
+ from app.tasks.celery_tasks.blocknote_migration_tasks import (
+ populate_blocknote_for_documents_task,
+ )
+
+ # Queue the task to run asynchronously
+ populate_blocknote_for_documents_task.apply_async()
+ print("✓ Queued Celery task to populate blocknote_document for existing documents")
+ except Exception as e:
+ # If Celery is not available or task queueing fails, log but don't fail the migration
+ print(f"⚠ Warning: Could not queue blocknote population task: {e}")
+ print(" You can manually trigger it later with:")
+ print(" celery -A app.celery_app call app.tasks.celery_tasks.blocknote_migration_tasks.populate_blocknote_for_documents_task")
+
def downgrade() -> None:
"""Downgrade schema - Remove BlockNote fields."""
diff --git a/surfsense_backend/app/celery_app.py b/surfsense_backend/app/celery_app.py
index 898ab9735..1e68a9c47 100644
--- a/surfsense_backend/app/celery_app.py
+++ b/surfsense_backend/app/celery_app.py
@@ -63,6 +63,7 @@ celery_app = Celery(
"app.tasks.celery_tasks.podcast_tasks",
"app.tasks.celery_tasks.connector_tasks",
"app.tasks.celery_tasks.schedule_checker_task",
+ "app.tasks.celery_tasks.blocknote_migration_tasks",
],
)
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
index a34c80db0..f52a2fef9 100644
--- a/surfsense_backend/app/routes/editor_routes.py
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -30,11 +30,13 @@ async def get_editor_content(
Get document content for editing.
Returns BlockNote JSON document. If blocknote_document is NULL,
- attempts to convert from `content` - though this won't work well
- for old documents that only have summaries.
+ attempts to generate it from chunks (lazy migration).
"""
+ from sqlalchemy.orm import selectinload
+
result = await session.execute(
select(Document)
+ .options(selectinload(Document.chunks))
.join(SearchSpace)
.filter(Document.id == document_id, SearchSpace.user_id == user.id)
)
@@ -54,12 +56,47 @@ async def get_editor_content(
else None,
}
- # For old documents without blocknote_document, return error
- # (Can't convert summary back to full document)
- raise HTTPException(
- status_code=400,
- detail="This document was uploaded before editing was enabled. Please re-upload to enable editing.",
- )
+ # Lazy migration: Try to generate blocknote_document from chunks
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ chunks = sorted(document.chunks, key=lambda c: c.id)
+
+ if not chunks:
+ raise HTTPException(
+ status_code=400,
+ detail="This document has no chunks and cannot be edited. Please re-upload to enable editing.",
+ )
+
+ # Reconstruct markdown from chunks
+ markdown_content = "\n\n".join(chunk.content for chunk in chunks)
+
+ if not markdown_content.strip():
+ raise HTTPException(
+ status_code=400,
+ detail="This document has empty content and cannot be edited.",
+ )
+
+ # Convert to BlockNote
+ blocknote_json = await convert_markdown_to_blocknote(markdown_content)
+
+ if not blocknote_json:
+ raise HTTPException(
+ status_code=500,
+ detail="Failed to convert document to editable format. Please try again later.",
+ )
+
+ # Save the generated blocknote_document (lazy migration)
+ document.blocknote_document = blocknote_json
+ document.content_needs_reindexing = False
+ document.last_edited_at = None
+ await session.commit()
+
+ return {
+ "document_id": document.id,
+ "title": document.title,
+ "blocknote_document": blocknote_json,
+ "last_edited_at": None,
+ }
@router.put("/documents/{document_id}/blocknote-content")
diff --git a/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py b/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
new file mode 100644
index 000000000..abac51a40
--- /dev/null
+++ b/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
@@ -0,0 +1,161 @@
+"""Celery tasks for populating blocknote_document for existing documents."""
+
+import logging
+from typing import Any
+
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
+from sqlalchemy.orm import selectinload
+from sqlalchemy.pool import NullPool
+
+from app.celery_app import celery_app
+from app.config import config
+from app.db import Chunk, Document
+from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+logger = logging.getLogger(__name__)
+
+
+def get_celery_session_maker():
+ """
+ Create a new async session maker for Celery tasks.
+ This is necessary because Celery tasks run in a new event loop,
+ and the default session maker is bound to the main app's event loop.
+ """
+ engine = create_async_engine(
+ config.DATABASE_URL,
+ poolclass=NullPool,
+ echo=False,
+ )
+ return async_sessionmaker(engine, expire_on_commit=False)
+
+
+@celery_app.task(name="populate_blocknote_for_documents", bind=True)
+def populate_blocknote_for_documents_task(
+ self, document_ids: list[int] | None = None, batch_size: int = 50
+):
+ """
+ Celery task to populate blocknote_document for existing documents.
+
+ Args:
+ document_ids: Optional list of specific document IDs to process.
+ If None, processes all documents with blocknote_document IS NULL.
+ batch_size: Number of documents to process in each batch (default: 50)
+ """
+ import asyncio
+
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+
+ try:
+ loop.run_until_complete(
+ _populate_blocknote_for_documents(document_ids, batch_size)
+ )
+ finally:
+ loop.close()
+
+
+async def _populate_blocknote_for_documents(
+ document_ids: list[int] | None = None, batch_size: int = 50
+):
+ """
+ Async function to populate blocknote_document for documents.
+
+ Args:
+ document_ids: Optional list of specific document IDs to process
+ batch_size: Number of documents to process per batch
+ """
+ async with get_celery_session_maker()() as session:
+ try:
+ # Build query for documents that need blocknote_document populated
+ query = select(Document).where(Document.blocknote_document.is_(None))
+
+ # If specific document IDs provided, filter by them
+ if document_ids:
+ query = query.where(Document.id.in_(document_ids))
+
+ # Load chunks relationship to avoid N+1 queries
+ query = query.options(selectinload(Document.chunks))
+
+ # Execute query
+ result = await session.execute(query)
+ documents = result.scalars().all()
+
+ total_documents = len(documents)
+ logger.info(f"Found {total_documents} documents to process")
+
+ if total_documents == 0:
+ logger.info("No documents to process")
+ return
+
+ # Process documents in batches
+ processed = 0
+ failed = 0
+
+ for i in range(0, total_documents, batch_size):
+ batch = documents[i : i + batch_size]
+ logger.info(f"Processing batch {i // batch_size + 1}: documents {i+1}-{min(i+batch_size, total_documents)}")
+
+ for document in batch:
+ try:
+ # Use preloaded chunks from selectinload - no need to query again
+ chunks = sorted(document.chunks, key=lambda c: c.id)
+
+ if not chunks:
+ logger.warning(
+ f"Document {document.id} ({document.title}) has no chunks, skipping"
+ )
+ failed += 1
+ continue
+
+ # Reconstruct markdown by concatenating chunk contents
+ markdown_content = "\n\n".join(chunk.content for chunk in chunks)
+
+ if not markdown_content or not markdown_content.strip():
+ logger.warning(
+ f"Document {document.id} ({document.title}) has empty markdown content, skipping"
+ )
+ failed += 1
+ continue
+
+ # Convert markdown to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(markdown_content)
+
+ if not blocknote_json:
+ logger.warning(
+ f"Failed to convert markdown to BlockNote for document {document.id} ({document.title})"
+ )
+ failed += 1
+ continue
+
+ # Update document with blocknote_document (other fields already have correct defaults)
+ document.blocknote_document = blocknote_json
+
+ processed += 1
+
+ # Commit every batch_size documents to avoid long transactions
+ if processed % batch_size == 0:
+ await session.commit()
+ logger.info(f"Committed batch: {processed} documents processed so far")
+
+ except Exception as e:
+ logger.error(
+ f"Error processing document {document.id} ({document.title}): {e}",
+ exc_info=True,
+ )
+ failed += 1
+ # Continue with next document instead of failing entire batch
+ continue
+
+ # Commit remaining changes in the batch
+ await session.commit()
+ logger.info(f"Completed batch {i // batch_size + 1}")
+
+ logger.info(
+ f"Migration complete: {processed} documents processed, {failed} failed"
+ )
+
+ except Exception as e:
+ await session.rollback()
+ logger.error(f"Error in blocknote migration task: {e}", exc_info=True)
+ raise
diff --git a/surfsense_backend/app/tasks/document_processors/file_processors.py b/surfsense_backend/app/tasks/document_processors/file_processors.py
index 95cf1c462..3b026b93e 100644
--- a/surfsense_backend/app/tasks/document_processors/file_processors.py
+++ b/surfsense_backend/app/tasks/document_processors/file_processors.py
@@ -396,7 +396,9 @@ async def add_received_file_document_using_docling(
"ETL_SERVICE": "DOCLING",
}
existing_document.chunks = chunks
- existing_document.blocknote_document = blocknote_json
+ existing_document.blocknote_document = None
+ existing_document.content_needs_reindexing = False
+ existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@@ -416,7 +418,9 @@ async def add_received_file_document_using_docling(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
- blocknote_document=blocknote_json,
+ blocknote_document=None,
+ content_needs_reindexing=False,
+ last_edited_at=None,
)
session.add(document)
diff --git a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
index 5a0dae2d6..ce26afc38 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
@@ -96,7 +96,7 @@ export default function EditorPage() {
}
}, [editorContent, document]);
- // TODO: Auto-save every 30 seconds - DIRECT CALL TO FASTAPI
+ // TODO: Maybe add Auto-save every 30 seconds - DIRECT CALL TO FASTAPI
// Save and exit - DIRECT CALL TO FASTAPI
const handleSave = async () => {
From f8e4926969c15e31aecd9b152be2741ac29d5d1b Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 30 Nov 2025 04:08:12 +0530
Subject: [PATCH 15/36] feat: Implement document saving with reindexing
- Updated the document saving endpoint to trigger reindexing after saving.
- Introduced a new Celery task for reindexing documents.
- Refactored the editor page to reflect the changes in the API endpoint and method.
---
surfsense_backend/app/celery_app.py | 1 +
surfsense_backend/app/routes/editor_routes.py | 102 ++++----------
.../celery_tasks/document_reindex_tasks.py | 128 ++++++++++++++++++
.../editor/[documentId]/page.tsx | 8 +-
4 files changed, 156 insertions(+), 83 deletions(-)
create mode 100644 surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
diff --git a/surfsense_backend/app/celery_app.py b/surfsense_backend/app/celery_app.py
index 1e68a9c47..f7bea8cc3 100644
--- a/surfsense_backend/app/celery_app.py
+++ b/surfsense_backend/app/celery_app.py
@@ -64,6 +64,7 @@ celery_app = Celery(
"app.tasks.celery_tasks.connector_tasks",
"app.tasks.celery_tasks.schedule_checker_task",
"app.tasks.celery_tasks.blocknote_migration_tasks",
+ "app.tasks.celery_tasks.document_reindex_tasks",
],
)
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
index f52a2fef9..8d0af667c 100644
--- a/surfsense_backend/app/routes/editor_routes.py
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -99,103 +99,47 @@ async def get_editor_content(
}
-@router.put("/documents/{document_id}/blocknote-content")
-async def update_blocknote_content(
+@router.post("/documents/{document_id}/save")
+async def save_document(
document_id: int,
data: dict[str, Any],
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""
- Auto-save BlockNote document during editing.
- Only updates blocknote_document field, not content.
+ Save BlockNote document and trigger reindexing.
+ Called when user clicks 'Save & Exit'.
"""
+ from app.tasks.celery_tasks.document_reindex_tasks import reindex_document_task
+
+ # Verify ownership
result = await session.execute(
select(Document)
.join(SearchSpace)
.filter(Document.id == document_id, SearchSpace.user_id == user.id)
)
document = result.scalars().first()
-
+
if not document:
raise HTTPException(status_code=404, detail="Document not found")
-
+
blocknote_document = data.get("blocknote_document")
if not blocknote_document:
raise HTTPException(status_code=400, detail="blocknote_document is required")
-
- # Update only blocknote_document and last_edited_at
+
+ # Save BlockNote document
document.blocknote_document = blocknote_document
document.last_edited_at = datetime.now(UTC)
-
+ document.content_needs_reindexing = True
+
await session.commit()
- await session.refresh(document)
-
- return {"status": "saved", "last_edited_at": document.last_edited_at.isoformat()}
-
-
-# did not implement reindexing (for now)
-# @router.post("/documents/{document_id}/finalize-edit")
-# async def finalize_edit(
-# document_id: int,
-# session: AsyncSession = Depends(get_async_session),
-# user: User = Depends(current_active_user),
-# ):
-# """
-# Finalize document editing: convert BlockNote to markdown,
-# update content (summary), and trigger reindexing.
-# """
-# result = await session.execute(
-# select(Document)
-# .join(SearchSpace)
-# .filter(Document.id == document_id, SearchSpace.user_id == user.id)
-# )
-# document = result.scalars().first()
-
-# if not document:
-# raise HTTPException(status_code=404, detail="Document not found")
-
-# if not document.blocknote_document:
-# raise HTTPException(
-# status_code=400,
-# detail="Document has no BlockNote content to finalize"
-# )
-
-# # 1. Convert BlockNote JSON → Markdown
-# full_markdown = await convert_blocknote_to_markdown(document.blocknote_document)
-
-# if not full_markdown:
-# raise HTTPException(
-# status_code=500,
-# detail="Failed to convert BlockNote document to markdown"
-# )
-
-# # 2. Generate new summary from full markdown
-# from app.services.llm_service import get_user_long_context_llm
-# from app.utils.document_converters import generate_document_summary
-
-# user_llm = await get_user_long_context_llm(session, str(user.id), document.search_space_id)
-# if not user_llm:
-# raise HTTPException(
-# status_code=500,
-# detail="No LLM configured for summary generation"
-# )
-
-# document_metadata = document.document_metadata or {}
-# summary_content, summary_embedding = await generate_document_summary(
-# full_markdown, user_llm, document_metadata
-# )
-
-# # 3. Update document fields
-# document.content = summary_content
-# document.embedding = summary_embedding
-# document.content_needs_reindexing = True # Trigger chunk regeneration
-# document.last_edited_at = datetime.now(UTC)
-
-# await session.commit()
-
-# return {
-# "status": "finalized",
-# "message": "Document saved. Summary and chunks will be regenerated in the background.",
-# "content_needs_reindexing": True,
-# }
+
+ # Queue reindex task
+ reindex_document_task.delay(document_id, str(user.id))
+
+ return {
+ "status": "saved",
+ "document_id": document_id,
+ "message": "Document saved and will be reindexed in the background",
+ "last_edited_at": document.last_edited_at.isoformat()
+ }
diff --git a/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py b/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
new file mode 100644
index 000000000..93c33ce49
--- /dev/null
+++ b/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
@@ -0,0 +1,128 @@
+"""Celery tasks for reindexing edited documents."""
+
+import logging
+
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
+from sqlalchemy.pool import NullPool
+from sqlalchemy import delete
+from sqlalchemy.orm import selectinload
+
+from app.celery_app import celery_app
+from app.config import config
+from app.db import Document
+from app.utils.blocknote_converter import convert_blocknote_to_markdown
+from app.utils.document_converters import (
+ create_document_chunks,
+ generate_document_summary,
+)
+from app.services.llm_service import get_user_long_context_llm
+
+logger = logging.getLogger(__name__)
+
+
+def get_celery_session_maker():
+ """Create async session maker for Celery tasks."""
+ engine = create_async_engine(
+ config.DATABASE_URL,
+ poolclass=NullPool,
+ echo=False,
+ )
+ return async_sessionmaker(engine, expire_on_commit=False)
+
+
+@celery_app.task(name="reindex_document", bind=True)
+def reindex_document_task(self, document_id: int, user_id: str):
+ """
+ Celery task to reindex a document after editing.
+
+ Args:
+ document_id: ID of document to reindex
+ user_id: ID of user who edited the document
+ """
+ import asyncio
+
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+
+ try:
+ loop.run_until_complete(_reindex_document(document_id, user_id))
+ finally:
+ loop.close()
+
+
+async def _reindex_document(document_id: int, user_id: str):
+ """Async function to reindex a document."""
+ async with get_celery_session_maker()() as session:
+ try:
+ # Get document
+ result = await session.execute(
+ select(Document)
+ .options(selectinload(Document.chunks)) # Eagerly load chunks
+ .where(Document.id == document_id)
+ )
+ document = result.scalars().first()
+
+ if not document:
+ logger.error(f"Document {document_id} not found")
+ return
+
+ if not document.blocknote_document:
+ logger.warning(f"Document {document_id} has no BlockNote content")
+ return
+
+ logger.info(f"Reindexing document {document_id} ({document.title})")
+
+ # 1. Convert BlockNote → Markdown
+ markdown_content = await convert_blocknote_to_markdown(
+ document.blocknote_document
+ )
+
+ if not markdown_content:
+ logger.error(f"Failed to convert document {document_id} to markdown")
+ return
+
+ # 2. Delete old chunks explicitly
+ from app.db import Chunk
+ await session.execute(
+ delete(Chunk).where(Chunk.document_id == document_id)
+ )
+ await session.flush() # Ensure old chunks are deleted
+
+ # 3. Create new chunks
+ new_chunks = await create_document_chunks(markdown_content)
+
+ # 4. Add new chunks to session
+ for chunk in new_chunks:
+ chunk.document_id = document_id
+ session.add(chunk)
+
+ logger.info(f"Created {len(new_chunks)} chunks for document {document_id}")
+
+ # 5. Regenerate summary
+ user_llm = await get_user_long_context_llm(
+ session, user_id, document.search_space_id
+ )
+
+ document_metadata = {
+ "title": document.title,
+ "document_type": document.document_type.value,
+ }
+
+ summary_content, summary_embedding = await generate_document_summary(
+ markdown_content, user_llm, document_metadata
+ )
+
+ # 6. Update document
+ document.content = summary_content
+ document.embedding = summary_embedding
+ document.content_needs_reindexing = False
+
+ await session.commit()
+
+ logger.info(f"Successfully reindexed document {document_id}")
+
+ except Exception as e:
+ await session.rollback()
+ logger.error(f"Error reindexing document {document_id}: {e}", exc_info=True)
+ raise
diff --git a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
index ce26afc38..544834372 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
@@ -112,11 +112,11 @@ export default function EditorPage() {
setSaving(true);
try {
- // Save blocknote_document to database (without finalizing/reindexing)
+ // Save blocknote_document and trigger reindexing in background
const response = await fetch(
- `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/blocknote-content`,
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/save`,
{
- method: "PUT",
+ method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
@@ -133,7 +133,7 @@ export default function EditorPage() {
}
setHasUnsavedChanges(false);
- toast.success("Document saved successfully");
+ toast.success("Document saved! Reindexing in background...");
// Small delay before redirect to show success message
setTimeout(() => {
From e419702ebd56e5d756b14d35f80c5e1ccc25ad79 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 30 Nov 2025 04:15:38 +0530
Subject: [PATCH 16/36] fix: run ruff formatter to fix code quality
---
.../38_add_blocknote_fields_to_documents.py | 10 +++-
surfsense_backend/app/routes/editor_routes.py | 34 +++++------
.../celery_tasks/blocknote_migration_tasks.py | 60 +++++++++++--------
.../celery_tasks/document_reindex_tasks.py | 39 ++++++------
4 files changed, 77 insertions(+), 66 deletions(-)
diff --git a/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py b/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
index 474a96d23..d575f53ad 100644
--- a/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
+++ b/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
@@ -48,15 +48,19 @@ def upgrade() -> None:
from app.tasks.celery_tasks.blocknote_migration_tasks import (
populate_blocknote_for_documents_task,
)
-
+
# Queue the task to run asynchronously
populate_blocknote_for_documents_task.apply_async()
- print("✓ Queued Celery task to populate blocknote_document for existing documents")
+ print(
+ "✓ Queued Celery task to populate blocknote_document for existing documents"
+ )
except Exception as e:
# If Celery is not available or task queueing fails, log but don't fail the migration
print(f"⚠ Warning: Could not queue blocknote population task: {e}")
print(" You can manually trigger it later with:")
- print(" celery -A app.celery_app call app.tasks.celery_tasks.blocknote_migration_tasks.populate_blocknote_for_documents_task")
+ print(
+ " celery -A app.celery_app call app.tasks.celery_tasks.blocknote_migration_tasks.populate_blocknote_for_documents_task"
+ )
def downgrade() -> None:
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
index 8d0af667c..5e2363836 100644
--- a/surfsense_backend/app/routes/editor_routes.py
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -33,7 +33,7 @@ async def get_editor_content(
attempts to generate it from chunks (lazy migration).
"""
from sqlalchemy.orm import selectinload
-
+
result = await session.execute(
select(Document)
.options(selectinload(Document.chunks))
@@ -58,39 +58,39 @@ async def get_editor_content(
# Lazy migration: Try to generate blocknote_document from chunks
from app.utils.blocknote_converter import convert_markdown_to_blocknote
-
+
chunks = sorted(document.chunks, key=lambda c: c.id)
-
+
if not chunks:
raise HTTPException(
status_code=400,
detail="This document has no chunks and cannot be edited. Please re-upload to enable editing.",
)
-
+
# Reconstruct markdown from chunks
markdown_content = "\n\n".join(chunk.content for chunk in chunks)
-
+
if not markdown_content.strip():
raise HTTPException(
status_code=400,
detail="This document has empty content and cannot be edited.",
)
-
+
# Convert to BlockNote
blocknote_json = await convert_markdown_to_blocknote(markdown_content)
-
+
if not blocknote_json:
raise HTTPException(
status_code=500,
detail="Failed to convert document to editable format. Please try again later.",
)
-
+
# Save the generated blocknote_document (lazy migration)
document.blocknote_document = blocknote_json
document.content_needs_reindexing = False
document.last_edited_at = None
await session.commit()
-
+
return {
"document_id": document.id,
"title": document.title,
@@ -111,7 +111,7 @@ async def save_document(
Called when user clicks 'Save & Exit'.
"""
from app.tasks.celery_tasks.document_reindex_tasks import reindex_document_task
-
+
# Verify ownership
result = await session.execute(
select(Document)
@@ -119,27 +119,27 @@ async def save_document(
.filter(Document.id == document_id, SearchSpace.user_id == user.id)
)
document = result.scalars().first()
-
+
if not document:
raise HTTPException(status_code=404, detail="Document not found")
-
+
blocknote_document = data.get("blocknote_document")
if not blocknote_document:
raise HTTPException(status_code=400, detail="blocknote_document is required")
-
+
# Save BlockNote document
document.blocknote_document = blocknote_document
document.last_edited_at = datetime.now(UTC)
document.content_needs_reindexing = True
-
+
await session.commit()
-
+
# Queue reindex task
reindex_document_task.delay(document_id, str(user.id))
-
+
return {
"status": "saved",
"document_id": document_id,
"message": "Document saved and will be reindexed in the background",
- "last_edited_at": document.last_edited_at.isoformat()
+ "last_edited_at": document.last_edited_at.isoformat(),
}
diff --git a/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py b/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
index abac51a40..f9b7789b3 100644
--- a/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
+++ b/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
@@ -36,7 +36,7 @@ def populate_blocknote_for_documents_task(
):
"""
Celery task to populate blocknote_document for existing documents.
-
+
Args:
document_ids: Optional list of specific document IDs to process.
If None, processes all documents with blocknote_document IS NULL.
@@ -60,7 +60,7 @@ async def _populate_blocknote_for_documents(
):
"""
Async function to populate blocknote_document for documents.
-
+
Args:
document_ids: Optional list of specific document IDs to process
batch_size: Number of documents to process per batch
@@ -69,75 +69,83 @@ async def _populate_blocknote_for_documents(
try:
# Build query for documents that need blocknote_document populated
query = select(Document).where(Document.blocknote_document.is_(None))
-
+
# If specific document IDs provided, filter by them
if document_ids:
query = query.where(Document.id.in_(document_ids))
-
+
# Load chunks relationship to avoid N+1 queries
query = query.options(selectinload(Document.chunks))
-
+
# Execute query
result = await session.execute(query)
documents = result.scalars().all()
-
+
total_documents = len(documents)
logger.info(f"Found {total_documents} documents to process")
-
+
if total_documents == 0:
logger.info("No documents to process")
return
-
+
# Process documents in batches
processed = 0
failed = 0
-
+
for i in range(0, total_documents, batch_size):
batch = documents[i : i + batch_size]
- logger.info(f"Processing batch {i // batch_size + 1}: documents {i+1}-{min(i+batch_size, total_documents)}")
-
+ logger.info(
+ f"Processing batch {i // batch_size + 1}: documents {i + 1}-{min(i + batch_size, total_documents)}"
+ )
+
for document in batch:
try:
# Use preloaded chunks from selectinload - no need to query again
chunks = sorted(document.chunks, key=lambda c: c.id)
-
+
if not chunks:
logger.warning(
f"Document {document.id} ({document.title}) has no chunks, skipping"
)
failed += 1
continue
-
+
# Reconstruct markdown by concatenating chunk contents
- markdown_content = "\n\n".join(chunk.content for chunk in chunks)
-
+ markdown_content = "\n\n".join(
+ chunk.content for chunk in chunks
+ )
+
if not markdown_content or not markdown_content.strip():
logger.warning(
f"Document {document.id} ({document.title}) has empty markdown content, skipping"
)
failed += 1
continue
-
+
# Convert markdown to BlockNote JSON
- blocknote_json = await convert_markdown_to_blocknote(markdown_content)
-
+ blocknote_json = await convert_markdown_to_blocknote(
+ markdown_content
+ )
+
if not blocknote_json:
logger.warning(
f"Failed to convert markdown to BlockNote for document {document.id} ({document.title})"
)
failed += 1
continue
-
+
# Update document with blocknote_document (other fields already have correct defaults)
document.blocknote_document = blocknote_json
-
+
processed += 1
-
+
# Commit every batch_size documents to avoid long transactions
if processed % batch_size == 0:
await session.commit()
- logger.info(f"Committed batch: {processed} documents processed so far")
-
+ logger.info(
+ f"Committed batch: {processed} documents processed so far"
+ )
+
except Exception as e:
logger.error(
f"Error processing document {document.id} ({document.title}): {e}",
@@ -146,15 +154,15 @@ async def _populate_blocknote_for_documents(
failed += 1
# Continue with next document instead of failing entire batch
continue
-
+
# Commit remaining changes in the batch
await session.commit()
logger.info(f"Completed batch {i // batch_size + 1}")
-
+
logger.info(
f"Migration complete: {processed} documents processed, {failed} failed"
)
-
+
except Exception as e:
await session.rollback()
logger.error(f"Error in blocknote migration task: {e}", exc_info=True)
diff --git a/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py b/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
index 93c33ce49..e969cc806 100644
--- a/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
+++ b/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
@@ -35,7 +35,7 @@ def get_celery_session_maker():
def reindex_document_task(self, document_id: int, user_id: str):
"""
Celery task to reindex a document after editing.
-
+
Args:
document_id: ID of document to reindex
user_id: ID of user who edited the document
@@ -62,66 +62,65 @@ async def _reindex_document(document_id: int, user_id: str):
.where(Document.id == document_id)
)
document = result.scalars().first()
-
+
if not document:
logger.error(f"Document {document_id} not found")
return
-
+
if not document.blocknote_document:
logger.warning(f"Document {document_id} has no BlockNote content")
return
-
+
logger.info(f"Reindexing document {document_id} ({document.title})")
-
+
# 1. Convert BlockNote → Markdown
markdown_content = await convert_blocknote_to_markdown(
document.blocknote_document
)
-
+
if not markdown_content:
logger.error(f"Failed to convert document {document_id} to markdown")
return
-
+
# 2. Delete old chunks explicitly
from app.db import Chunk
- await session.execute(
- delete(Chunk).where(Chunk.document_id == document_id)
- )
+
+ await session.execute(delete(Chunk).where(Chunk.document_id == document_id))
await session.flush() # Ensure old chunks are deleted
-
+
# 3. Create new chunks
new_chunks = await create_document_chunks(markdown_content)
-
+
# 4. Add new chunks to session
for chunk in new_chunks:
chunk.document_id = document_id
session.add(chunk)
-
+
logger.info(f"Created {len(new_chunks)} chunks for document {document_id}")
-
+
# 5. Regenerate summary
user_llm = await get_user_long_context_llm(
session, user_id, document.search_space_id
)
-
+
document_metadata = {
"title": document.title,
"document_type": document.document_type.value,
}
-
+
summary_content, summary_embedding = await generate_document_summary(
markdown_content, user_llm, document_metadata
)
-
+
# 6. Update document
document.content = summary_content
document.embedding = summary_embedding
document.content_needs_reindexing = False
-
+
await session.commit()
-
+
logger.info(f"Successfully reindexed document {document_id}")
-
+
except Exception as e:
await session.rollback()
logger.error(f"Error reindexing document {document_id}: {e}", exc_info=True)
From 4e6b4cc57dbd201a67ad97cf0621d03d35c3ab5b Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 30 Nov 2025 04:25:27 +0530
Subject: [PATCH 17/36] fix: clean up code and add editor router import in routes init
---
surfsense_backend/app/routes/__init__.py | 1 +
surfsense_backend/app/routes/editor_routes.py | 5 -----
.../app/tasks/celery_tasks/document_reindex_tasks.py | 7 +++----
3 files changed, 4 insertions(+), 9 deletions(-)
diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py
index 10ddefe14..4b829fe84 100644
--- a/surfsense_backend/app/routes/__init__.py
+++ b/surfsense_backend/app/routes/__init__.py
@@ -24,6 +24,7 @@ router = APIRouter()
router.include_router(search_spaces_router)
router.include_router(rbac_router) # RBAC routes for roles, members, invites
+router.include_router(editor_router)
router.include_router(documents_router)
router.include_router(podcasts_router)
router.include_router(chats_router)
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
index 5e2363836..0bc7ed50b 100644
--- a/surfsense_backend/app/routes/editor_routes.py
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -12,11 +12,6 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.db import Document, SearchSpace, User, get_async_session
from app.users import current_active_user
-# from app.utils.blocknote_converter import (
-# convert_blocknote_to_markdown,
-# convert_markdown_to_blocknote,
-# )
-
router = APIRouter()
diff --git a/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py b/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
index e969cc806..8ab5309f2 100644
--- a/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
+++ b/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
@@ -2,21 +2,20 @@
import logging
-from sqlalchemy import select
+from sqlalchemy import delete, select
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
-from sqlalchemy.pool import NullPool
-from sqlalchemy import delete
from sqlalchemy.orm import selectinload
+from sqlalchemy.pool import NullPool
from app.celery_app import celery_app
from app.config import config
from app.db import Document
+from app.services.llm_service import get_user_long_context_llm
from app.utils.blocknote_converter import convert_blocknote_to_markdown
from app.utils.document_converters import (
create_document_chunks,
generate_document_summary,
)
-from app.services.llm_service import get_user_long_context_llm
logger = logging.getLogger(__name__)
From 5e53207edc3868cf94fb557adfb33fa88d05cc37 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 30 Nov 2025 04:57:07 +0530
Subject: [PATCH 18/36] refactor: update alembic migration revision ID and
 add blocknote defaults in `file_processors.py`
---
...py => 43_add_blocknote_fields_to_documents.py} | 15 ++++++++++-----
.../tasks/document_processors/file_processors.py | 12 ++++++++++--
2 files changed, 20 insertions(+), 7 deletions(-)
rename surfsense_backend/alembic/versions/{38_add_blocknote_fields_to_documents.py => 43_add_blocknote_fields_to_documents.py} (85%)
diff --git a/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py b/surfsense_backend/alembic/versions/43_add_blocknote_fields_to_documents.py
similarity index 85%
rename from surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
rename to surfsense_backend/alembic/versions/43_add_blocknote_fields_to_documents.py
index d575f53ad..32e7780eb 100644
--- a/surfsense_backend/alembic/versions/38_add_blocknote_fields_to_documents.py
+++ b/surfsense_backend/alembic/versions/43_add_blocknote_fields_to_documents.py
@@ -1,8 +1,13 @@
-"""38_add_blocknote_fields_to_documents
+"""43_add_blocknote_fields_to_documents
-Revision ID: 38
-Revises: 37
+Revision ID: 43
+Revises: 42
+Create Date: 2025-11-30
+Adds fields for live document editing:
+- blocknote_document: JSONB editor state
+- content_needs_reindexing: Flag for regenerating chunks/summary
+- last_edited_at: Last edit timestamp
"""
from collections.abc import Sequence
@@ -13,8 +18,8 @@ from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
-revision: str = "38"
-down_revision: str | None = "37"
+revision: str = "43"
+down_revision: str | None = "42"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
diff --git a/surfsense_backend/app/tasks/document_processors/file_processors.py b/surfsense_backend/app/tasks/document_processors/file_processors.py
index 3b026b93e..4ae04e050 100644
--- a/surfsense_backend/app/tasks/document_processors/file_processors.py
+++ b/surfsense_backend/app/tasks/document_processors/file_processors.py
@@ -122,6 +122,8 @@ async def add_received_file_document_using_unstructured(
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
+ existing_document.content_needs_reindexing = False
+ existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@@ -142,6 +144,8 @@ async def add_received_file_document_using_unstructured(
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
+ content_needs_reindexing=False,
+ last_edited_at=None,
)
session.add(document)
@@ -247,6 +251,8 @@ async def add_received_file_document_using_llamacloud(
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
+ existing_document.content_needs_reindexing = False
+ existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@@ -267,6 +273,8 @@ async def add_received_file_document_using_llamacloud(
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
+ content_needs_reindexing=False,
+ last_edited_at=None,
)
session.add(document)
@@ -396,7 +404,7 @@ async def add_received_file_document_using_docling(
"ETL_SERVICE": "DOCLING",
}
existing_document.chunks = chunks
- existing_document.blocknote_document = None
+ existing_document.blocknote_document = blocknote_json
existing_document.content_needs_reindexing = False
existing_document.last_edited_at = None
@@ -418,7 +426,7 @@ async def add_received_file_document_using_docling(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
- blocknote_document=None,
+ blocknote_document=blocknote_json,
content_needs_reindexing=False,
last_edited_at=None,
)
From f92112a79ac8de40d956a11f7bdad7bfb5e0049e Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Sun, 30 Nov 2025 11:37:50 +0530
Subject: [PATCH 19/36] fix: code cleanup
---
.../app/tasks/celery_tasks/blocknote_migration_tasks.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py b/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
index f9b7789b3..c945bcb04 100644
--- a/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
+++ b/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
@@ -1,7 +1,6 @@
"""Celery tasks for populating blocknote_document for existing documents."""
import logging
-from typing import Any
from sqlalchemy import select
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
@@ -10,7 +9,7 @@ from sqlalchemy.pool import NullPool
from app.celery_app import celery_app
from app.config import config
-from app.db import Chunk, Document
+from app.db import Document
from app.utils.blocknote_converter import convert_markdown_to_blocknote
logger = logging.getLogger(__name__)
From d0c7be7eca0a9d3975c56dc6854a65b6a4b63ce2 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Sun, 30 Nov 2025 14:27:27 -0800
Subject: [PATCH 20/36] feat: update document editor routes to include search
space context
- Modified API endpoints to include search space ID in the URL for fetching and saving documents.
- Added permission checks for reading and updating documents based on search space.
---
surfsense_backend/app/routes/editor_routes.py | 48 +++++++++++++++----
.../editor/[documentId]/page.tsx | 4 +-
2 files changed, 40 insertions(+), 12 deletions(-)
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
index 0bc7ed50b..0d7f6eba6 100644
--- a/surfsense_backend/app/routes/editor_routes.py
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -8,15 +8,20 @@ from typing import Any
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import selectinload
-from app.db import Document, SearchSpace, User, get_async_session
+from app.db import Document, Permission, User, get_async_session
from app.users import current_active_user
+from app.utils.rbac import check_permission
router = APIRouter()
-@router.get("/documents/{document_id}/editor-content")
+@router.get(
+ "/search-spaces/{search_space_id}/documents/{document_id}/editor-content"
+)
async def get_editor_content(
+ search_space_id: int,
document_id: int,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
@@ -26,14 +31,25 @@ async def get_editor_content(
Returns BlockNote JSON document. If blocknote_document is NULL,
attempts to generate it from chunks (lazy migration).
+
+ Requires DOCUMENTS_READ permission.
"""
- from sqlalchemy.orm import selectinload
+ # Check RBAC permission
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.DOCUMENTS_READ.value,
+ "You don't have permission to read documents in this search space",
+ )
result = await session.execute(
select(Document)
.options(selectinload(Document.chunks))
- .join(SearchSpace)
- .filter(Document.id == document_id, SearchSpace.user_id == user.id)
+ .filter(
+ Document.id == document_id,
+ Document.search_space_id == search_space_id,
+ )
)
document = result.scalars().first()
@@ -94,8 +110,9 @@ async def get_editor_content(
}
-@router.post("/documents/{document_id}/save")
+@router.post("/search-spaces/{search_space_id}/documents/{document_id}/save")
async def save_document(
+ search_space_id: int,
document_id: int,
data: dict[str, Any],
session: AsyncSession = Depends(get_async_session),
@@ -104,14 +121,25 @@ async def save_document(
"""
Save BlockNote document and trigger reindexing.
Called when user clicks 'Save & Exit'.
+
+ Requires DOCUMENTS_UPDATE permission.
"""
from app.tasks.celery_tasks.document_reindex_tasks import reindex_document_task
- # Verify ownership
+ # Check RBAC permission
+ await check_permission(
+ session,
+ user,
+ search_space_id,
+ Permission.DOCUMENTS_UPDATE.value,
+ "You don't have permission to update documents in this search space",
+ )
+
result = await session.execute(
- select(Document)
- .join(SearchSpace)
- .filter(Document.id == document_id, SearchSpace.user_id == user.id)
+ select(Document).filter(
+ Document.id == document_id,
+ Document.search_space_id == search_space_id,
+ )
)
document = result.scalars().first()
diff --git a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
index 544834372..71ce2fa2f 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
@@ -45,7 +45,7 @@ export default function EditorPage() {
try {
const response = await fetch(
- `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/editor-content`,
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${params.search_space_id}/documents/${documentId}/editor-content`,
{
headers: {
Authorization: `Bearer ${token}`,
@@ -114,7 +114,7 @@ export default function EditorPage() {
try {
// Save blocknote_document and trigger reindexing in background
const response = await fetch(
- `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/save`,
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${params.search_space_id}/documents/${documentId}/save`,
{
method: "POST",
headers: {
From ea94c778c912f6cbc55e2c3473bed8c0fd7eb1e5 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Sun, 30 Nov 2025 15:06:48 -0800
Subject: [PATCH 21/36] refactor: streamline document actions
- Simplified the RowActions component by removing unused imports and replacing the dropdown menu with tooltip buttons for editing, viewing metadata, and deleting documents.
- Updated the JsonMetadataViewer component to support controlled mode for better integration with the RowActions component.
- Adjusted CSS variables for destructive actions in globals.css for improved visual consistency.
---
surfsense_backend/app/routes/editor_routes.py | 4 +-
.../(manage)/components/RowActions.tsx | 174 +++++++++++-------
surfsense_web/app/globals.css | 6 +-
.../components/json-metadata-viewer.tsx | 27 ++-
4 files changed, 135 insertions(+), 76 deletions(-)
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
index 0d7f6eba6..9beebfc8e 100644
--- a/surfsense_backend/app/routes/editor_routes.py
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -17,9 +17,7 @@ from app.utils.rbac import check_permission
router = APIRouter()
-@router.get(
- "/search-spaces/{search_space_id}/documents/{document_id}/editor-content"
-)
+@router.get("/search-spaces/{search_space_id}/documents/{document_id}/editor-content")
async def get_editor_content(
search_space_id: int,
document_id: int,
diff --git a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
index ea4c66228..1c4d440e7 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
@@ -1,6 +1,7 @@
"use client";
-import { FileText, MoreHorizontal, Pencil, Trash2 } from "lucide-react";
+import { FileText, Pencil, Trash2 } from "lucide-react";
+import { motion } from "motion/react";
import { useRouter } from "next/navigation";
import { useState } from "react";
import { toast } from "sonner";
@@ -13,16 +14,9 @@ import {
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
- AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import { Button } from "@/components/ui/button";
-import {
- DropdownMenu,
- DropdownMenuContent,
- DropdownMenuItem,
- DropdownMenuSeparator,
- DropdownMenuTrigger,
-} from "@/components/ui/dropdown-menu";
+import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import type { Document } from "./types";
export function RowActions({
@@ -36,7 +30,8 @@ export function RowActions({
refreshDocuments: () => Promise;
searchSpaceId: string;
}) {
- const [isOpen, setIsOpen] = useState(false);
+ const [isDeleteOpen, setIsDeleteOpen] = useState(false);
+ const [isMetadataOpen, setIsMetadataOpen] = useState(false);
const [isDeleting, setIsDeleting] = useState(false);
const router = useRouter();
@@ -52,7 +47,7 @@ export function RowActions({
toast.error("Failed to delete document");
} finally {
setIsDeleting(false);
- setIsOpen(false);
+ setIsDeleteOpen(false);
}
};
@@ -61,64 +56,105 @@ export function RowActions({
};
return (
-
-
-
-
- Open menu
-
-
-
-
-
-
- Edit Document
-
-
- e.preventDefault()}>
-
- View Metadata
-
- }
- />
-
-
-
- {
- e.preventDefault();
- setIsOpen(true);
- }}
- >
-
- Delete
-
-
-
-
- Are you sure?
-
-
- Cancel
- {
- e.preventDefault();
- handleDelete();
- }}
- disabled={isDeleting}
- >
- {isDeleting ? "Deleting..." : "Delete"}
-
-
-
-
-
-
+
+ {/* Edit Button */}
+
+
+
+
+
+ Edit Document
+
+
+
+
+ Edit Document
+
+
+
+ {/* View Metadata Button */}
+
+
+
+ setIsMetadataOpen(true)}
+ >
+
+ View Metadata
+
+
+
+
+ View Metadata
+
+
+
+
+ {/* Delete Button */}
+
+
+
+ setIsDeleteOpen(true)}
+ disabled={isDeleting}
+ >
+
+ Delete
+
+
+
+
+ Delete
+
+
+
+
+
+ Are you sure?
+
+
+ Cancel
+ {
+ e.preventDefault();
+ handleDelete();
+ }}
+ disabled={isDeleting}
+ className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
+ >
+ {isDeleting ? "Deleting..." : "Delete"}
+
+
+
+
);
}
diff --git a/surfsense_web/app/globals.css b/surfsense_web/app/globals.css
index a1ee277c6..5aee982bb 100644
--- a/surfsense_web/app/globals.css
+++ b/surfsense_web/app/globals.css
@@ -27,7 +27,7 @@
--accent: oklch(0.97 0 0);
--accent-foreground: oklch(0.205 0 0);
--destructive: oklch(0.577 0.245 27.325);
- --destructive-foreground: oklch(0.577 0.245 27.325);
+ --destructive-foreground: oklch(0.985 0 0);
--border: oklch(0.922 0 0);
--input: oklch(0.922 0 0);
--ring: oklch(0.708 0 0);
@@ -63,8 +63,8 @@
--muted-foreground: oklch(0.708 0 0);
--accent: oklch(0.269 0 0);
--accent-foreground: oklch(0.985 0 0);
- --destructive: oklch(0.396 0.141 25.723);
- --destructive-foreground: oklch(0.637 0.237 25.331);
+ --destructive: oklch(0.577 0.245 27.325);
+ --destructive-foreground: oklch(0.985 0 0);
--border: oklch(0.269 0 0);
--input: oklch(0.269 0 0);
--ring: oklch(0.439 0 0);
diff --git a/surfsense_web/components/json-metadata-viewer.tsx b/surfsense_web/components/json-metadata-viewer.tsx
index 11dd71581..8fe1b10ae 100644
--- a/surfsense_web/components/json-metadata-viewer.tsx
+++ b/surfsense_web/components/json-metadata-viewer.tsx
@@ -15,9 +15,17 @@ interface JsonMetadataViewerProps {
title: string;
metadata: any;
trigger?: React.ReactNode;
+ open?: boolean;
+ onOpenChange?: (open: boolean) => void;
}
-export function JsonMetadataViewer({ title, metadata, trigger }: JsonMetadataViewerProps) {
+export function JsonMetadataViewer({
+ title,
+ metadata,
+ trigger,
+ open,
+ onOpenChange,
+}: JsonMetadataViewerProps) {
// Ensure metadata is a valid object
const jsonData = React.useMemo(() => {
if (!metadata) return {};
@@ -35,6 +43,23 @@ export function JsonMetadataViewer({ title, metadata, trigger }: JsonMetadataVie
}
}, [metadata]);
+ // Controlled mode: when open and onOpenChange are provided
+ if (open !== undefined && onOpenChange !== undefined) {
+ return (
+
+
+
+ {title} - Metadata
+
+
+
+
+
+
+ );
+ }
+
+ // Uncontrolled mode: when using trigger
return (
From 5587b1270e0fddcd312411094a534faf14184699 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Sun, 30 Nov 2025 15:15:27 -0800
Subject: [PATCH 22/36] fix: update API endpoint for fetching editor content
- Changed the API URL in the DashboardBreadcrumb component to include search space ID for fetching editor content, ensuring correct context is used when accessing documents.
---
surfsense_web/components/dashboard-breadcrumb.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/surfsense_web/components/dashboard-breadcrumb.tsx b/surfsense_web/components/dashboard-breadcrumb.tsx
index a05368eba..76abe3817 100644
--- a/surfsense_web/components/dashboard-breadcrumb.tsx
+++ b/surfsense_web/components/dashboard-breadcrumb.tsx
@@ -46,7 +46,7 @@ export function DashboardBreadcrumb() {
if (token) {
fetch(
- `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/editor-content`,
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/documents/${documentId}/editor-content`,
{
headers: {
Authorization: `Bearer ${token}`,
From 9c858a520d113f47e1f2268c851595a0ee1ae5f8 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Tue, 2 Dec 2025 00:05:28 -0800
Subject: [PATCH 23/36] refactor: enhance onboarding experience
- Replaced the completion step component with a more streamlined onboarding page that includes action cards for managing teams, adding sources, and starting chats.
- Updated the search space form to make the description field optional and improved validation messages.
- Adjusted the onboarding logic to support auto-configuration of LLM roles and improved loading states.
- Removed unused imports and components to clean up the codebase.
---
.../[search_space_id]/onboard/page.tsx | 413 +++++++++---------
.../app/dashboard/searchspaces/page.tsx | 10 +-
.../components/onboard/completion-step.tsx | 208 ---------
surfsense_web/components/onboard/index.ts | 8 +
.../onboard/onboard-action-card.tsx | 114 +++++
.../onboard/onboard-advanced-settings.tsx | 144 ++++++
.../components/onboard/onboard-header.tsx | 56 +++
.../components/onboard/onboard-llm-setup.tsx | 93 ++++
.../components/onboard/onboard-loading.tsx | 47 ++
.../components/onboard/onboard-stats.tsx | 38 ++
.../components/search-space-form.tsx | 12 +-
11 files changed, 715 insertions(+), 428 deletions(-)
delete mode 100644 surfsense_web/components/onboard/completion-step.tsx
create mode 100644 surfsense_web/components/onboard/index.ts
create mode 100644 surfsense_web/components/onboard/onboard-action-card.tsx
create mode 100644 surfsense_web/components/onboard/onboard-advanced-settings.tsx
create mode 100644 surfsense_web/components/onboard/onboard-header.tsx
create mode 100644 surfsense_web/components/onboard/onboard-llm-setup.tsx
create mode 100644 surfsense_web/components/onboard/onboard-loading.tsx
create mode 100644 surfsense_web/components/onboard/onboard-stats.tsx
diff --git a/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx
index 1588743e8..150cd772c 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx
@@ -1,21 +1,19 @@
"use client";
-import { ArrowLeft, ArrowRight, Bot, CheckCircle, MessageSquare, Sparkles } from "lucide-react";
-import { AnimatePresence, motion } from "motion/react";
+import { FileText, MessageSquare, UserPlus, Users } from "lucide-react";
+import { motion } from "motion/react";
import { useParams, useRouter } from "next/navigation";
import { useTranslations } from "next-intl";
-import { useEffect, useRef, useState } from "react";
-import { Logo } from "@/components/Logo";
-import { CompletionStep } from "@/components/onboard/completion-step";
-import { SetupLLMStep } from "@/components/onboard/setup-llm-step";
-import { SetupPromptStep } from "@/components/onboard/setup-prompt-step";
-import { Button } from "@/components/ui/button";
-import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
-import { Progress } from "@/components/ui/progress";
+import { useCallback, useEffect, useRef, useState } from "react";
+import { toast } from "sonner";
+import { OnboardActionCard } from "@/components/onboard/onboard-action-card";
+import { OnboardAdvancedSettings } from "@/components/onboard/onboard-advanced-settings";
+import { OnboardHeader } from "@/components/onboard/onboard-header";
+import { OnboardLLMSetup } from "@/components/onboard/onboard-llm-setup";
+import { OnboardLoading } from "@/components/onboard/onboard-loading";
+import { OnboardStats } from "@/components/onboard/onboard-stats";
import { useGlobalLLMConfigs, useLLMConfigs, useLLMPreferences } from "@/hooks/use-llm-configs";
-const TOTAL_STEPS = 3;
-
const OnboardPage = () => {
const t = useTranslations("onboard");
const router = useRouter();
@@ -28,10 +26,17 @@ const OnboardPage = () => {
preferences,
loading: preferencesLoading,
isOnboardingComplete,
+ updatePreferences,
refreshPreferences,
} = useLLMPreferences(searchSpaceId);
- const [currentStep, setCurrentStep] = useState(1);
- const [hasUserProgressed, setHasUserProgressed] = useState(false);
+
+ const [isAutoConfiguring, setIsAutoConfiguring] = useState(false);
+ const [autoConfigComplete, setAutoConfigComplete] = useState(false);
+ const [showAdvancedSettings, setShowAdvancedSettings] = useState(false);
+ const [showPromptSettings, setShowPromptSettings] = useState(false);
+
+ // Track if we've already attempted auto-configuration
+ const hasAttemptedAutoConfig = useRef(false);
// Track if onboarding was complete on initial mount
const wasCompleteOnMount = useRef(null);
@@ -59,231 +64,215 @@ const OnboardPage = () => {
}
}, [preferencesLoading, configsLoading, globalConfigsLoading, isOnboardingComplete]);
- // Track if user has progressed beyond step 1
+ // Redirect to dashboard if onboarding was already complete
useEffect(() => {
- if (currentStep > 1) {
- setHasUserProgressed(true);
- }
- }, [currentStep]);
-
- // Redirect to dashboard if onboarding was already complete on mount (not during this session)
- useEffect(() => {
- // Only redirect if:
- // 1. Onboarding was complete when page loaded
- // 2. User hasn't progressed past step 1
- // 3. All data is loaded
if (
wasCompleteOnMount.current === true &&
- !hasUserProgressed &&
!preferencesLoading &&
!configsLoading &&
!globalConfigsLoading
) {
- // Small delay to ensure the check is stable on initial load
const timer = setTimeout(() => {
router.push(`/dashboard/${searchSpaceId}`);
}, 300);
return () => clearTimeout(timer);
}
- }, [
- hasUserProgressed,
- preferencesLoading,
- configsLoading,
- globalConfigsLoading,
- router,
- searchSpaceId,
- ]);
+ }, [preferencesLoading, configsLoading, globalConfigsLoading, router, searchSpaceId]);
- const progress = (currentStep / TOTAL_STEPS) * 100;
-
- const stepTitles = [t("setup_llm_configuration"), "Configure AI Responses", t("setup_complete")];
-
- const stepDescriptions = [
- t("configure_providers_and_assign_roles"),
- "Customize how the AI responds to your queries (Optional)",
- t("all_set"),
- ];
-
- // User can proceed to step 2 if all roles are assigned
- const canProceedToStep2 =
- !preferencesLoading &&
- preferences.long_context_llm_id &&
- preferences.fast_llm_id &&
- preferences.strategic_llm_id;
-
- // User can always proceed from step 2 to step 3 (prompt config is optional)
- const canProceedToStep3 = true;
-
- const handleNext = () => {
- if (currentStep < TOTAL_STEPS) {
- setCurrentStep(currentStep + 1);
+ // Auto-configure LLM roles if global configs are available
+ const autoConfigureLLMs = useCallback(async () => {
+ if (hasAttemptedAutoConfig.current) return;
+ if (globalConfigs.length === 0) return;
+ if (isOnboardingComplete()) {
+ setAutoConfigComplete(true);
+ return;
}
- };
- const handlePrevious = () => {
- if (currentStep > 1) {
- setCurrentStep(currentStep - 1);
+ hasAttemptedAutoConfig.current = true;
+ setIsAutoConfiguring(true);
+
+ try {
+ const allConfigs = [...globalConfigs, ...llmConfigs];
+
+ if (allConfigs.length === 0) {
+ setIsAutoConfiguring(false);
+ return;
+ }
+
+ // Use first available config for all roles
+ const defaultConfigId = allConfigs[0].id;
+
+ const newPreferences = {
+ long_context_llm_id: defaultConfigId,
+ fast_llm_id: defaultConfigId,
+ strategic_llm_id: defaultConfigId,
+ };
+
+ const success = await updatePreferences(newPreferences);
+
+ if (success) {
+ await refreshPreferences();
+ setAutoConfigComplete(true);
+ toast.success("AI models configured automatically!", {
+ description: "You can customize these in advanced settings.",
+ });
+ }
+ } catch (error) {
+ console.error("Auto-configuration failed:", error);
+ } finally {
+ setIsAutoConfiguring(false);
}
- };
+ }, [globalConfigs, llmConfigs, isOnboardingComplete, updatePreferences, refreshPreferences]);
- if (configsLoading || preferencesLoading || globalConfigsLoading) {
+ // Trigger auto-configuration once data is loaded
+ useEffect(() => {
+ if (!configsLoading && !globalConfigsLoading && !preferencesLoading) {
+ autoConfigureLLMs();
+ }
+ }, [configsLoading, globalConfigsLoading, preferencesLoading, autoConfigureLLMs]);
+
+ const allConfigs = [...globalConfigs, ...llmConfigs];
+ const isReady = autoConfigComplete || isOnboardingComplete();
+
+ // Loading state
+ if (configsLoading || preferencesLoading || globalConfigsLoading || isAutoConfiguring) {
return (
-
-
-
-
- {t("loading_config")}
-
-
-
+
);
}
+ // No configs available - show LLM setup
+ if (allConfigs.length === 0) {
+ return (
+
+ );
+ }
+
+ // Main onboarding view
return (
-
-
- {/* Header */}
-
-
-
-
{t("welcome_title")}
-
-
{t("welcome_subtitle")}
-
+
+
+
+ {/* Header */}
+
- {/* Progress */}
-
-
-
-
- {t("step_of", { current: currentStep, total: TOTAL_STEPS })}
-
-
- {t("percent_complete", { percent: Math.round(progress) })}
-
-
-
-
- {Array.from({ length: TOTAL_STEPS }, (_, i) => {
- const stepNum = i + 1;
- const isCompleted = stepNum < currentStep;
- const isCurrent = stepNum === currentStep;
+ {/* Quick Stats */}
+
- return (
-
-
- {isCompleted ? : stepNum}
-
-
-
- );
- })}
-
-
-
+ {/* Action Cards */}
+
+ router.push(`/dashboard/${searchSpaceId}/team`)}
+ colorScheme="emerald"
+ delay={0.7}
+ />
- {/* Step Content */}
-
-
-
- {currentStep === 1 && }
- {currentStep === 2 && }
- {currentStep === 3 && }
- {stepTitles[currentStep - 1]}
-
-
- {stepDescriptions[currentStep - 1]}
-
-
-
-
-
- {currentStep === 1 && (
-
- )}
- {currentStep === 2 && (
-
- )}
- {currentStep === 3 && }
-
-
-
-
+ router.push(`/dashboard/${searchSpaceId}/sources/add`)}
+ colorScheme="blue"
+ delay={0.8}
+ />
- {/* Navigation */}
-
- {currentStep === 1 ? (
- <>
-
-
router.push(`/dashboard/${searchSpaceId}/researcher`)}
+ colorScheme="violet"
+ delay={0.9}
+ />
+
+
+ {/* Advanced Settings */}
+
+
+ {/* Footer */}
+
+
+ You can always adjust these settings later in{" "}
+ router.push(`/dashboard/${searchSpaceId}/settings`)}
+ className="text-primary hover:underline underline-offset-2 transition-colors"
>
- {t("next")}
-
-
- >
- ) : currentStep === 2 ? (
- <>
-
-
- {t("previous")}
-
- {/* Next button is handled by SetupPromptStep component */}
-
- >
- ) : (
- <>
-
-
- {t("previous")}
-
-
- >
- )}
-
-
+ Settings
+
+
+
+
+
);
};
diff --git a/surfsense_web/app/dashboard/searchspaces/page.tsx b/surfsense_web/app/dashboard/searchspaces/page.tsx
index 598536c1b..5d18195af 100644
--- a/surfsense_web/app/dashboard/searchspaces/page.tsx
+++ b/surfsense_web/app/dashboard/searchspaces/page.tsx
@@ -6,7 +6,7 @@ import { toast } from "sonner";
import { SearchSpaceForm } from "@/components/search-space-form";
export default function SearchSpacesPage() {
const router = useRouter();
- const handleCreateSearchSpace = async (data: { name: string; description: string }) => {
+ const handleCreateSearchSpace = async (data: { name: string; description?: string }) => {
try {
const response = await fetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces`,
@@ -16,7 +16,10 @@ export default function SearchSpacesPage() {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
},
- body: JSON.stringify(data),
+ body: JSON.stringify({
+ name: data.name,
+ description: data.description || "",
+ }),
}
);
@@ -31,7 +34,8 @@ export default function SearchSpacesPage() {
description: `"${data.name}" has been created.`,
});
- router.push(`/dashboard`);
+ // Redirect to the newly created search space's onboarding
+ router.push(`/dashboard/${result.id}/onboard`);
return result;
} catch (error) {
diff --git a/surfsense_web/components/onboard/completion-step.tsx b/surfsense_web/components/onboard/completion-step.tsx
deleted file mode 100644
index a8dbbd76c..000000000
--- a/surfsense_web/components/onboard/completion-step.tsx
+++ /dev/null
@@ -1,208 +0,0 @@
-"use client";
-
-import {
- ArrowRight,
- Bot,
- Brain,
- CheckCircle,
- FileText,
- MessageSquare,
- Sparkles,
- UserPlus,
- Users,
- Zap,
-} from "lucide-react";
-import { motion } from "motion/react";
-import { useRouter } from "next/navigation";
-import { Badge } from "@/components/ui/badge";
-import { Button } from "@/components/ui/button";
-import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
-import { useGlobalLLMConfigs, useLLMConfigs, useLLMPreferences } from "@/hooks/use-llm-configs";
-
-interface CompletionStepProps {
- searchSpaceId: number;
-}
-
-export function CompletionStep({ searchSpaceId }: CompletionStepProps) {
- const router = useRouter();
- const { llmConfigs } = useLLMConfigs(searchSpaceId);
- const { globalConfigs } = useGlobalLLMConfigs();
- const { preferences } = useLLMPreferences(searchSpaceId);
-
- // Combine global and user-specific configs
- const allConfigs = [...globalConfigs, ...llmConfigs];
-
- const assignedConfigs = {
- long_context: allConfigs.find((c) => c.id === preferences.long_context_llm_id),
- fast: allConfigs.find((c) => c.id === preferences.fast_llm_id),
- strategic: allConfigs.find((c) => c.id === preferences.strategic_llm_id),
- };
-
- return (
-
- {/* Next Steps - What would you like to do? */}
-
-
-
What would you like to do next?
-
Choose an option to continue
-
-
-
- {/* Manage Team Card */}
-
-
-
-
-
-
-
- Manage Team
-
- Invite team members and collaborate on your search space
-
-
-
-
-
-
- Invite team members
-
-
-
- Assign roles & permissions
-
-
-
- Collaborate together
-
-
- router.push(`/dashboard/${searchSpaceId}/team`)}
- >
- Manage Team
-
-
-
-
-
-
- {/* Add Sources Card */}
-
-
-
-
-
-
-
- Add Sources
-
- Connect your data sources to start building your knowledge base
-
-
-
-
-
-
- Connect documents and files
-
-
-
- Import from various sources
-
-
-
- Build your knowledge base
-
-
- router.push(`/dashboard/${searchSpaceId}/sources/add`)}
- >
- Add Sources
-
-
-
-
-
-
- {/* Start Chatting Card */}
-
-
-
-
-
-
-
- Start Chatting
-
- Jump right into the AI researcher and start asking questions
-
-
-
-
-
-
- AI-powered conversations
-
-
-
- Research and explore topics
-
-
-
- Get instant insights
-
-
- router.push(`/dashboard/${searchSpaceId}/researcher`)}
- >
- Start Chatting
-
-
-
-
-
-
-
- {/* Quick Stats */}
-
-
- ✓ {allConfigs.length} LLM provider{allConfigs.length > 1 ? "s" : ""} available
-
- {globalConfigs.length > 0 && (
- ✓ {globalConfigs.length} Global config(s)
- )}
- {llmConfigs.length > 0 && (
- ✓ {llmConfigs.length} Custom config(s)
- )}
- ✓ All roles assigned
- ✓ Ready to use
-
-
-
- );
-}
diff --git a/surfsense_web/components/onboard/index.ts b/surfsense_web/components/onboard/index.ts
new file mode 100644
index 000000000..607ba4e7d
--- /dev/null
+++ b/surfsense_web/components/onboard/index.ts
@@ -0,0 +1,8 @@
+export { OnboardActionCard } from "./onboard-action-card";
+export { OnboardAdvancedSettings } from "./onboard-advanced-settings";
+export { OnboardHeader } from "./onboard-header";
+export { OnboardLLMSetup } from "./onboard-llm-setup";
+export { OnboardLoading } from "./onboard-loading";
+export { OnboardStats } from "./onboard-stats";
+export { SetupLLMStep } from "./setup-llm-step";
+export { SetupPromptStep } from "./setup-prompt-step";
diff --git a/surfsense_web/components/onboard/onboard-action-card.tsx b/surfsense_web/components/onboard/onboard-action-card.tsx
new file mode 100644
index 000000000..c6bb41dbf
--- /dev/null
+++ b/surfsense_web/components/onboard/onboard-action-card.tsx
@@ -0,0 +1,114 @@
+"use client";
+
+import { ArrowRight, CheckCircle, type LucideIcon } from "lucide-react";
+import { motion } from "motion/react";
+import { Button } from "@/components/ui/button";
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
+import { cn } from "@/lib/utils";
+
+interface OnboardActionCardProps {
+ title: string;
+ description: string;
+ icon: LucideIcon;
+ features: string[];
+ buttonText: string;
+ onClick: () => void;
+ colorScheme: "emerald" | "blue" | "violet";
+ delay?: number;
+}
+
+const colorSchemes = {
+ emerald: {
+ iconBg: "bg-emerald-500/10 dark:bg-emerald-500/20",
+ iconRing: "ring-emerald-500/20 dark:ring-emerald-500/30",
+ iconColor: "text-emerald-600 dark:text-emerald-400",
+ checkColor: "text-emerald-500",
+ buttonBg: "bg-emerald-600 hover:bg-emerald-500",
+ hoverBorder: "hover:border-emerald-500/50",
+ },
+ blue: {
+ iconBg: "bg-blue-500/10 dark:bg-blue-500/20",
+ iconRing: "ring-blue-500/20 dark:ring-blue-500/30",
+ iconColor: "text-blue-600 dark:text-blue-400",
+ checkColor: "text-blue-500",
+ buttonBg: "bg-blue-600 hover:bg-blue-500",
+ hoverBorder: "hover:border-blue-500/50",
+ },
+ violet: {
+ iconBg: "bg-violet-500/10 dark:bg-violet-500/20",
+ iconRing: "ring-violet-500/20 dark:ring-violet-500/30",
+ iconColor: "text-violet-600 dark:text-violet-400",
+ checkColor: "text-violet-500",
+ buttonBg: "bg-violet-600 hover:bg-violet-500",
+ hoverBorder: "hover:border-violet-500/50",
+ },
+};
+
+export function OnboardActionCard({
+ title,
+ description,
+ icon: Icon,
+ features,
+ buttonText,
+ onClick,
+ colorScheme,
+ delay = 0,
+}: OnboardActionCardProps) {
+ const colors = colorSchemes[colorScheme];
+
+ return (
+
+
+
+
+
+
+ {title}
+ {description}
+
+
+
+
+ {features.map((feature, index) => (
+
+
+ {feature}
+
+ ))}
+
+
+
+ {buttonText}
+
+
+
+
+
+ );
+}
diff --git a/surfsense_web/components/onboard/onboard-advanced-settings.tsx b/surfsense_web/components/onboard/onboard-advanced-settings.tsx
new file mode 100644
index 000000000..b2b9c5080
--- /dev/null
+++ b/surfsense_web/components/onboard/onboard-advanced-settings.tsx
@@ -0,0 +1,144 @@
+"use client";
+
+import { ChevronDown, MessageSquare, Settings2 } from "lucide-react";
+import { AnimatePresence, motion } from "motion/react";
+import { SetupLLMStep } from "@/components/onboard/setup-llm-step";
+import { SetupPromptStep } from "@/components/onboard/setup-prompt-step";
+import { Card, CardContent } from "@/components/ui/card";
+import { Collapsible, CollapsibleContent, CollapsibleTrigger } from "@/components/ui/collapsible";
+import { cn } from "@/lib/utils";
+
+interface OnboardAdvancedSettingsProps {
+ searchSpaceId: number;
+ showLLMSettings: boolean;
+ setShowLLMSettings: (show: boolean) => void;
+ showPromptSettings: boolean;
+ setShowPromptSettings: (show: boolean) => void;
+ onConfigCreated: () => void;
+ onConfigDeleted: () => void;
+ onPreferencesUpdated: () => Promise;
+}
+
+export function OnboardAdvancedSettings({
+ searchSpaceId,
+ showLLMSettings,
+ setShowLLMSettings,
+ showPromptSettings,
+ setShowPromptSettings,
+ onConfigCreated,
+ onConfigDeleted,
+ onPreferencesUpdated,
+}: OnboardAdvancedSettingsProps) {
+ return (
+
+ {/* LLM Configuration */}
+
+
+
+
+
+
+
+
+
+
+
LLM Configuration
+
+ Customize AI models and role assignments
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {showLLMSettings && (
+
+
+
+
+
+
+
+ )}
+
+
+
+
+ {/* Prompt Configuration */}
+
+
+
+
+
+
+
+
+
+
+
AI Response Settings
+
+ Configure citations and custom instructions (Optional)
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {showPromptSettings && (
+
+
+
+ setShowPromptSettings(false)}
+ />
+
+
+
+ )}
+
+
+
+
+ );
+}
diff --git a/surfsense_web/components/onboard/onboard-header.tsx b/surfsense_web/components/onboard/onboard-header.tsx
new file mode 100644
index 000000000..d84bb5adc
--- /dev/null
+++ b/surfsense_web/components/onboard/onboard-header.tsx
@@ -0,0 +1,56 @@
+"use client";
+
+import { CheckCircle } from "lucide-react";
+import { motion } from "motion/react";
+import { Logo } from "@/components/Logo";
+import { Badge } from "@/components/ui/badge";
+
+interface OnboardHeaderProps {
+ title: string;
+ subtitle: string;
+ isReady?: boolean;
+}
+
+export function OnboardHeader({ title, subtitle, isReady }: OnboardHeaderProps) {
+ return (
+
+
+
+
+
+
+ {title}
+ {subtitle}
+
+
+ {isReady && (
+
+
+
+ AI Configuration Complete
+
+
+ )}
+
+ );
+}
diff --git a/surfsense_web/components/onboard/onboard-llm-setup.tsx b/surfsense_web/components/onboard/onboard-llm-setup.tsx
new file mode 100644
index 000000000..b0b2d3fac
--- /dev/null
+++ b/surfsense_web/components/onboard/onboard-llm-setup.tsx
@@ -0,0 +1,93 @@
+"use client";
+
+import { Bot } from "lucide-react";
+import { motion } from "motion/react";
+import { Logo } from "@/components/Logo";
+import { SetupLLMStep } from "@/components/onboard/setup-llm-step";
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
+
+interface OnboardLLMSetupProps {
+ searchSpaceId: number;
+ title: string;
+ configTitle: string;
+ configDescription: string;
+ onConfigCreated: () => void;
+ onConfigDeleted: () => void;
+ onPreferencesUpdated: () => Promise;
+}
+
+export function OnboardLLMSetup({
+ searchSpaceId,
+ title,
+ configTitle,
+ configDescription,
+ onConfigCreated,
+ onConfigDeleted,
+ onPreferencesUpdated,
+}: OnboardLLMSetupProps) {
+ return (
+
+
+ {/* Header */}
+
+
+
+
+
+ {title}
+
+
+ Configure your AI model to get started
+
+
+
+ {/* LLM Setup Card */}
+
+
+
+
+ {configDescription}
+
+
+
+
+
+
+
+
+ );
+}
diff --git a/surfsense_web/components/onboard/onboard-loading.tsx b/surfsense_web/components/onboard/onboard-loading.tsx
new file mode 100644
index 000000000..4a85736d2
--- /dev/null
+++ b/surfsense_web/components/onboard/onboard-loading.tsx
@@ -0,0 +1,47 @@
+"use client";
+
+import { Wand2 } from "lucide-react";
+import { motion } from "motion/react";
+
+interface OnboardLoadingProps {
+ title: string;
+ subtitle: string;
+}
+
+export function OnboardLoading({ title, subtitle }: OnboardLoadingProps) {
+ return (
+
+
+
+
+
+
+
+ {title}
+ {subtitle}
+
+ {[0, 1, 2].map((i) => (
+
+ ))}
+
+
+
+ );
+}
diff --git a/surfsense_web/components/onboard/onboard-stats.tsx b/surfsense_web/components/onboard/onboard-stats.tsx
new file mode 100644
index 000000000..0918c74e2
--- /dev/null
+++ b/surfsense_web/components/onboard/onboard-stats.tsx
@@ -0,0 +1,38 @@
+"use client";
+
+import { Bot, Brain, Sparkles } from "lucide-react";
+import { motion } from "motion/react";
+import { Badge } from "@/components/ui/badge";
+
+interface OnboardStatsProps {
+ globalConfigsCount: number;
+ userConfigsCount: number;
+}
+
+export function OnboardStats({ globalConfigsCount, userConfigsCount }: OnboardStatsProps) {
+ return (
+
+ {globalConfigsCount > 0 && (
+
+
+ {globalConfigsCount} Global Model{globalConfigsCount > 1 ? "s" : ""}
+
+ )}
+ {userConfigsCount > 0 && (
+
+
+ {userConfigsCount} Custom Config{userConfigsCount > 1 ? "s" : ""}
+
+ )}
+
+
+ All Roles Assigned
+
+
+ );
+}
diff --git a/surfsense_web/components/search-space-form.tsx b/surfsense_web/components/search-space-form.tsx
index 79102dbcf..ccb290dc8 100644
--- a/surfsense_web/components/search-space-form.tsx
+++ b/surfsense_web/components/search-space-form.tsx
@@ -36,19 +36,19 @@ import { cn } from "@/lib/utils";
// Define the form schema with Zod
const searchSpaceFormSchema = z.object({
- name: z.string().min(3, "Name is required"),
- description: z.string().min(10, "Description is required"),
+ name: z.string().min(3, "Name must be at least 3 characters"),
+ description: z.string().optional(),
});
// Define the type for the form values
type SearchSpaceFormValues = z.infer;
interface SearchSpaceFormProps {
- onSubmit?: (data: { name: string; description: string }) => void;
+ onSubmit?: (data: { name: string; description?: string }) => void;
onDelete?: () => void;
className?: string;
isEditing?: boolean;
- initialData?: { name: string; description: string };
+ initialData?: { name: string; description?: string };
}
export function SearchSpaceForm({
@@ -229,7 +229,9 @@ export function SearchSpaceForm({
name="description"
render={({ field }) => (
- Description
+
+ Description (optional)
+
From b2a97b39ce33a5906a339ac9394da6c73e83d507 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Tue, 2 Dec 2025 01:24:09 -0800
Subject: [PATCH 24/36] refactor: centralize authentication handling
- Replaced direct localStorage token access with a centralized `getBearerToken` function across various components and hooks to improve code maintainability and security.
- Updated API calls to use `authenticatedFetch` for consistent authentication handling.
- Enhanced user experience by ensuring proper redirection to login when authentication fails.
- Cleaned up unused imports and improved overall code structure for better readability.
---
.../add/airtable-connector/page.tsx | 10 +-
.../connectors/add/github-connector/page.tsx | 13 +-
.../add/google-calendar-connector/page.tsx | 10 +-
.../add/google-gmail-connector/page.tsx | 10 +-
.../editor/[documentId]/page.tsx | 31 ++--
.../[search_space_id]/onboard/page.tsx | 8 +-
surfsense_web/app/dashboard/layout.tsx | 10 +-
surfsense_web/app/dashboard/page.tsx | 10 +-
.../app/dashboard/searchspaces/page.tsx | 9 +-
.../app/invite/[invite_code]/page.tsx | 8 +-
.../atoms/chats/chat-mutation.atoms.ts | 7 +-
surfsense_web/atoms/chats/chat-query.atoms.ts | 5 +-
.../atoms/podcasts/podcast-mutation.atoms.ts | 5 +-
surfsense_web/components/TokenHandler.tsx | 29 ++-
.../components/dashboard-breadcrumb.tsx | 12 +-
.../components/onboard/setup-prompt-step.tsx | 8 +-
.../settings/prompt-config-manager.tsx | 8 +-
.../components/sources/DocumentUploadTab.tsx | 5 +-
.../components/sources/YouTubeTab.tsx | 8 +-
surfsense_web/hooks/use-api-key.ts | 3 +-
surfsense_web/hooks/use-chat.ts | 3 +-
.../hooks/use-connector-edit-page.ts | 10 +-
surfsense_web/hooks/use-connectors.ts | 41 ++---
surfsense_web/hooks/use-document-by-chunk.ts | 8 +-
surfsense_web/hooks/use-document-types.ts | 13 +-
surfsense_web/hooks/use-documents.ts | 37 +---
surfsense_web/hooks/use-llm-configs.ts | 58 ++----
surfsense_web/hooks/use-logs.ts | 60 ++----
surfsense_web/hooks/use-rbac.ts | 148 ++++-----------
.../hooks/use-search-source-connectors.ts | 65 ++-----
surfsense_web/hooks/use-search-space.ts | 17 +-
surfsense_web/hooks/use-search-spaces.ts | 27 +--
surfsense_web/hooks/use-user.ts | 18 +-
surfsense_web/lib/apis/base-api.service.ts | 6 +-
surfsense_web/lib/auth-utils.ts | 173 ++++++++++++++++++
35 files changed, 396 insertions(+), 497 deletions(-)
create mode 100644 surfsense_web/lib/auth-utils.ts
diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/airtable-connector/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/airtable-connector/page.tsx
index 2d82877b3..cc4330203 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/airtable-connector/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/airtable-connector/page.tsx
@@ -22,6 +22,7 @@ import {
type SearchSourceConnector,
useSearchSourceConnectors,
} from "@/hooks/use-search-source-connectors";
+import { authenticatedFetch } from "@/lib/auth-utils";
export default function AirtableConnectorPage() {
const router = useRouter();
@@ -46,14 +47,9 @@ export default function AirtableConnectorPage() {
const handleConnectAirtable = async () => {
setIsConnecting(true);
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/auth/airtable/connector/add/?space_id=${searchSpaceId}`,
- {
- method: "GET",
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- }
+ { method: "GET" }
);
if (!response.ok) {
diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/github-connector/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/github-connector/page.tsx
index 90a02a5f2..833d716a8 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/github-connector/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/github-connector/page.tsx
@@ -40,6 +40,7 @@ import { EnumConnectorName } from "@/contracts/enums/connector";
import { getConnectorIcon } from "@/contracts/enums/connectorIcons";
// Assuming useSearchSourceConnectors hook exists and works similarly
import { useSearchSourceConnectors } from "@/hooks/use-search-source-connectors";
+import { authenticatedFetch, redirectToLogin } from "@/lib/auth-utils";
// Define the form schema with Zod for GitHub PAT entry step
const githubPatFormSchema = z.object({
@@ -101,19 +102,11 @@ export default function GithubConnectorPage() {
setConnectorName(values.name); // Store the name
setValidatedPat(values.github_pat); // Store the PAT temporarily
try {
- const token = localStorage.getItem("surfsense_bearer_token");
- if (!token) {
- throw new Error("No authentication token found");
- }
-
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/github/repositories`,
{
method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify({ github_pat: values.github_pat }),
}
);
diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/google-calendar-connector/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/google-calendar-connector/page.tsx
index 2fdc95671..8179fbabc 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/google-calendar-connector/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/google-calendar-connector/page.tsx
@@ -24,6 +24,7 @@ import {
type SearchSourceConnector,
useSearchSourceConnectors,
} from "@/hooks/use-search-source-connectors";
+import { authenticatedFetch } from "@/lib/auth-utils";
export default function GoogleCalendarConnectorPage() {
const router = useRouter();
@@ -51,14 +52,9 @@ export default function GoogleCalendarConnectorPage() {
try {
setIsConnecting(true);
// Call backend to initiate authorization flow
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/auth/google/calendar/connector/add/?space_id=${searchSpaceId}`,
- {
- method: "GET",
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- }
+ { method: "GET" }
);
if (!response.ok) {
diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/google-gmail-connector/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/google-gmail-connector/page.tsx
index c1354d03e..8659d937c 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/google-gmail-connector/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/google-gmail-connector/page.tsx
@@ -24,6 +24,7 @@ import {
type SearchSourceConnector,
useSearchSourceConnectors,
} from "@/hooks/use-search-source-connectors";
+import { authenticatedFetch } from "@/lib/auth-utils";
export default function GoogleGmailConnectorPage() {
const router = useRouter();
@@ -50,14 +51,9 @@ export default function GoogleGmailConnectorPage() {
try {
setIsConnecting(true);
// Call backend to initiate authorization flow
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/auth/google/gmail/connector/add/?space_id=${searchSpaceId}`,
- {
- method: "GET",
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- }
+ { method: "GET" }
);
if (!response.ok) {
diff --git a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
index 71ce2fa2f..a7e0d6861 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
@@ -9,6 +9,7 @@ import { BlockNoteEditor } from "@/components/DynamicBlockNoteEditor";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Separator } from "@/components/ui/separator";
+import { authenticatedFetch, getBearerToken, redirectToLogin } from "@/lib/auth-utils";
interface EditorContent {
document_id: number;
@@ -29,28 +30,21 @@ export default function EditorPage() {
const [error, setError] = useState(null);
const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false);
- // Get auth token
- const token =
- typeof window !== "undefined" ? localStorage.getItem("surfsense_bearer_token") : null;
-
// Fetch document content - DIRECT CALL TO FASTAPI
useEffect(() => {
async function fetchDocument() {
+ const token = getBearerToken();
if (!token) {
console.error("No auth token found");
- setError("Please login to access the editor");
- setLoading(false);
+ // Redirect to login with current path saved
+ redirectToLogin();
return;
}
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${params.search_space_id}/documents/${documentId}/editor-content`,
- {
- headers: {
- Authorization: `Bearer ${token}`,
- },
- }
+ { method: "GET" }
);
if (!response.ok) {
@@ -84,10 +78,10 @@ export default function EditorPage() {
}
}
- if (documentId && token) {
+ if (documentId) {
fetchDocument();
}
- }, [documentId, token]);
+ }, [documentId, params.search_space_id]);
// Track changes to mark as unsaved
useEffect(() => {
@@ -100,8 +94,10 @@ export default function EditorPage() {
// Save and exit - DIRECT CALL TO FASTAPI
const handleSave = async () => {
+ const token = getBearerToken();
if (!token) {
toast.error("Please login to save");
+ redirectToLogin();
return;
}
@@ -113,14 +109,11 @@ export default function EditorPage() {
setSaving(true);
try {
// Save blocknote_document and trigger reindexing in background
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${params.search_space_id}/documents/${documentId}/save`,
{
method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify({ blocknote_document: editorContent }),
}
);
diff --git a/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx
index 150cd772c..7382429d2 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx
@@ -13,6 +13,7 @@ import { OnboardLLMSetup } from "@/components/onboard/onboard-llm-setup";
import { OnboardLoading } from "@/components/onboard/onboard-loading";
import { OnboardStats } from "@/components/onboard/onboard-stats";
import { useGlobalLLMConfigs, useLLMConfigs, useLLMPreferences } from "@/hooks/use-llm-configs";
+import { getBearerToken, redirectToLogin } from "@/lib/auth-utils";
const OnboardPage = () => {
const t = useTranslations("onboard");
@@ -44,12 +45,13 @@ const OnboardPage = () => {
// Check if user is authenticated
useEffect(() => {
- const token = localStorage.getItem("surfsense_bearer_token");
+ const token = getBearerToken();
if (!token) {
- router.push("/login");
+ // Save current path and redirect to login
+ redirectToLogin();
return;
}
- }, [router]);
+ }, []);
// Capture onboarding state on first load
useEffect(() => {
diff --git a/surfsense_web/app/dashboard/layout.tsx b/surfsense_web/app/dashboard/layout.tsx
index 01436aff9..8763a622f 100644
--- a/surfsense_web/app/dashboard/layout.tsx
+++ b/surfsense_web/app/dashboard/layout.tsx
@@ -1,28 +1,28 @@
"use client";
import { Loader2 } from "lucide-react";
-import { useRouter } from "next/navigation";
import { useEffect, useState } from "react";
import { AnnouncementBanner } from "@/components/announcement-banner";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
+import { getBearerToken, redirectToLogin } from "@/lib/auth-utils";
interface DashboardLayoutProps {
children: React.ReactNode;
}
export default function DashboardLayout({ children }: DashboardLayoutProps) {
- const router = useRouter();
const [isCheckingAuth, setIsCheckingAuth] = useState(true);
useEffect(() => {
// Check if user is authenticated
- const token = localStorage.getItem("surfsense_bearer_token");
+ const token = getBearerToken();
if (!token) {
- router.push("/login");
+ // Save current path and redirect to login
+ redirectToLogin();
return;
}
setIsCheckingAuth(false);
- }, [router]);
+ }, []);
// Show loading screen while checking authentication
if (isCheckingAuth) {
diff --git a/surfsense_web/app/dashboard/page.tsx b/surfsense_web/app/dashboard/page.tsx
index 0910d0b44..0b3450d20 100644
--- a/surfsense_web/app/dashboard/page.tsx
+++ b/surfsense_web/app/dashboard/page.tsx
@@ -36,6 +36,7 @@ import { Spotlight } from "@/components/ui/spotlight";
import { Tilt } from "@/components/ui/tilt";
import { useUser } from "@/hooks";
import { useSearchSpaces } from "@/hooks/use-search-spaces";
+import { authenticatedFetch } from "@/lib/auth-utils";
/**
* Formats a date string into a readable format
@@ -173,14 +174,9 @@ const DashboardPage = () => {
const handleDeleteSearchSpace = async (id: number) => {
// Send DELETE request to the API
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${id}`,
- {
- method: "DELETE",
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- }
+ { method: "DELETE" }
);
if (!response.ok) {
diff --git a/surfsense_web/app/dashboard/searchspaces/page.tsx b/surfsense_web/app/dashboard/searchspaces/page.tsx
index 5d18195af..520c4358e 100644
--- a/surfsense_web/app/dashboard/searchspaces/page.tsx
+++ b/surfsense_web/app/dashboard/searchspaces/page.tsx
@@ -4,18 +4,17 @@ import { motion } from "motion/react";
import { useRouter } from "next/navigation";
import { toast } from "sonner";
import { SearchSpaceForm } from "@/components/search-space-form";
+import { authenticatedFetch } from "@/lib/auth-utils";
+
export default function SearchSpacesPage() {
const router = useRouter();
const handleCreateSearchSpace = async (data: { name: string; description?: string }) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces`,
{
method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify({
name: data.name,
description: data.description || "",
diff --git a/surfsense_web/app/invite/[invite_code]/page.tsx b/surfsense_web/app/invite/[invite_code]/page.tsx
index 1a4600483..4ff78ac91 100644
--- a/surfsense_web/app/invite/[invite_code]/page.tsx
+++ b/surfsense_web/app/invite/[invite_code]/page.tsx
@@ -27,6 +27,7 @@ import {
CardTitle,
} from "@/components/ui/card";
import { useInviteInfo } from "@/hooks/use-rbac";
+import { getBearerToken } from "@/lib/auth-utils";
export default function InviteAcceptPage() {
const params = useParams();
@@ -47,7 +48,7 @@ export default function InviteAcceptPage() {
// Check if user is logged in
useEffect(() => {
if (typeof window !== "undefined") {
- const token = localStorage.getItem("surfsense_bearer_token");
+ const token = getBearerToken();
setIsLoggedIn(!!token);
}
}, []);
@@ -71,7 +72,10 @@ export default function InviteAcceptPage() {
const handleLoginRedirect = () => {
// Store the invite code to redirect back after login
localStorage.setItem("pending_invite_code", inviteCode);
- router.push("/auth");
+ // Save the current invite page URL so we can return after authentication
+ localStorage.setItem("surfsense_redirect_path", `/invite/${inviteCode}`);
+ // Redirect to login (we manually set the path above since invite pages need special handling)
+ window.location.href = "/login";
};
// Check for pending invite after login
diff --git a/surfsense_web/atoms/chats/chat-mutation.atoms.ts b/surfsense_web/atoms/chats/chat-mutation.atoms.ts
index f7ad5cf4b..6105ef615 100644
--- a/surfsense_web/atoms/chats/chat-mutation.atoms.ts
+++ b/surfsense_web/atoms/chats/chat-mutation.atoms.ts
@@ -7,6 +7,7 @@ import type {
UpdateChatRequest,
} from "@/contracts/types/chat.types";
import { chatsApiService } from "@/lib/apis/chats-api.service";
+import { getBearerToken } from "@/lib/auth-utils";
import { cacheKeys } from "@/lib/query-client/cache-keys";
import { queryClient } from "@/lib/query-client/client";
import { activeSearchSpaceIdAtom } from "../seach-spaces/seach-space-queries.atom";
@@ -14,7 +15,7 @@ import { globalChatsQueryParamsAtom } from "./ui.atoms";
export const deleteChatMutationAtom = atomWithMutation((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
- const authToken = localStorage.getItem("surfsense_bearer_token");
+ const authToken = getBearerToken();
const chatsQueryParams = get(globalChatsQueryParamsAtom);
return {
@@ -38,7 +39,7 @@ export const deleteChatMutationAtom = atomWithMutation((get) => {
export const createChatMutationAtom = atomWithMutation((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
- const authToken = localStorage.getItem("surfsense_bearer_token");
+ const authToken = getBearerToken();
const chatsQueryParams = get(globalChatsQueryParamsAtom);
return {
@@ -58,7 +59,7 @@ export const createChatMutationAtom = atomWithMutation((get) => {
export const updateChatMutationAtom = atomWithMutation((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
- const authToken = localStorage.getItem("surfsense_bearer_token");
+ const authToken = getBearerToken();
const chatsQueryParams = get(globalChatsQueryParamsAtom);
return {
diff --git a/surfsense_web/atoms/chats/chat-query.atoms.ts b/surfsense_web/atoms/chats/chat-query.atoms.ts
index 26b2b1057..36871dbd0 100644
--- a/surfsense_web/atoms/chats/chat-query.atoms.ts
+++ b/surfsense_web/atoms/chats/chat-query.atoms.ts
@@ -2,12 +2,13 @@ import { atomWithQuery } from "jotai-tanstack-query";
import { activeSearchSpaceIdAtom } from "@/atoms/seach-spaces/seach-space-queries.atom";
import { chatsApiService } from "@/lib/apis/chats-api.service";
import { podcastsApiService } from "@/lib/apis/podcasts-api.service";
+import { getBearerToken } from "@/lib/auth-utils";
import { cacheKeys } from "@/lib/query-client/cache-keys";
import { activeChatIdAtom, globalChatsQueryParamsAtom } from "./ui.atoms";
export const activeChatAtom = atomWithQuery((get) => {
const activeChatId = get(activeChatIdAtom);
- const authToken = localStorage.getItem("surfsense_bearer_token");
+ const authToken = getBearerToken();
return {
queryKey: cacheKeys.chats.activeChat(activeChatId ?? ""),
@@ -32,7 +33,7 @@ export const activeChatAtom = atomWithQuery((get) => {
export const chatsAtom = atomWithQuery((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
- const authToken = localStorage.getItem("surfsense_bearer_token");
+ const authToken = getBearerToken();
const queryParams = get(globalChatsQueryParamsAtom);
return {
diff --git a/surfsense_web/atoms/podcasts/podcast-mutation.atoms.ts b/surfsense_web/atoms/podcasts/podcast-mutation.atoms.ts
index 3a2403af7..7e51891be 100644
--- a/surfsense_web/atoms/podcasts/podcast-mutation.atoms.ts
+++ b/surfsense_web/atoms/podcasts/podcast-mutation.atoms.ts
@@ -7,13 +7,14 @@ import type {
Podcast,
} from "@/contracts/types/podcast.types";
import { podcastsApiService } from "@/lib/apis/podcasts-api.service";
+import { getBearerToken } from "@/lib/auth-utils";
import { cacheKeys } from "@/lib/query-client/cache-keys";
import { queryClient } from "@/lib/query-client/client";
import { globalPodcastsQueryParamsAtom } from "./ui.atoms";
export const deletePodcastMutationAtom = atomWithMutation((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
- const authToken = localStorage.getItem("surfsense_bearer_token");
+ const authToken = getBearerToken();
const podcastsQueryParams = get(globalPodcastsQueryParamsAtom);
return {
@@ -37,7 +38,7 @@ export const deletePodcastMutationAtom = atomWithMutation((get) => {
export const generatePodcastMutationAtom = atomWithMutation((get) => {
const searchSpaceId = get(activeSearchSpaceIdAtom);
- const authToken = localStorage.getItem("surfsense_bearer_token");
+ const authToken = getBearerToken();
const podcastsQueryParams = get(globalPodcastsQueryParamsAtom);
return {
diff --git a/surfsense_web/components/TokenHandler.tsx b/surfsense_web/components/TokenHandler.tsx
index 3b80994a8..70119dfe4 100644
--- a/surfsense_web/components/TokenHandler.tsx
+++ b/surfsense_web/components/TokenHandler.tsx
@@ -2,22 +2,25 @@
import { useRouter, useSearchParams } from "next/navigation";
import { useEffect } from "react";
+import { getAndClearRedirectPath, setBearerToken } from "@/lib/auth-utils";
interface TokenHandlerProps {
- redirectPath?: string; // Path to redirect after storing token
+ redirectPath?: string; // Default path to redirect after storing token (if no saved path)
tokenParamName?: string; // Name of the URL parameter containing the token
- storageKey?: string; // Key to use when storing in localStorage
+ storageKey?: string; // Key to use when storing in localStorage (kept for backwards compatibility)
}
/**
* Client component that extracts a token from URL parameters and stores it in localStorage
+ * After storing the token, it redirects the user back to the page they were on before
+ * being redirected to login (if available), or to the default redirectPath.
*
- * @param redirectPath - Path to redirect after storing token (default: '/')
+ * @param redirectPath - Default path to redirect after storing token (default: '/dashboard')
* @param tokenParamName - Name of the URL parameter containing the token (default: 'token')
- * @param storageKey - Key to use when storing in localStorage (default: 'auth_token')
+ * @param storageKey - Key to use when storing in localStorage (default: 'surfsense_bearer_token')
*/
const TokenHandler = ({
- redirectPath = "/",
+ redirectPath = "/dashboard",
tokenParamName = "token",
storageKey = "surfsense_bearer_token",
}: TokenHandlerProps) => {
@@ -33,14 +36,22 @@ const TokenHandler = ({
if (token) {
try {
- // Store token in localStorage
+ // Store token in localStorage using both methods for compatibility
localStorage.setItem(storageKey, token);
- // console.log(`Token stored in localStorage with key: ${storageKey}`);
+ setBearerToken(token);
- // Redirect to specified path
- router.push(redirectPath);
+ // Check if there's a saved redirect path from before the auth flow
+ const savedRedirectPath = getAndClearRedirectPath();
+
+ // Use the saved path if available, otherwise use the default redirectPath
+ const finalRedirectPath = savedRedirectPath || redirectPath;
+
+ // Redirect to the appropriate path
+ router.push(finalRedirectPath);
} catch (error) {
console.error("Error storing token in localStorage:", error);
+ // Even if there's an error, try to redirect to the default path
+ router.push(redirectPath);
}
}
}, [searchParams, tokenParamName, storageKey, redirectPath, router]);
diff --git a/surfsense_web/components/dashboard-breadcrumb.tsx b/surfsense_web/components/dashboard-breadcrumb.tsx
index 76abe3817..c845403a6 100644
--- a/surfsense_web/components/dashboard-breadcrumb.tsx
+++ b/surfsense_web/components/dashboard-breadcrumb.tsx
@@ -14,6 +14,7 @@ import {
BreadcrumbSeparator,
} from "@/components/ui/breadcrumb";
import { useSearchSpace } from "@/hooks/use-search-space";
+import { authenticatedFetch, getBearerToken } from "@/lib/auth-utils";
interface BreadcrumbItemInterface {
label: string;
@@ -41,17 +42,12 @@ export function DashboardBreadcrumb() {
useEffect(() => {
if (segments[2] === "editor" && segments[3] && searchSpaceId) {
const documentId = segments[3];
- const token =
- typeof window !== "undefined" ? localStorage.getItem("surfsense_bearer_token") : null;
+ const token = getBearerToken();
if (token) {
- fetch(
+ authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/documents/${documentId}/editor-content`,
- {
- headers: {
- Authorization: `Bearer ${token}`,
- },
- }
+ { method: "GET" }
)
.then((res) => res.json())
.then((data) => {
diff --git a/surfsense_web/components/onboard/setup-prompt-step.tsx b/surfsense_web/components/onboard/setup-prompt-step.tsx
index 9a190e6c7..899d856fa 100644
--- a/surfsense_web/components/onboard/setup-prompt-step.tsx
+++ b/surfsense_web/components/onboard/setup-prompt-step.tsx
@@ -13,6 +13,7 @@ import { Switch } from "@/components/ui/switch";
import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { Textarea } from "@/components/ui/textarea";
import { type CommunityPrompt, useCommunityPrompts } from "@/hooks/use-community-prompts";
+import { authenticatedFetch } from "@/lib/auth-utils";
interface SetupPromptStepProps {
searchSpaceId: number;
@@ -74,14 +75,11 @@ export function SetupPromptStep({ searchSpaceId, onComplete }: SetupPromptStepPr
// Only send update if there's something to update
if (Object.keys(payload).length > 0) {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}`,
{
method: "PUT",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify(payload),
}
);
diff --git a/surfsense_web/components/settings/prompt-config-manager.tsx b/surfsense_web/components/settings/prompt-config-manager.tsx
index 0de1ca286..a1199c10d 100644
--- a/surfsense_web/components/settings/prompt-config-manager.tsx
+++ b/surfsense_web/components/settings/prompt-config-manager.tsx
@@ -25,6 +25,7 @@ import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { Textarea } from "@/components/ui/textarea";
import { type CommunityPrompt, useCommunityPrompts } from "@/hooks/use-community-prompts";
import { useSearchSpace } from "@/hooks/use-search-space";
+import { authenticatedFetch } from "@/lib/auth-utils";
interface PromptConfigManagerProps {
searchSpaceId: number;
@@ -78,14 +79,11 @@ export function PromptConfigManager({ searchSpaceId }: PromptConfigManagerProps)
// Only send request if we have something to update
if (Object.keys(payload).length > 0) {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}`,
{
method: "PUT",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify(payload),
}
);
diff --git a/surfsense_web/components/sources/DocumentUploadTab.tsx b/surfsense_web/components/sources/DocumentUploadTab.tsx
index c9976bb64..c51522a74 100644
--- a/surfsense_web/components/sources/DocumentUploadTab.tsx
+++ b/surfsense_web/components/sources/DocumentUploadTab.tsx
@@ -14,6 +14,7 @@ import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Progress } from "@/components/ui/progress";
import { Separator } from "@/components/ui/separator";
+import { getAuthHeaders } from "@/lib/auth-utils";
import { GridPattern } from "./GridPattern";
interface DocumentUploadTabProps {
@@ -168,9 +169,7 @@ export function DocumentUploadTab({ searchSpaceId }: DocumentUploadTabProps) {
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/fileupload`,
{
method: "POST",
- headers: {
- Authorization: `Bearer ${window.localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: getAuthHeaders(),
body: formData,
}
);
diff --git a/surfsense_web/components/sources/YouTubeTab.tsx b/surfsense_web/components/sources/YouTubeTab.tsx
index 717a4266d..8301f51cc 100644
--- a/surfsense_web/components/sources/YouTubeTab.tsx
+++ b/surfsense_web/components/sources/YouTubeTab.tsx
@@ -19,6 +19,7 @@ import {
CardTitle,
} from "@/components/ui/card";
import { Label } from "@/components/ui/label";
+import { authenticatedFetch } from "@/lib/auth-utils";
const youtubeRegex =
/^(https:\/\/)?(www\.)?(youtube\.com\/watch\?v=|youtu\.be\/)([a-zA-Z0-9_-]{11})$/;
@@ -66,14 +67,11 @@ export function YouTubeTab({ searchSpaceId }: YouTubeTabProps) {
const videoUrls = videoTags.map((tag) => tag.text);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents`,
{
method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify({
document_type: "YOUTUBE_VIDEO",
content: videoUrls,
diff --git a/surfsense_web/hooks/use-api-key.ts b/surfsense_web/hooks/use-api-key.ts
index 229a8de3e..6f955adc3 100644
--- a/surfsense_web/hooks/use-api-key.ts
+++ b/surfsense_web/hooks/use-api-key.ts
@@ -1,5 +1,6 @@
import { useCallback, useEffect, useState } from "react";
import { toast } from "sonner";
+import { getBearerToken } from "@/lib/auth-utils";
interface UseApiKeyReturn {
apiKey: string | null;
@@ -17,7 +18,7 @@ export function useApiKey(): UseApiKeyReturn {
// Load API key from localStorage
const loadApiKey = () => {
try {
- const token = localStorage.getItem("surfsense_bearer_token");
+ const token = getBearerToken();
setApiKey(token);
} catch (error) {
console.error("Error loading API key:", error);
diff --git a/surfsense_web/hooks/use-chat.ts b/surfsense_web/hooks/use-chat.ts
index 7424c7aea..b006401d1 100644
--- a/surfsense_web/hooks/use-chat.ts
+++ b/surfsense_web/hooks/use-chat.ts
@@ -3,6 +3,7 @@ import { useCallback, useEffect, useState } from "react";
import type { ChatDetails } from "@/app/dashboard/[search_space_id]/chats/chats-client";
import type { ResearchMode } from "@/components/chat";
import type { Document } from "@/hooks/use-documents";
+import { getBearerToken } from "@/lib/auth-utils";
interface UseChatStateProps {
search_space_id: string;
@@ -22,7 +23,7 @@ export function useChatState({ chat_id }: UseChatStateProps) {
const [topK, setTopK] = useState(5);
useEffect(() => {
- const bearerToken = localStorage.getItem("surfsense_bearer_token");
+ const bearerToken = getBearerToken();
setToken(bearerToken);
}, []);
diff --git a/surfsense_web/hooks/use-connector-edit-page.ts b/surfsense_web/hooks/use-connector-edit-page.ts
index 00be9ef17..899cbb961 100644
--- a/surfsense_web/hooks/use-connector-edit-page.ts
+++ b/surfsense_web/hooks/use-connector-edit-page.ts
@@ -15,6 +15,7 @@ import {
type SearchSourceConnector,
useSearchSourceConnectors,
} from "@/hooks/use-search-source-connectors";
+import { authenticatedFetch } from "@/lib/auth-utils";
const normalizeListInput = (value: unknown): string[] => {
if (Array.isArray(value)) {
@@ -178,16 +179,11 @@ export function useConnectorEditPage(connectorId: number, searchSpaceId: string)
setIsFetchingRepos(true);
setFetchedRepos(null);
try {
- const token = localStorage.getItem("surfsense_bearer_token");
- if (!token) throw new Error("No auth token");
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/github/repositories`,
{
method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify({ github_pat: values.github_pat }),
}
);
diff --git a/surfsense_web/hooks/use-connectors.ts b/surfsense_web/hooks/use-connectors.ts
index db0a2618e..830677ea5 100644
--- a/surfsense_web/hooks/use-connectors.ts
+++ b/surfsense_web/hooks/use-connectors.ts
@@ -1,3 +1,5 @@
+import { authenticatedFetch } from "@/lib/auth-utils";
+
// Types for connector API
export interface ConnectorConfig {
[key: string]: string;
@@ -32,14 +34,11 @@ export const getConnectorTypeDisplay = (type: string): string => {
export const ConnectorService = {
// Create a new connector
async createConnector(data: CreateConnectorRequest): Promise {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors`,
{
method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify(data),
}
);
@@ -54,13 +53,9 @@ export const ConnectorService = {
// Get all connectors
async getConnectors(skip = 0, limit = 100): Promise {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors?skip=${skip}&limit=${limit}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- }
+ { method: "GET" }
);
if (!response.ok) {
@@ -73,13 +68,9 @@ export const ConnectorService = {
// Get a specific connector
async getConnector(connectorId: number): Promise {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- }
+ { method: "GET" }
);
if (!response.ok) {
@@ -92,14 +83,11 @@ export const ConnectorService = {
// Update a connector
async updateConnector(connectorId: number, data: CreateConnectorRequest): Promise {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}`,
{
method: "PUT",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify(data),
}
);
@@ -114,14 +102,9 @@ export const ConnectorService = {
// Delete a connector
async deleteConnector(connectorId: number): Promise {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}`,
- {
- method: "DELETE",
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- }
+ { method: "DELETE" }
);
if (!response.ok) {
diff --git a/surfsense_web/hooks/use-document-by-chunk.ts b/surfsense_web/hooks/use-document-by-chunk.ts
index dd36fcab1..630e810a2 100644
--- a/surfsense_web/hooks/use-document-by-chunk.ts
+++ b/surfsense_web/hooks/use-document-by-chunk.ts
@@ -1,6 +1,7 @@
"use client";
import { useCallback, useState } from "react";
import { toast } from "sonner";
+import { authenticatedFetch } from "@/lib/auth-utils";
export interface Chunk {
id: number;
@@ -49,13 +50,10 @@ export function useDocumentByChunk() {
setError(null);
setDocument(null);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/by-chunk/${chunkId}`,
{
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- "Content-Type": "application/json",
- },
+ headers: { "Content-Type": "application/json" },
method: "GET",
}
);
diff --git a/surfsense_web/hooks/use-document-types.ts b/surfsense_web/hooks/use-document-types.ts
index 415e42e90..21c9eb6fe 100644
--- a/surfsense_web/hooks/use-document-types.ts
+++ b/surfsense_web/hooks/use-document-types.ts
@@ -1,4 +1,5 @@
import { useCallback, useEffect, useState } from "react";
+import { authenticatedFetch } from "@/lib/auth-utils";
export interface DocumentTypeCount {
type: string;
@@ -23,11 +24,6 @@ export const useDocumentTypes = (searchSpaceId?: number, lazy: boolean = false)
try {
setIsLoading(true);
setError(null);
- const token = localStorage.getItem("surfsense_bearer_token");
-
- if (!token) {
- throw new Error("No authentication token found");
- }
// Build URL with optional search_space_id query parameter
const url = new URL(
@@ -37,12 +33,9 @@ export const useDocumentTypes = (searchSpaceId?: number, lazy: boolean = false)
url.searchParams.append("search_space_id", spaceId.toString());
}
- const response = await fetch(url.toString(), {
+ const response = await authenticatedFetch(url.toString(), {
method: "GET",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
+ headers: { "Content-Type": "application/json" },
});
if (!response.ok) {
diff --git a/surfsense_web/hooks/use-documents.ts b/surfsense_web/hooks/use-documents.ts
index 21ee959b8..b5c349091 100644
--- a/surfsense_web/hooks/use-documents.ts
+++ b/surfsense_web/hooks/use-documents.ts
@@ -1,6 +1,7 @@
"use client";
import { useCallback, useEffect, useState } from "react";
import { toast } from "sonner";
+import { authenticatedFetch } from "@/lib/auth-utils";
import { normalizeListResponse } from "@/lib/pagination";
export interface Document {
@@ -78,14 +79,9 @@ export function useDocuments(searchSpaceId: number, options?: UseDocumentsOption
params.append("document_types", effectiveDocumentTypes.join(","));
}
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents?${params.toString()}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
@@ -159,14 +155,9 @@ export function useDocuments(searchSpaceId: number, options?: UseDocumentsOption
params.append("document_types", effectiveDocumentTypes.join(","));
}
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/search?${params.toString()}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
@@ -193,14 +184,9 @@ export function useDocuments(searchSpaceId: number, options?: UseDocumentsOption
const deleteDocument = useCallback(
async (documentId: number) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "DELETE",
- }
+ { method: "DELETE" }
);
if (!response.ok) {
@@ -228,14 +214,9 @@ export function useDocuments(searchSpaceId: number, options?: UseDocumentsOption
search_space_id: searchSpaceId.toString(),
});
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/type-counts?${params.toString()}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
diff --git a/surfsense_web/hooks/use-llm-configs.ts b/surfsense_web/hooks/use-llm-configs.ts
index 0755211c4..7619cc3e4 100644
--- a/surfsense_web/hooks/use-llm-configs.ts
+++ b/surfsense_web/hooks/use-llm-configs.ts
@@ -1,6 +1,7 @@
"use client";
import { useEffect, useState } from "react";
import { toast } from "sonner";
+import { authenticatedFetch } from "@/lib/auth-utils";
export interface LLMConfig {
id: number;
@@ -61,14 +62,9 @@ export function useLLMConfigs(searchSpaceId: number | null) {
try {
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/llm-configs?search_space_id=${searchSpaceId}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
@@ -92,14 +88,11 @@ export function useLLMConfigs(searchSpaceId: number | null) {
const createLLMConfig = async (config: CreateLLMConfig): Promise => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/llm-configs`,
{
method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify(config),
}
);
@@ -122,14 +115,9 @@ export function useLLMConfigs(searchSpaceId: number | null) {
const deleteLLMConfig = async (id: number): Promise => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/llm-configs/${id}`,
- {
- method: "DELETE",
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- }
+ { method: "DELETE" }
);
if (!response.ok) {
@@ -151,14 +139,11 @@ export function useLLMConfigs(searchSpaceId: number | null) {
config: UpdateLLMConfig
): Promise => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/llm-configs/${id}`,
{
method: "PUT",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify(config),
}
);
@@ -203,14 +188,9 @@ export function useLLMPreferences(searchSpaceId: number | null) {
try {
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/llm-preferences`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
@@ -239,14 +219,11 @@ export function useLLMPreferences(searchSpaceId: number | null) {
}
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/llm-preferences`,
{
method: "PUT",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify(newPreferences),
}
);
@@ -293,14 +270,9 @@ export function useGlobalLLMConfigs() {
const fetchGlobalConfigs = async () => {
try {
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/global-llm-configs`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
diff --git a/surfsense_web/hooks/use-logs.ts b/surfsense_web/hooks/use-logs.ts
index 7defd8345..6ce025e89 100644
--- a/surfsense_web/hooks/use-logs.ts
+++ b/surfsense_web/hooks/use-logs.ts
@@ -1,6 +1,7 @@
"use client";
import { useCallback, useEffect, useMemo, useState } from "react";
import { toast } from "sonner";
+import { authenticatedFetch } from "@/lib/auth-utils";
export type LogLevel = "DEBUG" | "INFO" | "WARNING" | "ERROR" | "CRITICAL";
export type LogStatus = "IN_PROGRESS" | "SUCCESS" | "FAILED";
@@ -95,14 +96,9 @@ export function useLogs(searchSpaceId?: number, filters: LogFilters = {}) {
if (options.skip !== undefined) params.append("skip", options.skip.toString());
if (options.limit !== undefined) params.append("limit", options.limit.toString());
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs?${params}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
@@ -147,14 +143,14 @@ export function useLogs(searchSpaceId?: number, filters: LogFilters = {}) {
// Function to create a new log
const createLog = useCallback(async (logData: Omit) => {
try {
- const response = await fetch(`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs`, {
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "POST",
- body: JSON.stringify(logData),
- });
+ const response = await authenticatedFetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs`,
+ {
+ headers: { "Content-Type": "application/json" },
+ method: "POST",
+ body: JSON.stringify(logData),
+ }
+ );
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
@@ -179,13 +175,10 @@ export function useLogs(searchSpaceId?: number, filters: LogFilters = {}) {
updateData: Partial>
) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs/${logId}`,
{
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
method: "PUT",
body: JSON.stringify(updateData),
}
@@ -212,14 +205,9 @@ export function useLogs(searchSpaceId?: number, filters: LogFilters = {}) {
// Function to delete a log
const deleteLog = useCallback(async (logId: number) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs/${logId}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "DELETE",
- }
+ { method: "DELETE" }
);
if (!response.ok) {
@@ -240,14 +228,9 @@ export function useLogs(searchSpaceId?: number, filters: LogFilters = {}) {
// Function to get a single log
const getLog = useCallback(async (logId: number) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs/${logId}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
@@ -287,14 +270,9 @@ export function useLogsSummary(searchSpaceId: number, hours: number = 24) {
try {
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/logs/search-space/${searchSpaceId}/summary?hours=${hours}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
diff --git a/surfsense_web/hooks/use-rbac.ts b/surfsense_web/hooks/use-rbac.ts
index 6033f887f..ee3450746 100644
--- a/surfsense_web/hooks/use-rbac.ts
+++ b/surfsense_web/hooks/use-rbac.ts
@@ -2,6 +2,7 @@
import { useCallback, useEffect, useMemo, useState } from "react";
import { toast } from "sonner";
+import { authenticatedFetch, getBearerToken, handleUnauthorized } from "@/lib/auth-utils";
// ============ Types ============
@@ -105,22 +106,11 @@ export function useMembers(searchSpaceId: number) {
try {
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
- if (response.status === 401) {
- localStorage.removeItem("surfsense_bearer_token");
- window.location.href = "/";
- throw new Error("Unauthorized");
- }
-
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(errorData.detail || "Failed to fetch members");
@@ -145,13 +135,10 @@ export function useMembers(searchSpaceId: number) {
const updateMemberRole = useCallback(
async (membershipId: number, roleId: number | null) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members/${membershipId}`,
{
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
method: "PUT",
body: JSON.stringify({ role_id: roleId }),
}
@@ -177,14 +164,9 @@ export function useMembers(searchSpaceId: number) {
const removeMember = useCallback(
async (membershipId: number) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members/${membershipId}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "DELETE",
- }
+ { method: "DELETE" }
);
if (!response.ok) {
@@ -205,14 +187,9 @@ export function useMembers(searchSpaceId: number) {
const leaveSearchSpace = useCallback(async () => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/members/me`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "DELETE",
- }
+ { method: "DELETE" }
);
if (!response.ok) {
@@ -251,22 +228,11 @@ export function useRoles(searchSpaceId: number) {
try {
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
- if (response.status === 401) {
- localStorage.removeItem("surfsense_bearer_token");
- window.location.href = "/";
- throw new Error("Unauthorized");
- }
-
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(errorData.detail || "Failed to fetch roles");
@@ -291,13 +257,10 @@ export function useRoles(searchSpaceId: number) {
const createRole = useCallback(
async (roleData: RoleCreate) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles`,
{
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
method: "POST",
body: JSON.stringify(roleData),
}
@@ -323,13 +286,10 @@ export function useRoles(searchSpaceId: number) {
const updateRole = useCallback(
async (roleId: number, roleData: RoleUpdate) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles/${roleId}`,
{
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
method: "PUT",
body: JSON.stringify(roleData),
}
@@ -355,14 +315,9 @@ export function useRoles(searchSpaceId: number) {
const deleteRole = useCallback(
async (roleId: number) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/roles/${roleId}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "DELETE",
- }
+ { method: "DELETE" }
);
if (!response.ok) {
@@ -404,22 +359,11 @@ export function useInvites(searchSpaceId: number) {
try {
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
- if (response.status === 401) {
- localStorage.removeItem("surfsense_bearer_token");
- window.location.href = "/";
- throw new Error("Unauthorized");
- }
-
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(errorData.detail || "Failed to fetch invites");
@@ -444,13 +388,10 @@ export function useInvites(searchSpaceId: number) {
const createInvite = useCallback(
async (inviteData: InviteCreate) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites`,
{
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
method: "POST",
body: JSON.stringify(inviteData),
}
@@ -476,13 +417,10 @@ export function useInvites(searchSpaceId: number) {
const updateInvite = useCallback(
async (inviteId: number, inviteData: InviteUpdate) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites/${inviteId}`,
{
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
method: "PUT",
body: JSON.stringify(inviteData),
}
@@ -508,14 +446,9 @@ export function useInvites(searchSpaceId: number) {
const revokeInvite = useCallback(
async (inviteId: number) => {
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/invites/${inviteId}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "DELETE",
- }
+ { method: "DELETE" }
);
if (!response.ok) {
@@ -555,14 +488,9 @@ export function usePermissions() {
const fetchPermissions = useCallback(async () => {
try {
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/permissions`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
@@ -619,22 +547,11 @@ export function useUserAccess(searchSpaceId: number) {
try {
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}/my-access`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
- if (response.status === 401) {
- localStorage.removeItem("surfsense_bearer_token");
- window.location.href = "/";
- throw new Error("Unauthorized");
- }
-
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(errorData.detail || "Failed to fetch access info");
@@ -737,13 +654,10 @@ export function useInviteInfo(inviteCode: string | null) {
}
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/invites/accept`,
{
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
+ headers: { "Content-Type": "application/json" },
method: "POST",
body: JSON.stringify({ invite_code: inviteCode }),
}
diff --git a/surfsense_web/hooks/use-search-source-connectors.ts b/surfsense_web/hooks/use-search-source-connectors.ts
index 41b5f5115..22c5b3553 100644
--- a/surfsense_web/hooks/use-search-source-connectors.ts
+++ b/surfsense_web/hooks/use-search-source-connectors.ts
@@ -1,4 +1,5 @@
import { useCallback, useEffect, useState } from "react";
+import { authenticatedFetch, getBearerToken, handleUnauthorized } from "@/lib/auth-utils";
export interface SearchSourceConnector {
id: number;
@@ -66,11 +67,6 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
try {
setIsLoading(true);
setError(null);
- const token = localStorage.getItem("surfsense_bearer_token");
-
- if (!token) {
- throw new Error("No authentication token found");
- }
// Build URL with optional search_space_id query parameter
const url = new URL(
@@ -80,12 +76,9 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
url.searchParams.append("search_space_id", spaceId.toString());
}
- const response = await fetch(url.toString(), {
+ const response = await authenticatedFetch(url.toString(), {
method: "GET",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
+ headers: { "Content-Type": "application/json" },
});
if (!response.ok) {
@@ -176,24 +169,15 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
spaceId: number
) => {
try {
- const token = localStorage.getItem("surfsense_bearer_token");
-
- if (!token) {
- throw new Error("No authentication token found");
- }
-
// Add search_space_id as a query parameter
const url = new URL(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors`
);
url.searchParams.append("search_space_id", spaceId.toString());
- const response = await fetch(url.toString(), {
+ const response = await authenticatedFetch(url.toString(), {
method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify(connectorData),
});
@@ -222,20 +206,11 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
>
) => {
try {
- const token = localStorage.getItem("surfsense_bearer_token");
-
- if (!token) {
- throw new Error("No authentication token found");
- }
-
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}`,
{
method: "PUT",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
+ headers: { "Content-Type": "application/json" },
body: JSON.stringify(connectorData),
}
);
@@ -262,20 +237,11 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
*/
const deleteConnector = async (connectorId: number) => {
try {
- const token = localStorage.getItem("surfsense_bearer_token");
-
- if (!token) {
- throw new Error("No authentication token found");
- }
-
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}`,
{
method: "DELETE",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
+ headers: { "Content-Type": "application/json" },
}
);
@@ -302,12 +268,6 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
endDate?: string
) => {
try {
- const token = localStorage.getItem("surfsense_bearer_token");
-
- if (!token) {
- throw new Error("No authentication token found");
- }
-
// Build query parameters
const params = new URLSearchParams({
search_space_id: searchSpaceId.toString(),
@@ -319,16 +279,13 @@ export const useSearchSourceConnectors = (lazy: boolean = false, searchSpaceId?:
params.append("end_date", endDate);
}
- const response = await fetch(
+ const response = await authenticatedFetch(
`${
process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL
}/api/v1/search-source-connectors/${connectorId}/index?${params.toString()}`,
{
method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${token}`,
- },
+ headers: { "Content-Type": "application/json" },
}
);
diff --git a/surfsense_web/hooks/use-search-space.ts b/surfsense_web/hooks/use-search-space.ts
index 46ff64187..849aad413 100644
--- a/surfsense_web/hooks/use-search-space.ts
+++ b/surfsense_web/hooks/use-search-space.ts
@@ -2,6 +2,7 @@
import { useCallback, useEffect, useState } from "react";
import { toast } from "sonner";
+import { authenticatedFetch } from "@/lib/auth-utils";
interface SearchSpace {
created_at: string;
@@ -29,23 +30,11 @@ export function useSearchSpace({ searchSpaceId, autoFetch = true }: UseSearchSpa
if (typeof window === "undefined") return;
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces/${searchSpaceId}`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
- if (response.status === 401) {
- // Clear token and redirect to home
- localStorage.removeItem("surfsense_bearer_token");
- window.location.href = "/";
- throw new Error("Unauthorized: Redirecting to login page");
- }
-
if (!response.ok) {
throw new Error(`Failed to fetch search space: ${response.status}`);
}
diff --git a/surfsense_web/hooks/use-search-spaces.ts b/surfsense_web/hooks/use-search-spaces.ts
index f69144081..03a87881c 100644
--- a/surfsense_web/hooks/use-search-spaces.ts
+++ b/surfsense_web/hooks/use-search-spaces.ts
@@ -2,6 +2,7 @@
import { useEffect, useState } from "react";
import { toast } from "sonner";
+import { authenticatedFetch } from "@/lib/auth-utils";
interface SearchSpace {
id: number;
@@ -23,19 +24,14 @@ export function useSearchSpaces() {
const fetchSearchSpaces = async () => {
try {
setLoading(true);
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
- toast.error("Not authenticated");
- throw new Error("Not authenticated");
+ toast.error("Failed to fetch search spaces");
+ throw new Error("Failed to fetch search spaces");
}
const data = await response.json();
@@ -56,19 +52,14 @@ export function useSearchSpaces() {
const refreshSearchSpaces = async () => {
setLoading(true);
try {
- const response = await fetch(
+ const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/searchspaces`,
- {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- }
+ { method: "GET" }
);
if (!response.ok) {
- toast.error("Not authenticated");
- throw new Error("Not authenticated");
+ toast.error("Failed to fetch search spaces");
+ throw new Error("Failed to fetch search spaces");
}
const data = await response.json();
diff --git a/surfsense_web/hooks/use-user.ts b/surfsense_web/hooks/use-user.ts
index 23a23237b..e81ac350b 100644
--- a/surfsense_web/hooks/use-user.ts
+++ b/surfsense_web/hooks/use-user.ts
@@ -2,6 +2,7 @@
import { useEffect, useState } from "react";
import { toast } from "sonner";
+import { authenticatedFetch } from "@/lib/auth-utils";
interface User {
id: string;
@@ -25,19 +26,10 @@ export function useUser() {
if (typeof window === "undefined") return;
setLoading(true);
- const response = await fetch(`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/users/me`, {
- headers: {
- Authorization: `Bearer ${localStorage.getItem("surfsense_bearer_token")}`,
- },
- method: "GET",
- });
-
- if (response.status === 401) {
- // Clear token and redirect to home
- localStorage.removeItem("surfsense_bearer_token");
- window.location.href = "/";
- throw new Error("Unauthorized: Redirecting to login page");
- }
+ const response = await authenticatedFetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/users/me`,
+ { method: "GET" }
+ );
if (!response.ok) {
throw new Error(`Failed to fetch user: ${response.status}`);
diff --git a/surfsense_web/lib/apis/base-api.service.ts b/surfsense_web/lib/apis/base-api.service.ts
index 51b1f69fb..3013be70a 100644
--- a/surfsense_web/lib/apis/base-api.service.ts
+++ b/surfsense_web/lib/apis/base-api.service.ts
@@ -1,5 +1,5 @@
-import { th } from "date-fns/locale";
import type z from "zod";
+import { getBearerToken, handleUnauthorized } from "../auth-utils";
import { AppError, AuthenticationError, AuthorizationError, NotFoundError } from "../error";
enum ResponseType {
@@ -132,6 +132,8 @@ class BaseApiService {
switch (response.status) {
case 401:
+ // Use centralized auth handler for 401 responses
+ handleUnauthorized();
throw new AuthenticationError(
"You are not authenticated. Please login again.",
response.status,
@@ -261,6 +263,6 @@ class BaseApiService {
}
export const baseApiService = new BaseApiService(
- typeof window !== "undefined" ? localStorage.getItem("surfsense_bearer_token") || "" : "",
+ typeof window !== "undefined" ? getBearerToken() || "" : "",
process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || ""
);
diff --git a/surfsense_web/lib/auth-utils.ts b/surfsense_web/lib/auth-utils.ts
new file mode 100644
index 000000000..c1dc7194b
--- /dev/null
+++ b/surfsense_web/lib/auth-utils.ts
@@ -0,0 +1,173 @@
+/**
+ * Authentication utilities for handling token expiration and redirects
+ */
+
+const REDIRECT_PATH_KEY = "surfsense_redirect_path";
+const BEARER_TOKEN_KEY = "surfsense_bearer_token";
+
+/**
+ * Saves the current path and redirects to login page
+ * Call this when a 401 response is received
+ */
+export function handleUnauthorized(): void {
+ if (typeof window === "undefined") return;
+
+ // Save the current path (including search params and hash) for redirect after login
+ const currentPath = window.location.pathname + window.location.search + window.location.hash;
+
+ // Don't save auth-related paths
+ const excludedPaths = ["/auth", "/auth/callback", "/"];
+ if (!excludedPaths.includes(window.location.pathname)) {
+ localStorage.setItem(REDIRECT_PATH_KEY, currentPath);
+ }
+
+ // Clear the token
+ localStorage.removeItem(BEARER_TOKEN_KEY);
+
+ // Redirect to the login page (saved path is restored after login)
+ window.location.href = "/login";
+}
+
+/**
+ * Gets the stored redirect path and clears it from storage
+ * Call this after successful login to redirect the user back
+ */
+export function getAndClearRedirectPath(): string | null {
+ if (typeof window === "undefined") return null;
+
+ const redirectPath = localStorage.getItem(REDIRECT_PATH_KEY);
+ if (redirectPath) {
+ localStorage.removeItem(REDIRECT_PATH_KEY);
+ }
+ return redirectPath;
+}
+
+/**
+ * Gets the bearer token from localStorage
+ */
+export function getBearerToken(): string | null {
+ if (typeof window === "undefined") return null;
+ return localStorage.getItem(BEARER_TOKEN_KEY);
+}
+
+/**
+ * Sets the bearer token in localStorage
+ */
+export function setBearerToken(token: string): void {
+ if (typeof window === "undefined") return;
+ localStorage.setItem(BEARER_TOKEN_KEY, token);
+}
+
+/**
+ * Clears the bearer token from localStorage
+ */
+export function clearBearerToken(): void {
+ if (typeof window === "undefined") return;
+ localStorage.removeItem(BEARER_TOKEN_KEY);
+}
+
+/**
+ * Checks if the user is authenticated (has a token)
+ */
+export function isAuthenticated(): boolean {
+ return !!getBearerToken();
+}
+
+/**
+ * Saves the current path and redirects to login page
+ * Use this for client-side auth checks (e.g., in useEffect)
+ * Unlike handleUnauthorized, this doesn't clear the token (user might not have one)
+ */
+export function redirectToLogin(): void {
+ if (typeof window === "undefined") return;
+
+ // Save the current path (including search params and hash) for redirect after login
+ const currentPath = window.location.pathname + window.location.search + window.location.hash;
+
+ // Don't save auth-related paths or home page
+ const excludedPaths = ["/auth", "/auth/callback", "/", "/login", "/register"];
+ if (!excludedPaths.includes(window.location.pathname)) {
+ localStorage.setItem(REDIRECT_PATH_KEY, currentPath);
+ }
+
+ // Redirect to login page
+ window.location.href = "/login";
+}
+
+/**
+ * Creates headers with authorization bearer token
+ */
+export function getAuthHeaders(additionalHeaders?: Record): Record {
+ const token = getBearerToken();
+ return {
+ ...(token ? { Authorization: `Bearer ${token}` } : {}),
+ ...additionalHeaders,
+ };
+}
+
+/**
+ * Authenticated fetch wrapper that handles 401 responses uniformly
+ * Automatically redirects to login on 401 and saves the current path
+ */
+export async function authenticatedFetch(
+ url: string,
+ options?: RequestInit & { skipAuthRedirect?: boolean }
+): Promise {
+ const { skipAuthRedirect = false, ...fetchOptions } = options || {};
+
+ const headers = getAuthHeaders(fetchOptions.headers as Record);
+
+ const response = await fetch(url, {
+ ...fetchOptions,
+ headers,
+ });
+
+ // Handle 401 Unauthorized
+ if (response.status === 401 && !skipAuthRedirect) {
+ handleUnauthorized();
+ throw new Error("Unauthorized: Redirecting to login page");
+ }
+
+ return response;
+}
+
+/**
+ * Type for the result of a fetch operation with built-in error handling
+ */
+export type FetchResult =
+ | { success: true; data: T; response: Response }
+ | { success: false; error: string; status?: number };
+
+/**
+ * Authenticated fetch with JSON response handling
+ * Returns a result object instead of throwing on non-401 errors
+ */
+export async function authenticatedFetchJson(
+ url: string,
+ options?: RequestInit & { skipAuthRedirect?: boolean }
+): Promise> {
+ try {
+ const response = await authenticatedFetch(url, options);
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ return {
+ success: false,
+ error: errorData.detail || `Request failed: ${response.status}`,
+ status: response.status,
+ };
+ }
+
+ const data = await response.json();
+ return { success: true, data, response };
+ } catch (err: any) {
+ // Re-throw if it's the unauthorized redirect
+ if (err.message?.includes("Unauthorized")) {
+ throw err;
+ }
+ return {
+ success: false,
+ error: err.message || "Request failed",
+ };
+ }
+}
From 081080233a68b8b3dd7d77d795c084a23229d809 Mon Sep 17 00:00:00 2001
From: WayChan
Date: Wed, 3 Dec 2025 15:32:32 +0000
Subject: [PATCH 25/36] fix: saving document from browser extension fails due
 to missing and mismatched fields of backend data models
---
surfsense_backend/app/routes/documents_routes.py | 6 +++++-
surfsense_backend/app/tasks/celery_tasks/document_tasks.py | 6 +++++-
2 files changed, 10 insertions(+), 2 deletions(-)
diff --git a/surfsense_backend/app/routes/documents_routes.py b/surfsense_backend/app/routes/documents_routes.py
index 67015243f..1d2a3ca78 100644
--- a/surfsense_backend/app/routes/documents_routes.py
+++ b/surfsense_backend/app/routes/documents_routes.py
@@ -71,8 +71,12 @@ async def create_documents(
"metadata": {
"VisitedWebPageTitle": individual_document.metadata.VisitedWebPageTitle,
"VisitedWebPageURL": individual_document.metadata.VisitedWebPageURL,
+ "BrowsingSessionId": individual_document.metadata.BrowsingSessionId,
+ "VisitedWebPageDateWithTimeInISOString": individual_document.metadata.VisitedWebPageDateWithTimeInISOString,
+ "VisitedWebPageVisitDurationInMilliseconds": individual_document.metadata.VisitedWebPageVisitDurationInMilliseconds,
+ "VisitedWebPageReffererURL": individual_document.metadata.VisitedWebPageReffererURL,
},
- "content": individual_document.content,
+ "pageContent": individual_document.content,
}
process_extension_document_task.delay(
document_dict, request.search_space_id, str(user.id)
diff --git a/surfsense_backend/app/tasks/celery_tasks/document_tasks.py b/surfsense_backend/app/tasks/celery_tasks/document_tasks.py
index 5cf5a662a..a7b750673 100644
--- a/surfsense_backend/app/tasks/celery_tasks/document_tasks.py
+++ b/surfsense_backend/app/tasks/celery_tasks/document_tasks.py
@@ -69,10 +69,14 @@ async def _process_extension_document(
class DocumentMetadata(BaseModel):
VisitedWebPageTitle: str
VisitedWebPageURL: str
+ BrowsingSessionId: str
+ VisitedWebPageDateWithTimeInISOString: str
+ VisitedWebPageReffererURL: str
+ VisitedWebPageVisitDurationInMilliseconds: str
class IndividualDocument(BaseModel):
metadata: DocumentMetadata
- content: str
+ pageContent: str
individual_document = IndividualDocument(**individual_document_dict)
From 875924e5fd21a1132e59f39c3259f569755a2478 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Thu, 4 Dec 2025 00:38:13 +0200
Subject: [PATCH 26/36] jira-connector: update make_api_request to accept POST
 with payload
---
surfsense_backend/app/connectors/jira_connector.py | 13 +++++++++++--
1 file changed, 11 insertions(+), 2 deletions(-)
diff --git a/surfsense_backend/app/connectors/jira_connector.py b/surfsense_backend/app/connectors/jira_connector.py
index ef0e00329..2a68c21f1 100644
--- a/surfsense_backend/app/connectors/jira_connector.py
+++ b/surfsense_backend/app/connectors/jira_connector.py
@@ -92,7 +92,11 @@ class JiraConnector:
}
def make_api_request(
- self, endpoint: str, params: dict[str, Any] | None = None
+ self,
+ endpoint: str,
+ params: dict[str, Any] | None = None,
+ method: str = "GET",
+ json_payload: dict[str, Any] | None = None,
) -> dict[str, Any]:
"""
Make a request to the Jira API.
@@ -116,7 +120,12 @@ class JiraConnector:
url = f"{self.base_url}/rest/api/{self.api_version}/{endpoint}"
headers = self.get_headers()
- response = requests.get(url, headers=headers, params=params, timeout=500)
+ if method.upper() == "POST":
+ response = requests.post(
+ url, headers=headers, json=json_payload, timeout=500
+ )
+ else:
+ response = requests.get(url, headers=headers, params=params, timeout=500)
if response.status_code == 200:
return response.json()
From 4df6b09db9398e1508663ce8d7f0e31564b01651 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Thu, 4 Dec 2025 00:42:10 +0200
Subject: [PATCH 27/36] jira-connector: update get all issues method
---
.../app/connectors/jira_connector.py | 20 +++++++++++--------
1 file changed, 12 insertions(+), 8 deletions(-)
diff --git a/surfsense_backend/app/connectors/jira_connector.py b/surfsense_backend/app/connectors/jira_connector.py
index 2a68c21f1..3578abca1 100644
--- a/surfsense_backend/app/connectors/jira_connector.py
+++ b/surfsense_backend/app/connectors/jira_connector.py
@@ -178,19 +178,23 @@ class JiraConnector:
"project",
]
- params = {
- "jql": jql,
- "fields": ",".join(fields),
- "maxResults": 100,
- "startAt": 0,
- }
+ all_issues = []
+ start_at = 0
+ max_results = 100
all_issues = []
start_at = 0
while True:
- params["startAt"] = start_at
- result = self.make_api_request("search", params)
+ json_payload = {
+ "jql": jql,
+ "fields": fields, # API accepts list
+ "maxResults": max_results,
+ "startAt": start_at,
+ }
+ result = self.make_api_request(
+ "search/jql", json_payload=json_payload, method="POST"
+ )
if not isinstance(result, dict) or "issues" not in result:
raise Exception("Invalid response from Jira API")
From abf017eabb2278b96eb3952112676faaf4390e53 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Thu, 4 Dec 2025 00:48:54 +0200
Subject: [PATCH 28/36] jira-connector: update get_issues_by_date_range method
---
.../app/connectors/jira_connector.py | 51 ++++++++++---------
1 file changed, 26 insertions(+), 25 deletions(-)
diff --git a/surfsense_backend/app/connectors/jira_connector.py b/surfsense_backend/app/connectors/jira_connector.py
index 3578abca1..c0bc080fd 100644
--- a/surfsense_backend/app/connectors/jira_connector.py
+++ b/surfsense_backend/app/connectors/jira_connector.py
@@ -6,6 +6,7 @@ Allows fetching issue lists and their comments, projects and more.
"""
import base64
+import re
from datetime import datetime
from typing import Any
@@ -220,7 +221,7 @@ class JiraConnector:
project_key: str | None = None,
) -> tuple[list[dict[str, Any]], str | None]:
"""
- Fetch issues within a date range.
+ Fetch issues created OR updated within a date range using /search/jql.
Args:
start_date: Start date in YYYY-MM-DD format
@@ -232,20 +233,20 @@ class JiraConnector:
Tuple containing (issues list, error message or None)
"""
try:
- # Build JQL query for date range
- # Query issues that were either created OR updated within the date range
- date_filter = (
- f"(createdDate >= '{start_date}' AND createdDate <= '{end_date}')"
+ # Validate date format (simple YYYY-MM-DD check)
+ for d in (start_date, end_date):
+ if not re.match(r"^\d{4}-\d{2}-\d{2}$", d):
+ return [], f"Invalid date format: {d}. Expected YYYY-MM-DD."
+
+ # Build JQL: issues created OR updated within date range
+ date_jql = (
+ f'(created >= "{start_date}" AND created <= "{end_date}") '
+ f'OR (updated >= "{start_date}" AND updated <= "{end_date}")'
)
- # TODO : This JQL needs some improvement to work as expected
-
- _jql = f"{date_filter}"
+ jql = f"({date_jql}) ORDER BY created DESC"
if project_key:
- _jql = (
- f'project = "{project_key}" AND {date_filter} ORDER BY created DESC'
- )
+ jql = f'project = "{project_key}" AND {jql}'
- # Define fields to retrieve
fields = [
"summary",
"description",
@@ -258,24 +259,25 @@ class JiraConnector:
"issuetype",
"project",
]
-
if include_comments:
fields.append("comment")
- params = {
- # "jql": "", TODO : Add a JQL query to filter from a date range
- "fields": ",".join(fields),
- "maxResults": 100,
- "startAt": 0,
- }
-
- all_issues = []
+ all_issues: list[dict[str, Any]] = []
start_at = 0
+ max_results = 100
while True:
- params["startAt"] = start_at
+ json_payload = {
+ "jql": jql,
+ "fields": fields, # pass as list
+ "maxResults": max_results,
+ "startAt": start_at,
+ }
- result = self.make_api_request("search", params)
+ # Call new endpoint with POST
+ result = self.make_api_request(
+ "search/jql", json_payload=json_payload, method="POST"
+ )
if not isinstance(result, dict) or "issues" not in result:
return [], "Invalid response from Jira API"
@@ -283,9 +285,8 @@ class JiraConnector:
issues = result["issues"]
all_issues.extend(issues)
- # Check if there are more issues to fetch
total = result.get("total", 0)
- if start_at + len(issues) >= total:
+ if start_at + len(issues) >= total or len(issues) == 0:
break
start_at += len(issues)
From 107f013ff9934fc7d51484b997dbd9021860539b Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Thu, 4 Dec 2025 01:21:46 +0200
Subject: [PATCH 29/36] jira-connector: update get_issues_by_date_range method
---
.../app/connectors/jira_connector.py | 55 ++++++++++---------
1 file changed, 28 insertions(+), 27 deletions(-)
diff --git a/surfsense_backend/app/connectors/jira_connector.py b/surfsense_backend/app/connectors/jira_connector.py
index c0bc080fd..d9474ee79 100644
--- a/surfsense_backend/app/connectors/jira_connector.py
+++ b/surfsense_backend/app/connectors/jira_connector.py
@@ -6,7 +6,6 @@ Allows fetching issue lists and their comments, projects and more.
"""
import base64
-import re
from datetime import datetime
from typing import Any
@@ -221,7 +220,7 @@ class JiraConnector:
project_key: str | None = None,
) -> tuple[list[dict[str, Any]], str | None]:
"""
- Fetch issues created OR updated within a date range using /search/jql.
+ Fetch issues within a date range.
Args:
start_date: Start date in YYYY-MM-DD format
@@ -233,20 +232,20 @@ class JiraConnector:
Tuple containing (issues list, error message or None)
"""
try:
- # Validate date format (simple YYYY-MM-DD check)
- for d in (start_date, end_date):
- if not re.match(r"^\d{4}-\d{2}-\d{2}$", d):
- return [], f"Invalid date format: {d}. Expected YYYY-MM-DD."
-
- # Build JQL: issues created OR updated within date range
- date_jql = (
- f'(created >= "{start_date}" AND created <= "{end_date}") '
- f'OR (updated >= "{start_date}" AND updated <= "{end_date}")'
+ # Build JQL query for date range
+ # Query issues that were either created OR updated within the date range
+ date_filter = (
+ f"(createdDate >= '{start_date}' AND createdDate <= '{end_date}')"
)
- jql = f"({date_jql}) ORDER BY created DESC"
- if project_key:
- jql = f'project = "{project_key}" AND {jql}'
+ # TODO : This JQL needs some improvement to work as expected
+ _jql = f"{date_filter}"
+ if project_key:
+ _jql = (
+ f'project = "{project_key}" AND {date_filter} ORDER BY created DESC'
+ )
+
+ # Define fields to retrieve
fields = [
"summary",
"description",
@@ -259,25 +258,26 @@ class JiraConnector:
"issuetype",
"project",
]
+
if include_comments:
fields.append("comment")
- all_issues: list[dict[str, Any]] = []
+ print(f"JQL query: {_jql}")
+
+ params = {
+ "jql": _jql,
+ "fields": ",".join(fields),
+ "maxResults": 100,
+ "startAt": 0,
+ }
+
+ all_issues = []
start_at = 0
- max_results = 100
while True:
- json_payload = {
- "jql": jql,
- "fields": fields, # pass as list
- "maxResults": max_results,
- "startAt": start_at,
- }
+ params["startAt"] = start_at
- # Call new endpoint with POST
- result = self.make_api_request(
- "search/jql", json_payload=json_payload, method="POST"
- )
+ result = self.make_api_request("search/jql", params)
if not isinstance(result, dict) or "issues" not in result:
return [], "Invalid response from Jira API"
@@ -285,8 +285,9 @@ class JiraConnector:
issues = result["issues"]
all_issues.extend(issues)
+ # Check if there are more issues to fetch
total = result.get("total", 0)
- if start_at + len(issues) >= total or len(issues) == 0:
+ if start_at + len(issues) >= total:
break
start_at += len(issues)
From 3c423436cc87c5a2bcc01d99e3a303ffbc9670a7 Mon Sep 17 00:00:00 2001
From: WayChan
Date: Thu, 4 Dec 2025 00:28:39 +0000
Subject: [PATCH 30/36] fix: retrieve wrong field for content in saving
extension document.
---
surfsense_backend/app/routes/documents_routes.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/surfsense_backend/app/routes/documents_routes.py b/surfsense_backend/app/routes/documents_routes.py
index 1d2a3ca78..a959db5e6 100644
--- a/surfsense_backend/app/routes/documents_routes.py
+++ b/surfsense_backend/app/routes/documents_routes.py
@@ -76,7 +76,7 @@ async def create_documents(
"VisitedWebPageVisitDurationInMilliseconds": individual_document.metadata.VisitedWebPageVisitDurationInMilliseconds,
"VisitedWebPageReffererURL": individual_document.metadata.VisitedWebPageReffererURL,
},
- "pageContent": individual_document.content,
+ "pageContent": individual_document.pageContent,
}
process_extension_document_task.delay(
document_dict, request.search_space_id, str(user.id)
From f76a94d9a96c2c930c219c2460d434378896e0bc Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Wed, 3 Dec 2025 17:54:03 -0800
Subject: [PATCH 31/36] security: fix CVE-2025-66478
---
surfsense_web/package.json | 4 +-
surfsense_web/pnpm-lock.yaml | 152 +++++++++++++++++------------------
2 files changed, 74 insertions(+), 82 deletions(-)
diff --git a/surfsense_web/package.json b/surfsense_web/package.json
index 7f97111b9..8f85e96a7 100644
--- a/surfsense_web/package.json
+++ b/surfsense_web/package.json
@@ -28,7 +28,7 @@
"@blocknote/server-util": "^0.42.3",
"@hookform/resolvers": "^4.1.3",
"@llamaindex/chat-ui": "^0.5.17",
- "@next/third-parties": "^15.5.6",
+ "@next/third-parties": "^15.5.7",
"@number-flow/react": "^0.5.10",
"@radix-ui/react-accordion": "^1.2.11",
"@radix-ui/react-alert-dialog": "^1.1.14",
@@ -74,7 +74,7 @@
"jotai-tanstack-query": "^0.11.0",
"lucide-react": "^0.477.0",
"motion": "^12.23.22",
- "next": "^15.5.6",
+ "next": "^15.5.7",
"next-intl": "^3.26.5",
"next-themes": "^0.4.6",
"pg": "^8.16.3",
diff --git a/surfsense_web/pnpm-lock.yaml b/surfsense_web/pnpm-lock.yaml
index 285099816..dd431ffae 100644
--- a/surfsense_web/pnpm-lock.yaml
+++ b/surfsense_web/pnpm-lock.yaml
@@ -30,8 +30,8 @@ importers:
specifier: ^0.5.17
version: 0.5.17(@babel/runtime@7.26.9)(@codemirror/autocomplete@6.18.6)(@codemirror/language@6.11.2)(@codemirror/lint@6.8.5)(@codemirror/search@6.5.11)(@codemirror/state@6.5.2)(@codemirror/theme-one-dark@6.1.3)(@codemirror/view@6.38.1)(@lezer/highlight@1.2.1)(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(codemirror@6.0.2)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(yjs@13.6.27)
'@next/third-parties':
- specifier: ^15.5.6
- version: 15.5.6(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)
+ specifier: ^15.5.7
+ version: 15.5.7(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)
'@number-flow/react':
specifier: ^0.5.10
version: 0.5.10(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
@@ -145,16 +145,16 @@ importers:
version: 1.4.8(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
fumadocs-core:
specifier: ^15.6.6
- version: 15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ version: 15.6.6(@types/react@19.1.8)(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
fumadocs-mdx:
specifier: ^11.7.1
- version: 11.7.1(acorn@8.15.0)(fumadocs-core@15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)
+ version: 11.7.1(acorn@8.14.0)(fumadocs-core@15.6.6(@types/react@19.1.8)(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)
fumadocs-ui:
specifier: ^15.6.6
- version: 15.6.6(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(tailwindcss@4.1.11)
+ version: 15.6.6(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(tailwindcss@4.1.11)
geist:
specifier: ^1.4.2
- version: 1.4.2(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))
+ version: 1.4.2(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))
jotai:
specifier: ^2.15.1
version: 2.15.1(@types/react@19.1.8)(react@19.1.0)
@@ -168,11 +168,11 @@ importers:
specifier: ^12.23.22
version: 12.23.22(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
next:
- specifier: ^15.5.6
- version: 15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ specifier: ^15.5.7
+ version: 15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
next-intl:
specifier: ^3.26.5
- version: 3.26.5(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)
+ version: 3.26.5(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)
next-themes:
specifier: ^0.4.6
version: 0.4.6(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
@@ -1326,62 +1326,62 @@ packages:
react: '>= 18 || >= 19'
react-dom: '>= 18 || >= 19'
- '@next/env@15.5.6':
- resolution: {integrity: sha512-3qBGRW+sCGzgbpc5TS1a0p7eNxnOarGVQhZxfvTdnV0gFI61lX7QNtQ4V1TSREctXzYn5NetbUsLvyqwLFJM6Q==}
+ '@next/env@15.5.7':
+ resolution: {integrity: sha512-4h6Y2NyEkIEN7Z8YxkA27pq6zTkS09bUSYC0xjd0NpwFxjnIKeZEeH591o5WECSmjpUhLn3H2QLJcDye3Uzcvg==}
'@next/eslint-plugin-next@15.2.0':
resolution: {integrity: sha512-jHFUG2OwmAuOASqq253RAEG/5BYcPHn27p1NoWZDCf4OdvdK0yRYWX92YKkL+Mk2s+GyJrmd/GATlL5b2IySpw==}
- '@next/swc-darwin-arm64@15.5.6':
- resolution: {integrity: sha512-ES3nRz7N+L5Umz4KoGfZ4XX6gwHplwPhioVRc25+QNsDa7RtUF/z8wJcbuQ2Tffm5RZwuN2A063eapoJ1u4nPg==}
+ '@next/swc-darwin-arm64@15.5.7':
+ resolution: {integrity: sha512-IZwtxCEpI91HVU/rAUOOobWSZv4P2DeTtNaCdHqLcTJU4wdNXgAySvKa/qJCgR5m6KI8UsKDXtO2B31jcaw1Yw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [darwin]
- '@next/swc-darwin-x64@15.5.6':
- resolution: {integrity: sha512-JIGcytAyk9LQp2/nuVZPAtj8uaJ/zZhsKOASTjxDug0SPU9LAM3wy6nPU735M1OqacR4U20LHVF5v5Wnl9ptTA==}
+ '@next/swc-darwin-x64@15.5.7':
+ resolution: {integrity: sha512-UP6CaDBcqaCBuiq/gfCEJw7sPEoX1aIjZHnBWN9v9qYHQdMKvCKcAVs4OX1vIjeE+tC5EIuwDTVIoXpUes29lg==}
engines: {node: '>= 10'}
cpu: [x64]
os: [darwin]
- '@next/swc-linux-arm64-gnu@15.5.6':
- resolution: {integrity: sha512-qvz4SVKQ0P3/Im9zcS2RmfFL/UCQnsJKJwQSkissbngnB/12c6bZTCB0gHTexz1s6d/mD0+egPKXAIRFVS7hQg==}
+ '@next/swc-linux-arm64-gnu@15.5.7':
+ resolution: {integrity: sha512-NCslw3GrNIw7OgmRBxHtdWFQYhexoUCq+0oS2ccjyYLtcn1SzGzeM54jpTFonIMUjNbHmpKpziXnpxhSWLcmBA==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
- '@next/swc-linux-arm64-musl@15.5.6':
- resolution: {integrity: sha512-FsbGVw3SJz1hZlvnWD+T6GFgV9/NYDeLTNQB2MXoPN5u9VA9OEDy6fJEfePfsUKAhJufFbZLgp0cPxMuV6SV0w==}
+ '@next/swc-linux-arm64-musl@15.5.7':
+ resolution: {integrity: sha512-nfymt+SE5cvtTrG9u1wdoxBr9bVB7mtKTcj0ltRn6gkP/2Nu1zM5ei8rwP9qKQP0Y//umK+TtkKgNtfboBxRrw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
- '@next/swc-linux-x64-gnu@15.5.6':
- resolution: {integrity: sha512-3QnHGFWlnvAgyxFxt2Ny8PTpXtQD7kVEeaFat5oPAHHI192WKYB+VIKZijtHLGdBBvc16tiAkPTDmQNOQ0dyrA==}
+ '@next/swc-linux-x64-gnu@15.5.7':
+ resolution: {integrity: sha512-hvXcZvCaaEbCZcVzcY7E1uXN9xWZfFvkNHwbe/n4OkRhFWrs1J1QV+4U1BN06tXLdaS4DazEGXwgqnu/VMcmqw==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
- '@next/swc-linux-x64-musl@15.5.6':
- resolution: {integrity: sha512-OsGX148sL+TqMK9YFaPFPoIaJKbFJJxFzkXZljIgA9hjMjdruKht6xDCEv1HLtlLNfkx3c5w2GLKhj7veBQizQ==}
+ '@next/swc-linux-x64-musl@15.5.7':
+ resolution: {integrity: sha512-4IUO539b8FmF0odY6/SqANJdgwn1xs1GkPO5doZugwZ3ETF6JUdckk7RGmsfSf7ws8Qb2YB5It33mvNL/0acqA==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
- '@next/swc-win32-arm64-msvc@15.5.6':
- resolution: {integrity: sha512-ONOMrqWxdzXDJNh2n60H6gGyKed42Ieu6UTVPZteXpuKbLZTH4G4eBMsr5qWgOBA+s7F+uB4OJbZnrkEDnZ5Fg==}
+ '@next/swc-win32-arm64-msvc@15.5.7':
+ resolution: {integrity: sha512-CpJVTkYI3ZajQkC5vajM7/ApKJUOlm6uP4BknM3XKvJ7VXAvCqSjSLmM0LKdYzn6nBJVSjdclx8nYJSa3xlTgQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [win32]
- '@next/swc-win32-x64-msvc@15.5.6':
- resolution: {integrity: sha512-pxK4VIjFRx1MY92UycLOOw7dTdvccWsNETQ0kDHkBlcFH1GrTLUjSiHU1ohrznnux6TqRHgv5oflhfIWZwVROQ==}
+ '@next/swc-win32-x64-msvc@15.5.7':
+ resolution: {integrity: sha512-gMzgBX164I6DN+9/PGA+9dQiwmTkE4TloBNx8Kv9UiGARsr9Nba7IpcBRA1iTV9vwlYnrE3Uy6I7Aj6qLjQuqw==}
engines: {node: '>= 10'}
cpu: [x64]
os: [win32]
- '@next/third-parties@15.5.6':
- resolution: {integrity: sha512-B1BLvEi7edGERNN0njxpiqbqkp3zAZ69eJ5C0vwj/XINRzcC25b9MCqxbSHq094d306H65UnlhEkBv+a8c74iA==}
+ '@next/third-parties@15.5.7':
+ resolution: {integrity: sha512-xIahs2sBMwYBtI0CLSUCMtKXen6T8G23Cq1WogA8BMRSIDqkhHutHpu+nrdzSNVg4yA59B8oU8DFdcGz4WHWtw==}
peerDependencies:
next: ^13.0.0 || ^14.0.0 || ^15.0.0
react: ^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0
@@ -4843,8 +4843,8 @@ packages:
next-tick@1.1.0:
resolution: {integrity: sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==}
- next@15.5.6:
- resolution: {integrity: sha512-zTxsnI3LQo3c9HSdSf91O1jMNsEzIXDShXd4wVdg9y5shwLqBXi4ZtUUJyB86KGVSJLZx0PFONvO54aheGX8QQ==}
+ next@15.5.7:
+ resolution: {integrity: sha512-+t2/0jIJ48kUpGKkdlhgkv+zPTEOoXyr60qXe68eB/pl3CMJaLeIGjzp5D6Oqt25hCBiBTt8wEeeAzfJvUKnPQ==}
engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0}
hasBin: true
peerDependencies:
@@ -5600,11 +5600,6 @@ packages:
resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==}
hasBin: true
- semver@7.7.1:
- resolution: {integrity: sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==}
- engines: {node: '>=10'}
- hasBin: true
-
semver@7.7.3:
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
engines: {node: '>=10'}
@@ -7539,7 +7534,7 @@ snapshots:
'@marijn/find-cluster-break@1.0.2': {}
- '@mdx-js/mdx@3.1.0(acorn@8.15.0)':
+ '@mdx-js/mdx@3.1.0(acorn@8.14.0)':
dependencies:
'@types/estree': 1.0.8
'@types/estree-jsx': 1.0.5
@@ -7553,7 +7548,7 @@ snapshots:
hast-util-to-jsx-runtime: 2.3.6
markdown-extensions: 2.0.0
recma-build-jsx: 1.0.0
- recma-jsx: 1.0.0(acorn@8.15.0)
+ recma-jsx: 1.0.0(acorn@8.14.0)
recma-stringify: 1.0.0
rehype-recma: 1.0.0
remark-mdx: 3.1.0
@@ -7640,39 +7635,39 @@ snapshots:
react: 19.1.0
react-dom: 19.1.0(react@19.1.0)
- '@next/env@15.5.6': {}
+ '@next/env@15.5.7': {}
'@next/eslint-plugin-next@15.2.0':
dependencies:
fast-glob: 3.3.1
- '@next/swc-darwin-arm64@15.5.6':
+ '@next/swc-darwin-arm64@15.5.7':
optional: true
- '@next/swc-darwin-x64@15.5.6':
+ '@next/swc-darwin-x64@15.5.7':
optional: true
- '@next/swc-linux-arm64-gnu@15.5.6':
+ '@next/swc-linux-arm64-gnu@15.5.7':
optional: true
- '@next/swc-linux-arm64-musl@15.5.6':
+ '@next/swc-linux-arm64-musl@15.5.7':
optional: true
- '@next/swc-linux-x64-gnu@15.5.6':
+ '@next/swc-linux-x64-gnu@15.5.7':
optional: true
- '@next/swc-linux-x64-musl@15.5.6':
+ '@next/swc-linux-x64-musl@15.5.7':
optional: true
- '@next/swc-win32-arm64-msvc@15.5.6':
+ '@next/swc-win32-arm64-msvc@15.5.7':
optional: true
- '@next/swc-win32-x64-msvc@15.5.6':
+ '@next/swc-win32-x64-msvc@15.5.7':
optional: true
- '@next/third-parties@15.5.6(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)':
+ '@next/third-parties@15.5.7(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)':
dependencies:
- next: 15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ next: 15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
react: 19.1.0
third-party-capital: 1.0.20
@@ -9106,7 +9101,7 @@ snapshots:
fast-glob: 3.3.3
is-glob: 4.0.3
minimatch: 9.0.5
- semver: 7.7.1
+ semver: 7.7.3
ts-api-utils: 2.0.1(typescript@5.8.3)
typescript: 5.8.3
transitivePeerDependencies:
@@ -10258,7 +10253,7 @@ snapshots:
fsevents@2.3.3:
optional: true
- fumadocs-core@15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0):
+ fumadocs-core@15.6.6(@types/react@19.1.8)(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0):
dependencies:
'@formatjs/intl-localematcher': 0.6.1
'@orama/orama': 3.1.11
@@ -10279,20 +10274,20 @@ snapshots:
unist-util-visit: 5.0.0
optionalDependencies:
'@types/react': 19.1.8
- next: 15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ next: 15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
react: 19.1.0
react-dom: 19.1.0(react@19.1.0)
transitivePeerDependencies:
- supports-color
- fumadocs-mdx@11.7.1(acorn@8.15.0)(fumadocs-core@15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0):
+ fumadocs-mdx@11.7.1(acorn@8.14.0)(fumadocs-core@15.6.6(@types/react@19.1.8)(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0):
dependencies:
- '@mdx-js/mdx': 3.1.0(acorn@8.15.0)
+ '@mdx-js/mdx': 3.1.0(acorn@8.14.0)
'@standard-schema/spec': 1.0.0
chokidar: 4.0.3
esbuild: 0.25.8
estree-util-value-to-estree: 3.4.0
- fumadocs-core: 15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ fumadocs-core: 15.6.6(@types/react@19.1.8)(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
js-yaml: 4.1.0
lru-cache: 11.1.0
picocolors: 1.1.1
@@ -10301,13 +10296,13 @@ snapshots:
unist-util-visit: 5.0.0
zod: 4.0.10
optionalDependencies:
- next: 15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ next: 15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
react: 19.1.0
transitivePeerDependencies:
- acorn
- supports-color
- fumadocs-ui@15.6.6(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(tailwindcss@4.1.11):
+ fumadocs-ui@15.6.6(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(tailwindcss@4.1.11):
dependencies:
'@radix-ui/react-accordion': 1.2.11(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
'@radix-ui/react-collapsible': 1.1.11(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
@@ -10320,7 +10315,7 @@ snapshots:
'@radix-ui/react-slot': 1.2.3(@types/react@19.1.8)(react@19.1.0)
'@radix-ui/react-tabs': 1.1.12(@types/react-dom@19.1.6(@types/react@19.1.8))(@types/react@19.1.8)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
class-variance-authority: 0.7.1
- fumadocs-core: 15.6.6(@types/react@19.1.8)(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ fumadocs-core: 15.6.6(@types/react@19.1.8)(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
lodash.merge: 4.6.2
next-themes: 0.4.6(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
postcss-selector-parser: 7.1.0
@@ -10331,7 +10326,7 @@ snapshots:
tailwind-merge: 3.3.1
optionalDependencies:
'@types/react': 19.1.8
- next: 15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ next: 15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
tailwindcss: 4.1.11
transitivePeerDependencies:
- '@mixedbread/sdk'
@@ -10355,9 +10350,9 @@ snapshots:
fuse.js@6.6.2: {}
- geist@1.4.2(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)):
+ geist@1.4.2(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)):
dependencies:
- next: 15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ next: 15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
get-intrinsic@1.3.0:
dependencies:
@@ -10769,7 +10764,7 @@ snapshots:
is-bun-module@1.3.0:
dependencies:
- semver: 7.7.1
+ semver: 7.7.3
is-callable@1.2.7: {}
@@ -11955,11 +11950,11 @@ snapshots:
negotiator@1.0.0: {}
- next-intl@3.26.5(next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0):
+ next-intl@3.26.5(next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0):
dependencies:
'@formatjs/intl-localematcher': 0.5.10
negotiator: 1.0.0
- next: 15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+ next: 15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
react: 19.1.0
use-intl: 3.26.5(react@19.1.0)
@@ -11970,9 +11965,9 @@ snapshots:
next-tick@1.1.0: {}
- next@15.5.6(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0):
+ next@15.5.7(@opentelemetry/api@1.9.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0):
dependencies:
- '@next/env': 15.5.6
+ '@next/env': 15.5.7
'@swc/helpers': 0.5.15
caniuse-lite: 1.0.30001706
postcss: 8.4.31
@@ -11980,14 +11975,14 @@ snapshots:
react-dom: 19.1.0(react@19.1.0)
styled-jsx: 5.1.6(react@19.1.0)
optionalDependencies:
- '@next/swc-darwin-arm64': 15.5.6
- '@next/swc-darwin-x64': 15.5.6
- '@next/swc-linux-arm64-gnu': 15.5.6
- '@next/swc-linux-arm64-musl': 15.5.6
- '@next/swc-linux-x64-gnu': 15.5.6
- '@next/swc-linux-x64-musl': 15.5.6
- '@next/swc-win32-arm64-msvc': 15.5.6
- '@next/swc-win32-x64-msvc': 15.5.6
+ '@next/swc-darwin-arm64': 15.5.7
+ '@next/swc-darwin-x64': 15.5.7
+ '@next/swc-linux-arm64-gnu': 15.5.7
+ '@next/swc-linux-arm64-musl': 15.5.7
+ '@next/swc-linux-x64-gnu': 15.5.7
+ '@next/swc-linux-x64-musl': 15.5.7
+ '@next/swc-win32-arm64-msvc': 15.5.7
+ '@next/swc-win32-x64-msvc': 15.5.7
'@opentelemetry/api': 1.9.0
sharp: 0.34.5
transitivePeerDependencies:
@@ -12646,9 +12641,9 @@ snapshots:
estree-util-build-jsx: 3.0.1
vfile: 6.0.3
- recma-jsx@1.0.0(acorn@8.15.0):
+ recma-jsx@1.0.0(acorn@8.14.0):
dependencies:
- acorn-jsx: 5.3.2(acorn@8.15.0)
+ acorn-jsx: 5.3.2(acorn@8.14.0)
estree-util-to-js: 2.0.0
recma-parse: 1.0.0
recma-stringify: 1.0.0
@@ -12926,10 +12921,7 @@ snapshots:
semver@6.3.1: {}
- semver@7.7.1: {}
-
- semver@7.7.3:
- optional: true
+ semver@7.7.3: {}
set-function-length@1.2.2:
dependencies:
From 5a4a7860343da6640cda7e032502a617df7f85f2 Mon Sep 17 00:00:00 2001
From: waychan23
Date: Thu, 4 Dec 2025 18:18:52 +0800
Subject: [PATCH 32/36] fix: copying api key to the clipboard fails when the
 page is accessed via an insecure connection (HTTP without TLS). Fixed by adding a
 fallback method for copying text to the clipboard.
---
surfsense_web/hooks/use-api-key.ts | 57 +++++++++++++++++++++++++-----
1 file changed, 49 insertions(+), 8 deletions(-)
diff --git a/surfsense_web/hooks/use-api-key.ts b/surfsense_web/hooks/use-api-key.ts
index 6f955adc3..678307ba9 100644
--- a/surfsense_web/hooks/use-api-key.ts
+++ b/surfsense_web/hooks/use-api-key.ts
@@ -33,17 +33,58 @@ export function useApiKey(): UseApiKeyReturn {
return () => clearTimeout(timer);
}, []);
+ const fallbackCopyTextToClipboard = (text: string) => {
+ const textArea = document.createElement("textarea");
+ textArea.value = text;
+
+ // Avoid scrolling to bottom
+ textArea.style.top = "0";
+ textArea.style.left = "0";
+ textArea.style.position = "fixed";
+ textArea.style.opacity = "0";
+
+ document.body.appendChild(textArea);
+ textArea.focus();
+ textArea.select();
+
+ try {
+ const successful = document.execCommand('copy');
+ document.body.removeChild(textArea);
+
+ if (successful) {
+ setCopied(true);
+ toast.success("API key copied to clipboard");
+
+ setTimeout(() => {
+ setCopied(false);
+ }, 2000);
+ } else {
+ toast.error("Failed to copy API key");
+ }
+ } catch (err) {
+ console.error("Fallback: Oops, unable to copy", err);
+ document.body.removeChild(textArea);
+ toast.error("Failed to copy API key");
+ }
+ };
+
const copyToClipboard = useCallback(async () => {
if (!apiKey) return;
try {
- await navigator.clipboard.writeText(apiKey);
- setCopied(true);
- toast.success("API key copied to clipboard");
-
- setTimeout(() => {
- setCopied(false);
- }, 2000);
+ if (navigator.clipboard && window.isSecureContext) {
+ // Use Clipboard API if available and in secure context
+ await navigator.clipboard.writeText(apiKey);
+ setCopied(true);
+ toast.success("API key copied to clipboard");
+
+ setTimeout(() => {
+ setCopied(false);
+ }, 2000);
+ } else {
+ // Fallback for non-secure contexts or browsers without clipboard API
+ fallbackCopyTextToClipboard(apiKey);
+ }
} catch (err) {
console.error("Failed to copy:", err);
toast.error("Failed to copy API key");
@@ -56,4 +97,4 @@ export function useApiKey(): UseApiKeyReturn {
copied,
copyToClipboard,
};
-}
+}
\ No newline at end of file
From 521cea3ef0f79693958a9a263aa4ebaab0c0406c Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Thu, 4 Dec 2025 12:53:18 +0200
Subject: [PATCH 33/36] update query params for the get-issues-by-date-range method
---
surfsense_backend/app/connectors/jira_connector.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/surfsense_backend/app/connectors/jira_connector.py b/surfsense_backend/app/connectors/jira_connector.py
index d9474ee79..18193fce1 100644
--- a/surfsense_backend/app/connectors/jira_connector.py
+++ b/surfsense_backend/app/connectors/jira_connector.py
@@ -239,9 +239,9 @@ class JiraConnector:
)
# TODO : This JQL needs some improvement to work as expected
- _jql = f"{date_filter}"
+ jql = f"{date_filter}"
if project_key:
- _jql = (
+ jql = (
f'project = "{project_key}" AND {date_filter} ORDER BY created DESC'
)
@@ -262,10 +262,10 @@ class JiraConnector:
if include_comments:
fields.append("comment")
- print(f"JQL query: {_jql}")
+ print(f"JQL query: {jql}")
params = {
- "jql": _jql,
+ "jql": jql,
"fields": ",".join(fields),
"maxResults": 100,
"startAt": 0,
From 803f792a9d9f79e57b21c67f24073f1798c4bb7c Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Thu, 4 Dec 2025 12:55:19 +0200
Subject: [PATCH 34/36] clean up
---
surfsense_backend/app/connectors/jira_connector.py | 2 --
1 file changed, 2 deletions(-)
diff --git a/surfsense_backend/app/connectors/jira_connector.py b/surfsense_backend/app/connectors/jira_connector.py
index 18193fce1..e73198e79 100644
--- a/surfsense_backend/app/connectors/jira_connector.py
+++ b/surfsense_backend/app/connectors/jira_connector.py
@@ -262,8 +262,6 @@ class JiraConnector:
if include_comments:
fields.append("comment")
- print(f"JQL query: {jql}")
-
params = {
"jql": jql,
"fields": ",".join(fields),
From c97887a63dd3afa35fc2d73ac5faddc4b8671a36 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Fri, 5 Dec 2025 00:14:36 -0800
Subject: [PATCH 35/36] fix: implement real-time streaming for responses
- Added streaming service support to the Q&A agent for real-time token streaming.
- Updated `answer_question` method to stream responses token-by-token to the frontend.
- Modified `handle_qna_workflow` to handle both custom and values streaming modes.
- Enhanced state management to include streaming service for improved user experience.
---
.../app/agents/researcher/nodes.py | 53 +++++++++----------
.../app/agents/researcher/qna_agent/nodes.py | 27 ++++++++--
.../app/agents/researcher/qna_agent/state.py | 5 ++
.../app/tasks/celery_tasks/document_tasks.py | 5 +-
.../researcher/[[...chat_id]]/page.tsx | 15 ++++--
5 files changed, 64 insertions(+), 41 deletions(-)
diff --git a/surfsense_backend/app/agents/researcher/nodes.py b/surfsense_backend/app/agents/researcher/nodes.py
index c53e3348f..4b2f4b0e6 100644
--- a/surfsense_backend/app/agents/researcher/nodes.py
+++ b/surfsense_backend/app/agents/researcher/nodes.py
@@ -1440,7 +1440,12 @@ async def handle_qna_workflow(
}
# Create the state for the QNA agent (it has a different state structure)
- qna_state = {"db_session": state.db_session, "chat_history": state.chat_history}
+ # Pass streaming_service so the QNA agent can stream tokens directly
+ qna_state = {
+ "db_session": state.db_session,
+ "chat_history": state.chat_history,
+ "streaming_service": streaming_service,
+ }
try:
writer(
@@ -1455,36 +1460,26 @@ async def handle_qna_workflow(
complete_content = ""
captured_reranked_documents = []
- # Call the QNA agent with streaming
- async for _chunk_type, chunk in qna_agent_graph.astream(
- qna_state, qna_config, stream_mode=["values"]
+ # Call the QNA agent with both custom and values streaming modes
+ # - "custom" captures token-by-token streams from answer_question via writer()
+ # - "values" captures state updates including final_answer and reranked_documents
+ async for stream_mode, chunk in qna_agent_graph.astream(
+ qna_state, qna_config, stream_mode=["custom", "values"]
):
- if "final_answer" in chunk:
- new_content = chunk["final_answer"]
- if new_content and new_content != complete_content:
- # Extract only the new content (delta)
- delta = new_content[len(complete_content) :]
- complete_content = new_content
+ if stream_mode == "custom":
+ # Handle custom stream events (token chunks from answer_question)
+ if isinstance(chunk, dict) and "yield_value" in chunk:
+ # Forward the streamed token to the parent writer
+ writer(chunk)
+ elif stream_mode == "values" and isinstance(chunk, dict):
+ # Handle state value updates
+ # Capture the final answer from state
+ if chunk.get("final_answer"):
+ complete_content = chunk["final_answer"]
- # Stream the real-time answer if there's new content
- if delta:
- # Update terminal with progress
- word_count = len(complete_content.split())
- writer(
- {
- "yield_value": streaming_service.format_terminal_info_delta(
- f"✍️ Writing answer... ({word_count} words)"
- )
- }
- )
-
- writer(
- {"yield_value": streaming_service.format_text_chunk(delta)}
- )
-
- # Capture reranked documents from QNA agent for further question generation
- if "reranked_documents" in chunk:
- captured_reranked_documents = chunk["reranked_documents"]
+ # Capture reranked documents from QNA agent for further question generation
+ if chunk.get("reranked_documents"):
+ captured_reranked_documents = chunk["reranked_documents"]
# Set default if no content was received
if not complete_content:
diff --git a/surfsense_backend/app/agents/researcher/qna_agent/nodes.py b/surfsense_backend/app/agents/researcher/qna_agent/nodes.py
index 37bdbc362..35f01146b 100644
--- a/surfsense_backend/app/agents/researcher/qna_agent/nodes.py
+++ b/surfsense_backend/app/agents/researcher/qna_agent/nodes.py
@@ -3,6 +3,7 @@ from typing import Any
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.runnables import RunnableConfig
+from langgraph.types import StreamWriter
from sqlalchemy import select
from app.db import SearchSpace
@@ -129,9 +130,11 @@ async def rerank_documents(state: State, config: RunnableConfig) -> dict[str, An
return {"reranked_documents": documents}
-async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any]:
+async def answer_question(
+ state: State, config: RunnableConfig, writer: StreamWriter
+) -> dict[str, Any]:
"""
- Answer the user's question using the provided documents.
+ Answer the user's question using the provided documents with real-time streaming.
This node takes the relevant documents provided in the configuration and uses
an LLM to generate a comprehensive answer to the user's question with
@@ -139,6 +142,8 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
documents. If no documents are provided, it will use chat history to generate
an answer.
+ The response is streamed token-by-token for real-time updates to the frontend.
+
Returns:
Dict containing the final answer in the "final_answer" key.
"""
@@ -151,6 +156,9 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
search_space_id = configuration.search_space_id
language = configuration.language
+ # Get streaming service from state
+ streaming_service = state.streaming_service
+
# Fetch search space to get QnA configuration
result = await state.db_session.execute(
select(SearchSpace).where(SearchSpace.id == search_space_id)
@@ -279,8 +287,17 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
total_tokens = calculate_token_count(messages_with_chat_history, llm.model)
print(f"Final token count: {total_tokens}")
- # Call the LLM and get the response
- response = await llm.ainvoke(messages_with_chat_history)
- final_answer = response.content
+ # Stream the LLM response token by token
+ final_answer = ""
+
+ async for chunk in llm.astream(messages_with_chat_history):
+ # Extract the content from the chunk
+ if hasattr(chunk, "content") and chunk.content:
+ token = chunk.content
+ final_answer += token
+
+ # Stream the token to the frontend via custom stream
+ if streaming_service:
+ writer({"yield_value": streaming_service.format_text_chunk(token)})
return {"final_answer": final_answer}
diff --git a/surfsense_backend/app/agents/researcher/qna_agent/state.py b/surfsense_backend/app/agents/researcher/qna_agent/state.py
index f6cc7b1ba..4113b9286 100644
--- a/surfsense_backend/app/agents/researcher/qna_agent/state.py
+++ b/surfsense_backend/app/agents/researcher/qna_agent/state.py
@@ -7,6 +7,8 @@ from typing import Any
from sqlalchemy.ext.asyncio import AsyncSession
+from app.services.streaming_service import StreamingService
+
@dataclass
class State:
@@ -21,6 +23,9 @@ class State:
# Runtime context
db_session: AsyncSession
+ # Streaming service for real-time token streaming
+ streaming_service: StreamingService | None = None
+
chat_history: list[Any] | None = field(default_factory=list)
# OUTPUT: Populated by agent nodes
reranked_documents: list[Any] | None = None
diff --git a/surfsense_backend/app/tasks/celery_tasks/document_tasks.py b/surfsense_backend/app/tasks/celery_tasks/document_tasks.py
index a7b750673..5b7f9ce13 100644
--- a/surfsense_backend/app/tasks/celery_tasks/document_tasks.py
+++ b/surfsense_backend/app/tasks/celery_tasks/document_tasks.py
@@ -62,7 +62,7 @@ async def _process_extension_document(
individual_document_dict, search_space_id: int, user_id: str
):
"""Process extension document with new session."""
- from pydantic import BaseModel
+ from pydantic import BaseModel, ConfigDict, Field
# Reconstruct the document object from dict
# You'll need to define the proper model for this
@@ -75,8 +75,9 @@ async def _process_extension_document(
VisitedWebPageVisitDurationInMilliseconds: str
class IndividualDocument(BaseModel):
+ model_config = ConfigDict(populate_by_name=True)
metadata: DocumentMetadata
- pageContent: str
+ page_content: str = Field(alias="pageContent")
individual_document = IndividualDocument(**individual_document_dict)
diff --git a/surfsense_web/app/dashboard/[search_space_id]/researcher/[[...chat_id]]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/researcher/[[...chat_id]]/page.tsx
index 7481ddaa2..1a9a607fb 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/researcher/[[...chat_id]]/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/researcher/[[...chat_id]]/page.tsx
@@ -17,13 +17,14 @@ export default function ResearcherPage() {
const { search_space_id } = useParams();
const router = useRouter();
const hasSetInitialConnectors = useRef(false);
+ const hasInitiatedResponse = useRef(null);
const activeChatId = useAtomValue(activeChatIdAtom);
const { data: activeChatState, isFetching: isChatLoading } = useAtomValue(activeChatAtom);
const { mutateAsync: createChat } = useAtomValue(createChatMutationAtom);
const { mutateAsync: updateChat } = useAtomValue(updateChatMutationAtom);
const isNewChat = !activeChatId;
- // Reset the flag when chat ID changes
+ // Reset the flag when chat ID changes (but not hasInitiatedResponse - we need to remember if we already initiated)
useEffect(() => {
hasSetInitialConnectors.current = false;
}, [activeChatId]);
@@ -167,10 +168,14 @@ export default function ResearcherPage() {
if (chatData.messages && Array.isArray(chatData.messages)) {
if (chatData.messages.length === 1 && chatData.messages[0].role === "user") {
// Single user message - append to trigger LLM response
- handler.append({
- role: "user",
- content: chatData.messages[0].content,
- });
+ // Only if we haven't already initiated for this chat and handler doesn't have messages yet
+ if (hasInitiatedResponse.current !== activeChatId && handler.messages.length === 0) {
+ hasInitiatedResponse.current = activeChatId;
+ handler.append({
+ role: "user",
+ content: chatData.messages[0].content,
+ });
+ }
} else if (chatData.messages.length > 1) {
// Multiple messages - set them all
handler.setMessages(chatData.messages);
From 481ec5533b60979b3ed3f4199ded038fca784d16 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Fri, 5 Dec 2025 00:17:31 -0800
Subject: [PATCH 36/36] chore: biome checks
---
surfsense_web/hooks/use-api-key.ts | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/surfsense_web/hooks/use-api-key.ts b/surfsense_web/hooks/use-api-key.ts
index 678307ba9..a5f24d4c6 100644
--- a/surfsense_web/hooks/use-api-key.ts
+++ b/surfsense_web/hooks/use-api-key.ts
@@ -36,25 +36,25 @@ export function useApiKey(): UseApiKeyReturn {
const fallbackCopyTextToClipboard = (text: string) => {
const textArea = document.createElement("textarea");
textArea.value = text;
-
+
// Avoid scrolling to bottom
textArea.style.top = "0";
textArea.style.left = "0";
textArea.style.position = "fixed";
textArea.style.opacity = "0";
-
+
document.body.appendChild(textArea);
textArea.focus();
textArea.select();
-
+
try {
- const successful = document.execCommand('copy');
+ const successful = document.execCommand("copy");
document.body.removeChild(textArea);
-
+
if (successful) {
setCopied(true);
toast.success("API key copied to clipboard");
-
+
setTimeout(() => {
setCopied(false);
}, 2000);
@@ -77,7 +77,7 @@ export function useApiKey(): UseApiKeyReturn {
await navigator.clipboard.writeText(apiKey);
setCopied(true);
toast.success("API key copied to clipboard");
-
+
setTimeout(() => {
setCopied(false);
}, 2000);
@@ -97,4 +97,4 @@ export function useApiKey(): UseApiKeyReturn {
copied,
copyToClipboard,
};
-}
\ No newline at end of file
+}