diff --git a/surfsense_backend/alembic/versions/43_add_blocknote_fields_to_documents.py b/surfsense_backend/alembic/versions/43_add_blocknote_fields_to_documents.py
new file mode 100644
index 000000000..32e7780eb
--- /dev/null
+++ b/surfsense_backend/alembic/versions/43_add_blocknote_fields_to_documents.py
@@ -0,0 +1,75 @@
+"""43_add_blocknote_fields_to_documents
+
+Revision ID: 43
+Revises: 42
+Create Date: 2025-11-30
+
+Adds fields for live document editing:
+- blocknote_document: JSONB editor state
+- content_needs_reindexing: Flag for regenerating chunks/summary
+- last_edited_at: Last edit timestamp
+"""
+
+from collections.abc import Sequence
+
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "43"
+down_revision: str | None = "42"
+branch_labels: str | Sequence[str] | None = None
+depends_on: str | Sequence[str] | None = None
+
+
def upgrade() -> None:
    """Upgrade schema - Add BlockNote fields and trigger population task."""

    # Add the columns
    op.add_column(
        "documents",
        sa.Column(
            "blocknote_document", postgresql.JSONB(astext_type=sa.Text()), nullable=True
        ),
    )
    op.add_column(
        "documents",
        sa.Column(
            "content_needs_reindexing",
            sa.Boolean(),
            nullable=False,
            server_default=sa.false(),
        ),
    )
    op.add_column(
        "documents",
        sa.Column("last_edited_at", sa.TIMESTAMP(timezone=True), nullable=True),
    )

    # Best-effort backfill: queue the Celery task that populates
    # blocknote_document for existing documents.
    #
    # NOTE(review): enqueueing from a migration couples the schema change to
    # application code and a running broker. In offline (--sql) mode there is
    # no database connection, let alone a broker, so skip the enqueue entirely
    # there; otherwise attempt it but never fail the migration.
    from alembic import context

    if context.is_offline_mode():
        print("⚠ Offline migration: skipping blocknote population task")
        return

    try:
        from app.tasks.celery_tasks.blocknote_migration_tasks import (
            populate_blocknote_for_documents_task,
        )

        # Queue the task to run asynchronously
        populate_blocknote_for_documents_task.apply_async()
        print(
            "✓ Queued Celery task to populate blocknote_document for existing documents"
        )
    except Exception as e:
        # If Celery is not available or task queueing fails, log but don't fail the migration
        print(f"⚠ Warning: Could not queue blocknote population task: {e}")
        print("  You can manually trigger it later with:")
        print(
            "  celery -A app.celery_app call app.tasks.celery_tasks.blocknote_migration_tasks.populate_blocknote_for_documents_task"
        )
+
+
def downgrade() -> None:
    """Downgrade schema - Remove BlockNote fields."""
    # Drop in reverse order of creation.
    for column_name in ("last_edited_at", "content_needs_reindexing", "blocknote_document"):
        op.drop_column("documents", column_name)
diff --git a/surfsense_backend/app/celery_app.py b/surfsense_backend/app/celery_app.py
index 898ab9735..f7bea8cc3 100644
--- a/surfsense_backend/app/celery_app.py
+++ b/surfsense_backend/app/celery_app.py
@@ -63,6 +63,8 @@ celery_app = Celery(
"app.tasks.celery_tasks.podcast_tasks",
"app.tasks.celery_tasks.connector_tasks",
"app.tasks.celery_tasks.schedule_checker_task",
+ "app.tasks.celery_tasks.blocknote_migration_tasks",
+ "app.tasks.celery_tasks.document_reindex_tasks",
],
)
diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py
index f3147a42b..20a4adc23 100644
--- a/surfsense_backend/app/db.py
+++ b/surfsense_backend/app/db.py
@@ -20,7 +20,7 @@ from sqlalchemy import (
UniqueConstraint,
text,
)
-from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, declared_attr, relationship
@@ -343,6 +343,17 @@ class Document(BaseModel, TimestampMixin):
unique_identifier_hash = Column(String, nullable=True, index=True, unique=True)
embedding = Column(Vector(config.embedding_model_instance.dimension))
+ # BlockNote live editing state (NULL when never edited)
+ blocknote_document = Column(JSONB, nullable=True)
+
+ # blocknote background reindex flag
+ content_needs_reindexing = Column(
+ Boolean, nullable=False, default=False, server_default=text("false")
+ )
+
+ # Track when blocknote document was last edited
+ last_edited_at = Column(TIMESTAMP(timezone=True), nullable=True)
+
search_space_id = Column(
Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
)
diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py
index 127a8d927..4b829fe84 100644
--- a/surfsense_backend/app/routes/__init__.py
+++ b/surfsense_backend/app/routes/__init__.py
@@ -5,6 +5,7 @@ from .airtable_add_connector_route import (
)
from .chats_routes import router as chats_router
from .documents_routes import router as documents_router
+from .editor_routes import router as editor_router
from .google_calendar_add_connector_route import (
router as google_calendar_add_connector_router,
)
@@ -23,6 +24,7 @@ router = APIRouter()
router.include_router(search_spaces_router)
router.include_router(rbac_router) # RBAC routes for roles, members, invites
+router.include_router(editor_router)
router.include_router(documents_router)
router.include_router(podcasts_router)
router.include_router(chats_router)
diff --git a/surfsense_backend/app/routes/editor_routes.py b/surfsense_backend/app/routes/editor_routes.py
new file mode 100644
index 000000000..9beebfc8e
--- /dev/null
+++ b/surfsense_backend/app/routes/editor_routes.py
@@ -0,0 +1,166 @@
+"""
+Editor routes for BlockNote document editing.
+"""
+
+from datetime import UTC, datetime
+from typing import Any
+
+from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import selectinload
+
+from app.db import Document, Permission, User, get_async_session
+from app.users import current_active_user
+from app.utils.rbac import check_permission
+
+router = APIRouter()
+
+
@router.get("/search-spaces/{search_space_id}/documents/{document_id}/editor-content")
async def get_editor_content(
    search_space_id: int,
    document_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """
    Get document content for editing.

    Returns the stored BlockNote JSON document. If blocknote_document is NULL,
    attempts to generate it from the document's chunks (lazy migration) and
    persists the result so the conversion runs only once.

    Requires DOCUMENTS_READ permission.

    Raises:
        HTTPException 404: document not found in this search space.
        HTTPException 400: document has no chunks or empty content.
        HTTPException 500: markdown -> BlockNote conversion failed.
    """
    # RBAC: read access to documents in this search space.
    await check_permission(
        session,
        user,
        search_space_id,
        Permission.DOCUMENTS_READ.value,
        "You don't have permission to read documents in this search space",
    )

    result = await session.execute(
        select(Document)
        .options(selectinload(Document.chunks))
        .filter(
            Document.id == document_id,
            Document.search_space_id == search_space_id,
        )
    )
    document = result.scalars().first()

    if not document:
        raise HTTPException(status_code=404, detail="Document not found")

    # Return the stored editor state when present. Use an explicit
    # ``is not None`` check: an empty block list is a legitimate (empty)
    # document and must NOT be silently regenerated from chunks, which
    # would discard the user's edits.
    if document.blocknote_document is not None:
        return {
            "document_id": document.id,
            "title": document.title,
            "blocknote_document": document.blocknote_document,
            "last_edited_at": document.last_edited_at.isoformat()
            if document.last_edited_at
            else None,
        }

    # Lazy migration: try to generate blocknote_document from chunks.
    from app.utils.blocknote_converter import convert_markdown_to_blocknote

    # Ordering by chunk id assumes ids follow document order — TODO confirm
    # against the chunking pipeline.
    chunks = sorted(document.chunks, key=lambda c: c.id)

    if not chunks:
        raise HTTPException(
            status_code=400,
            detail="This document has no chunks and cannot be edited. Please re-upload to enable editing.",
        )

    # Reconstruct markdown from chunks
    markdown_content = "\n\n".join(chunk.content for chunk in chunks)

    if not markdown_content.strip():
        raise HTTPException(
            status_code=400,
            detail="This document has empty content and cannot be edited.",
        )

    # Convert to BlockNote
    blocknote_json = await convert_markdown_to_blocknote(markdown_content)

    if not blocknote_json:
        raise HTTPException(
            status_code=500,
            detail="Failed to convert document to editable format. Please try again later.",
        )

    # Save the generated blocknote_document (lazy migration). The document had
    # no editor state, so clearing the reindex flag / edit timestamp is safe.
    document.blocknote_document = blocknote_json
    document.content_needs_reindexing = False
    document.last_edited_at = None
    await session.commit()

    return {
        "document_id": document.id,
        "title": document.title,
        "blocknote_document": blocknote_json,
        "last_edited_at": None,
    }
+
+
@router.post("/search-spaces/{search_space_id}/documents/{document_id}/save")
async def save_document(
    search_space_id: int,
    document_id: int,
    data: dict[str, Any],
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """
    Save BlockNote document and trigger reindexing.
    Called when user clicks 'Save & Exit'.

    Requires DOCUMENTS_UPDATE permission.

    Raises:
        HTTPException 404: document not found in this search space.
        HTTPException 400: missing or malformed blocknote_document payload.
    """
    from app.tasks.celery_tasks.document_reindex_tasks import reindex_document_task

    # RBAC: update access to documents in this search space.
    await check_permission(
        session,
        user,
        search_space_id,
        Permission.DOCUMENTS_UPDATE.value,
        "You don't have permission to update documents in this search space",
    )

    result = await session.execute(
        select(Document).filter(
            Document.id == document_id,
            Document.search_space_id == search_space_id,
        )
    )
    document = result.scalars().first()

    if not document:
        raise HTTPException(status_code=404, detail="Document not found")

    blocknote_document = data.get("blocknote_document")
    if not blocknote_document:
        raise HTTPException(status_code=400, detail="blocknote_document is required")
    # The conversion endpoint and the reindex task expect a list of BlockNote
    # blocks; reject other JSON shapes early with a clear error instead of
    # failing later in the background task.
    if not isinstance(blocknote_document, list):
        raise HTTPException(
            status_code=400, detail="blocknote_document must be a list of blocks"
        )

    # Persist editor state and flag the document for background reindexing.
    document.blocknote_document = blocknote_document
    document.last_edited_at = datetime.now(UTC)
    document.content_needs_reindexing = True

    await session.commit()

    # Queue the reindex only after the commit so the task sees the saved state.
    reindex_document_task.delay(document_id, str(user.id))

    return {
        "status": "saved",
        "document_id": document_id,
        "message": "Document saved and will be reindexed in the background",
        "last_edited_at": document.last_edited_at.isoformat(),
    }
diff --git a/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py b/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
new file mode 100644
index 000000000..c945bcb04
--- /dev/null
+++ b/surfsense_backend/app/tasks/celery_tasks/blocknote_migration_tasks.py
@@ -0,0 +1,168 @@
+"""Celery tasks for populating blocknote_document for existing documents."""
+
+import logging
+
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
+from sqlalchemy.orm import selectinload
+from sqlalchemy.pool import NullPool
+
+from app.celery_app import celery_app
+from app.config import config
+from app.db import Document
+from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+logger = logging.getLogger(__name__)
+
+
def get_celery_session_maker():
    """
    Build a fresh async session maker for use inside Celery tasks.

    Celery tasks run in their own event loop, so the application's default
    session maker (bound to the main app's loop) cannot be reused. NullPool
    ensures no pooled connections outlive the task's event loop.
    """
    task_engine = create_async_engine(
        config.DATABASE_URL,
        echo=False,
        poolclass=NullPool,
    )
    return async_sessionmaker(bind=task_engine, expire_on_commit=False)
+
+
@celery_app.task(name="populate_blocknote_for_documents", bind=True)
def populate_blocknote_for_documents_task(
    self, document_ids: list[int] | None = None, batch_size: int = 50
):
    """
    Celery task to populate blocknote_document for existing documents.

    Args:
        document_ids: Optional list of specific document IDs to process.
            If None, processes all documents with blocknote_document IS NULL.
        batch_size: Number of documents to process in each batch (default: 50)
    """
    import asyncio

    # asyncio.run creates and tears down the loop and, unlike the manual
    # new_event_loop/run_until_complete/close sequence, also shuts down async
    # generators and the default executor before closing.
    asyncio.run(_populate_blocknote_for_documents(document_ids, batch_size))
+
+
async def _populate_blocknote_for_documents(
    document_ids: list[int] | None = None, batch_size: int = 50
):
    """
    Async function to populate blocknote_document for documents.

    Opens its own DB session (see get_celery_session_maker), selects all
    documents whose blocknote_document is NULL (optionally restricted to
    document_ids), reconstructs each document's markdown from its chunks,
    and stores the converted BlockNote JSON. Commits incrementally.

    Args:
        document_ids: Optional list of specific document IDs to process
        batch_size: Number of documents to process per batch
    """
    async with get_celery_session_maker()() as session:
        try:
            # Build query for documents that need blocknote_document populated
            query = select(Document).where(Document.blocknote_document.is_(None))

            # If specific document IDs provided, filter by them
            if document_ids:
                query = query.where(Document.id.in_(document_ids))

            # Load chunks relationship to avoid N+1 queries
            query = query.options(selectinload(Document.chunks))

            # Execute query
            result = await session.execute(query)
            documents = result.scalars().all()

            total_documents = len(documents)
            logger.info(f"Found {total_documents} documents to process")

            if total_documents == 0:
                logger.info("No documents to process")
                return

            # Process documents in batches
            processed = 0
            failed = 0

            for i in range(0, total_documents, batch_size):
                batch = documents[i : i + batch_size]
                logger.info(
                    f"Processing batch {i // batch_size + 1}: documents {i + 1}-{min(i + batch_size, total_documents)}"
                )

                for document in batch:
                    try:
                        # Use preloaded chunks from selectinload - no need to query again.
                        # Sorting by id assumes chunk ids follow document order —
                        # TODO confirm against the chunking pipeline.
                        chunks = sorted(document.chunks, key=lambda c: c.id)

                        if not chunks:
                            logger.warning(
                                f"Document {document.id} ({document.title}) has no chunks, skipping"
                            )
                            failed += 1
                            continue

                        # Reconstruct markdown by concatenating chunk contents
                        markdown_content = "\n\n".join(
                            chunk.content for chunk in chunks
                        )

                        if not markdown_content or not markdown_content.strip():
                            logger.warning(
                                f"Document {document.id} ({document.title}) has empty markdown content, skipping"
                            )
                            failed += 1
                            continue

                        # Convert markdown to BlockNote JSON (HTTP call to the
                        # Next.js conversion endpoint; returns None on failure)
                        blocknote_json = await convert_markdown_to_blocknote(
                            markdown_content
                        )

                        if not blocknote_json:
                            logger.warning(
                                f"Failed to convert markdown to BlockNote for document {document.id} ({document.title})"
                            )
                            failed += 1
                            continue

                        # Update document with blocknote_document (other fields already have correct defaults)
                        document.blocknote_document = blocknote_json

                        processed += 1

                        # Commit every batch_size documents to avoid long transactions.
                        # (A commit also happens at the end of each batch below, so
                        # this intermediate commit mainly limits transaction size
                        # when many documents fail and `processed` lags behind.)
                        if processed % batch_size == 0:
                            await session.commit()
                            logger.info(
                                f"Committed batch: {processed} documents processed so far"
                            )

                    except Exception as e:
                        logger.error(
                            f"Error processing document {document.id} ({document.title}): {e}",
                            exc_info=True,
                        )
                        failed += 1
                        # Continue with next document instead of failing entire batch.
                        # NOTE(review): no rollback here — if the exception left the
                        # session/transaction in a failed state, the next commit may
                        # also fail; consider `await session.rollback()` — confirm.
                        continue

                # Commit remaining changes in the batch
                await session.commit()
                logger.info(f"Completed batch {i // batch_size + 1}")

            logger.info(
                f"Migration complete: {processed} documents processed, {failed} failed"
            )

        except Exception as e:
            await session.rollback()
            logger.error(f"Error in blocknote migration task: {e}", exc_info=True)
            raise
diff --git a/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py b/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
new file mode 100644
index 000000000..8ab5309f2
--- /dev/null
+++ b/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py
@@ -0,0 +1,126 @@
+"""Celery tasks for reindexing edited documents."""
+
+import logging
+
+from sqlalchemy import delete, select
+from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
+from sqlalchemy.orm import selectinload
+from sqlalchemy.pool import NullPool
+
+from app.celery_app import celery_app
+from app.config import config
+from app.db import Document
+from app.services.llm_service import get_user_long_context_llm
+from app.utils.blocknote_converter import convert_blocknote_to_markdown
+from app.utils.document_converters import (
+ create_document_chunks,
+ generate_document_summary,
+)
+
+logger = logging.getLogger(__name__)
+
+
def get_celery_session_maker():
    """Create async session maker for Celery tasks (fresh engine, no pooling)."""
    return async_sessionmaker(
        create_async_engine(config.DATABASE_URL, poolclass=NullPool, echo=False),
        expire_on_commit=False,
    )
+
+
@celery_app.task(name="reindex_document", bind=True)
def reindex_document_task(self, document_id: int, user_id: str):
    """
    Celery task to reindex a document after editing.

    Args:
        document_id: ID of document to reindex
        user_id: ID of user who edited the document
    """
    import asyncio

    # asyncio.run manages loop creation/teardown and, unlike the manual
    # new_event_loop/run_until_complete/close sequence, also shuts down
    # async generators before closing the loop.
    asyncio.run(_reindex_document(document_id, user_id))
+
+
async def _reindex_document(document_id: int, user_id: str):
    """Async function to reindex a document.

    Converts the stored BlockNote JSON back to markdown, replaces the
    document's chunks, regenerates the summary + embedding with the user's
    long-context LLM, and clears the reindex flag. All in one transaction;
    rolls back and re-raises on any failure.
    """
    async with get_celery_session_maker()() as session:
        try:
            # Get document
            result = await session.execute(
                select(Document)
                .options(selectinload(Document.chunks))  # Eagerly load chunks
                .where(Document.id == document_id)
            )
            document = result.scalars().first()

            if not document:
                logger.error(f"Document {document_id} not found")
                return

            if not document.blocknote_document:
                logger.warning(f"Document {document_id} has no BlockNote content")
                return

            logger.info(f"Reindexing document {document_id} ({document.title})")

            # 1. Convert BlockNote → Markdown (HTTP call; None on failure)
            markdown_content = await convert_blocknote_to_markdown(
                document.blocknote_document
            )

            if not markdown_content:
                logger.error(f"Failed to convert document {document_id} to markdown")
                return

            # 2. Delete old chunks explicitly.
            # NOTE(review): a bulk DELETE does not synchronize the session's
            # identity map, so the eagerly-loaded document.chunks collection
            # still holds stale objects until commit expires them — confirm
            # this cannot conflict with the adds below.
            from app.db import Chunk

            await session.execute(delete(Chunk).where(Chunk.document_id == document_id))
            await session.flush()  # Ensure old chunks are deleted

            # 3. Create new chunks
            new_chunks = await create_document_chunks(markdown_content)

            # 4. Add new chunks to session
            for chunk in new_chunks:
                chunk.document_id = document_id
                session.add(chunk)

            logger.info(f"Created {len(new_chunks)} chunks for document {document_id}")

            # 5. Regenerate summary
            user_llm = await get_user_long_context_llm(
                session, user_id, document.search_space_id
            )

            document_metadata = {
                "title": document.title,
                "document_type": document.document_type.value,
            }

            summary_content, summary_embedding = await generate_document_summary(
                markdown_content, user_llm, document_metadata
            )

            # 6. Update document.
            # NOTE(review): clearing content_needs_reindexing here can clobber
            # a flag set by a save that happened while this task was running —
            # verify whether a newer-edit check is needed.
            document.content = summary_content
            document.embedding = summary_embedding
            document.content_needs_reindexing = False

            await session.commit()

            logger.info(f"Successfully reindexed document {document_id}")

        except Exception as e:
            await session.rollback()
            logger.error(f"Error reindexing document {document_id}: {e}", exc_info=True)
            raise
diff --git a/surfsense_backend/app/tasks/document_processors/extension_processor.py b/surfsense_backend/app/tasks/document_processors/extension_processor.py
index 663093375..48e3efe27 100644
--- a/surfsense_backend/app/tasks/document_processors/extension_processor.py
+++ b/surfsense_backend/app/tasks/document_processors/extension_processor.py
@@ -145,6 +145,16 @@ async def add_extension_received_document(
# Process chunks
chunks = await create_document_chunks(content.pageContent)
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert markdown to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(combined_document_string)
+ if not blocknote_json:
+ logging.warning(
+ f"Failed to convert extension document '{content.metadata.VisitedWebPageTitle}' "
+ f"to BlockNote JSON, document will not be editable"
+ )
+
# Update or create document
if existing_document:
# Update existing document
@@ -154,6 +164,7 @@ async def add_extension_received_document(
existing_document.embedding = summary_embedding
existing_document.document_metadata = content.metadata.model_dump()
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@@ -170,6 +181,7 @@ async def add_extension_received_document(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
)
session.add(document)
diff --git a/surfsense_backend/app/tasks/document_processors/file_processors.py b/surfsense_backend/app/tasks/document_processors/file_processors.py
index 859f6a25d..4ae04e050 100644
--- a/surfsense_backend/app/tasks/document_processors/file_processors.py
+++ b/surfsense_backend/app/tasks/document_processors/file_processors.py
@@ -100,6 +100,15 @@ async def add_received_file_document_using_unstructured(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert markdown to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
+ if not blocknote_json:
+ logging.warning(
+ f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
+ )
+
# Update or create document
if existing_document:
# Update existing document
@@ -112,6 +121,9 @@ async def add_received_file_document_using_unstructured(
"ETL_SERVICE": "UNSTRUCTURED",
}
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
+ existing_document.content_needs_reindexing = False
+ existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@@ -131,6 +143,9 @@ async def add_received_file_document_using_unstructured(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
+ content_needs_reindexing=False,
+ last_edited_at=None,
)
session.add(document)
@@ -214,6 +229,15 @@ async def add_received_file_document_using_llamacloud(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert markdown to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
+ if not blocknote_json:
+ logging.warning(
+ f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
+ )
+
# Update or create document
if existing_document:
# Update existing document
@@ -226,6 +250,9 @@ async def add_received_file_document_using_llamacloud(
"ETL_SERVICE": "LLAMACLOUD",
}
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
+ existing_document.content_needs_reindexing = False
+ existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@@ -245,6 +272,9 @@ async def add_received_file_document_using_llamacloud(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
+ content_needs_reindexing=False,
+ last_edited_at=None,
)
session.add(document)
@@ -353,6 +383,15 @@ async def add_received_file_document_using_docling(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert markdown to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
+ if not blocknote_json:
+ logging.warning(
+ f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
+ )
+
# Update or create document
if existing_document:
# Update existing document
@@ -365,6 +404,9 @@ async def add_received_file_document_using_docling(
"ETL_SERVICE": "DOCLING",
}
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
+ existing_document.content_needs_reindexing = False
+ existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@@ -384,6 +426,9 @@ async def add_received_file_document_using_docling(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
+ content_needs_reindexing=False,
+ last_edited_at=None,
)
session.add(document)
diff --git a/surfsense_backend/app/tasks/document_processors/markdown_processor.py b/surfsense_backend/app/tasks/document_processors/markdown_processor.py
index 76215ed51..3036071c9 100644
--- a/surfsense_backend/app/tasks/document_processors/markdown_processor.py
+++ b/surfsense_backend/app/tasks/document_processors/markdown_processor.py
@@ -110,6 +110,15 @@ async def add_received_markdown_file_document(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
+ if not blocknote_json:
+ logging.warning(
+ f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
+ )
+
# Update or create document
if existing_document:
# Update existing document
@@ -121,6 +130,7 @@ async def add_received_markdown_file_document(
"FILE_NAME": file_name,
}
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@@ -139,6 +149,7 @@ async def add_received_markdown_file_document(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
)
session.add(document)
diff --git a/surfsense_backend/app/tasks/document_processors/youtube_processor.py b/surfsense_backend/app/tasks/document_processors/youtube_processor.py
index c7d396974..332e775e1 100644
--- a/surfsense_backend/app/tasks/document_processors/youtube_processor.py
+++ b/surfsense_backend/app/tasks/document_processors/youtube_processor.py
@@ -291,6 +291,16 @@ async def add_youtube_video_document(
{"stage": "chunk_processing"},
)
+ from app.utils.blocknote_converter import convert_markdown_to_blocknote
+
+ # Convert transcript to BlockNote JSON
+ blocknote_json = await convert_markdown_to_blocknote(combined_document_string)
+ if not blocknote_json:
+ logging.warning(
+ f"Failed to convert YouTube video '{video_id}' to BlockNote JSON, "
+ "document will not be editable"
+ )
+
chunks = await create_document_chunks(combined_document_string)
# Update or create document
@@ -314,6 +324,7 @@ async def add_youtube_video_document(
"thumbnail": video_data.get("thumbnail_url", ""),
}
existing_document.chunks = chunks
+ existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@@ -342,6 +353,7 @@ async def add_youtube_video_document(
search_space_id=search_space_id,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
+ blocknote_document=blocknote_json,
)
session.add(document)
diff --git a/surfsense_backend/app/utils/blocknote_converter.py b/surfsense_backend/app/utils/blocknote_converter.py
new file mode 100644
index 000000000..b57a82996
--- /dev/null
+++ b/surfsense_backend/app/utils/blocknote_converter.py
@@ -0,0 +1,123 @@
+import logging
+from typing import Any
+
+import httpx
+
+from app.config import config
+
+logger = logging.getLogger(__name__)
+
+
async def convert_markdown_to_blocknote(
    markdown: str,
) -> dict[str, Any] | list[dict[str, Any]] | None:
    """
    Convert markdown to BlockNote JSON via Next.js API.

    Args:
        markdown: Markdown string to convert

    Returns:
        The BlockNote document (the Next.js endpoint returns a list of
        blocks), a minimal placeholder document for very short input, or
        None if conversion fails.
    """
    if not markdown or not markdown.strip():
        logger.warning("Empty markdown provided for conversion")
        return None

    if len(markdown) < 10:
        # Too short to parse meaningfully; skip the round-trip through the
        # conversion service and return a minimal BlockNote document.
        logger.warning("Markdown too short to convert (%d chars)", len(markdown))
        return [
            {
                "type": "paragraph",
                "content": [
                    {
                        "type": "text",
                        "text": "Document content could not be converted for editing.",
                        "styles": {},
                    }
                ],
                "children": [],
            }
        ]

    async with httpx.AsyncClient() as client:
        try:
            response = await client.post(
                f"{config.NEXT_FRONTEND_URL}/api/convert-to-blocknote",
                json={"markdown": markdown},
                timeout=30.0,
            )
            response.raise_for_status()
            data = response.json()
            blocknote_document = data.get("blocknote_document")

            if blocknote_document:
                logger.info(
                    "Successfully converted markdown to BlockNote (%d chars)",
                    len(markdown),
                )
                return blocknote_document
            logger.warning("Next.js API returned empty blocknote_document")
            return None

        except httpx.TimeoutException:
            logger.error("Timeout converting markdown to BlockNote after 30s")
            return None
        except httpx.HTTPStatusError as e:
            logger.error(
                f"HTTP error converting markdown to BlockNote: {e.response.status_code} - {e.response.text}"
            )
            # Log first 1000 chars of problematic markdown for debugging
            logger.debug(f"Problematic markdown sample: {markdown[:1000]}")
            return None
        except Exception as e:
            logger.error(f"Failed to convert markdown to BlockNote: {e}", exc_info=True)
            return None
+
+
async def convert_blocknote_to_markdown(
    blocknote_document: dict[str, Any] | list[dict[str, Any]],
) -> str | None:
    """
    Convert BlockNote JSON to markdown via Next.js API.

    Args:
        blocknote_document: BlockNote document as dict or list of blocks

    Returns:
        Markdown string, or None if conversion fails
    """
    if not blocknote_document:
        logger.warning("Empty BlockNote document provided for conversion")
        return None

    endpoint = f"{config.NEXT_FRONTEND_URL}/api/convert-to-markdown"
    payload = {"blocknote_document": blocknote_document}

    async with httpx.AsyncClient() as client:
        try:
            response = await client.post(endpoint, json=payload, timeout=30.0)
            response.raise_for_status()
            markdown = response.json().get("markdown")
        except httpx.TimeoutException:
            logger.error("Timeout converting BlockNote to markdown after 30s")
            return None
        except httpx.HTTPStatusError as e:
            logger.error(
                f"HTTP error converting BlockNote to markdown: {e.response.status_code} - {e.response.text}"
            )
            return None
        except Exception as e:
            logger.error(f"Failed to convert BlockNote to markdown: {e}", exc_info=True)
            return None

    if not markdown:
        logger.warning("Next.js API returned empty markdown")
        return None

    logger.info(
        f"Successfully converted BlockNote to markdown ({len(markdown)} chars)"
    )
    return markdown
diff --git a/surfsense_web/app/api/convert-to-blocknote/route.ts b/surfsense_web/app/api/convert-to-blocknote/route.ts
new file mode 100644
index 000000000..e11c9cb47
--- /dev/null
+++ b/surfsense_web/app/api/convert-to-blocknote/route.ts
@@ -0,0 +1,40 @@
+import { ServerBlockNoteEditor } from "@blocknote/server-util";
+import { type NextRequest, NextResponse } from "next/server";
+
+export async function POST(request: NextRequest) {
+ try {
+ const { markdown } = await request.json();
+
+ if (!markdown || typeof markdown !== "string") {
+ return NextResponse.json({ error: "Markdown string is required" }, { status: 400 });
+ }
+
+ // Log raw markdown input before conversion
+ // console.log(`\n${"=".repeat(80)}`);
+ // console.log("RAW MARKDOWN INPUT (BEFORE CONVERSION):");
+ // console.log("=".repeat(80));
+ // console.log(markdown);
+ // console.log(`${"=".repeat(80)}\n`);
+
+ // Create server-side editor instance
+ const editor = ServerBlockNoteEditor.create();
+
+ // Convert markdown directly to BlockNote blocks
+ const blocks = await editor.tryParseMarkdownToBlocks(markdown);
+
+ if (!blocks || blocks.length === 0) {
+ throw new Error("Markdown parsing returned no blocks");
+ }
+
+ return NextResponse.json({ blocknote_document: blocks });
+ } catch (error: any) {
+ console.error("Failed to convert markdown to BlockNote:", error);
+ return NextResponse.json(
+ {
+ error: "Failed to convert markdown to BlockNote blocks",
+ details: error.message,
+ },
+ { status: 500 }
+ );
+ }
+}
diff --git a/surfsense_web/app/api/convert-to-markdown/route.ts b/surfsense_web/app/api/convert-to-markdown/route.ts
new file mode 100644
index 000000000..7005a800f
--- /dev/null
+++ b/surfsense_web/app/api/convert-to-markdown/route.ts
@@ -0,0 +1,28 @@
+import { ServerBlockNoteEditor } from "@blocknote/server-util";
+import { type NextRequest, NextResponse } from "next/server";
+
+export async function POST(request: NextRequest) {
+ try {
+ const { blocknote_document } = await request.json();
+
+ if (!blocknote_document || !Array.isArray(blocknote_document)) {
+ return NextResponse.json({ error: "BlockNote document array is required" }, { status: 400 });
+ }
+
+ // Create server-side editor instance
+ const editor = ServerBlockNoteEditor.create();
+
+ // Convert BlockNote blocks to markdown
+ const markdown = await editor.blocksToMarkdownLossy(blocknote_document);
+
+ return NextResponse.json({
+ markdown,
+ });
+ } catch (error) {
+ console.error("Failed to convert BlockNote to markdown:", error);
+ return NextResponse.json(
+ { error: "Failed to convert BlockNote blocks to markdown" },
+ { status: 500 }
+ );
+ }
+}
diff --git a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/DocumentsTableShell.tsx b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/DocumentsTableShell.tsx
index 20f2be15f..0483940e0 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/DocumentsTableShell.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/DocumentsTableShell.tsx
@@ -309,6 +309,7 @@ export function DocumentsTableShell({
refreshDocuments={async () => {
await onRefresh();
}}
+ searchSpaceId={searchSpaceId as string}
/>
@@ -340,6 +341,7 @@ export function DocumentsTableShell({
refreshDocuments={async () => {
await onRefresh();
}}
+ searchSpaceId={searchSpaceId as string}
/>
diff --git a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
index bd1e182d9..1c4d440e7 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/components/RowActions.tsx
@@ -1,6 +1,8 @@
"use client";
-import { MoreHorizontal } from "lucide-react";
+import { FileText, Pencil, Trash2 } from "lucide-react";
+import { motion } from "motion/react";
+import { useRouter } from "next/navigation";
import { useState } from "react";
import { toast } from "sonner";
import { JsonMetadataViewer } from "@/components/json-metadata-viewer";
@@ -12,29 +14,26 @@ import {
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
- AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import { Button } from "@/components/ui/button";
-import {
- DropdownMenu,
- DropdownMenuContent,
- DropdownMenuItem,
- DropdownMenuSeparator,
- DropdownMenuTrigger,
-} from "@/components/ui/dropdown-menu";
+import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import type { Document } from "./types";
export function RowActions({
document,
deleteDocument,
refreshDocuments,
+ searchSpaceId,
}: {
document: Document;
deleteDocument: (id: number) => Promise
;
refreshDocuments: () => Promise;
+ searchSpaceId: string;
}) {
- const [isOpen, setIsOpen] = useState(false);
+ const [isDeleteOpen, setIsDeleteOpen] = useState(false);
+ const [isMetadataOpen, setIsMetadataOpen] = useState(false);
const [isDeleting, setIsDeleting] = useState(false);
+ const router = useRouter();
const handleDelete = async () => {
setIsDeleting(true);
@@ -48,62 +47,114 @@ export function RowActions({
toast.error("Failed to delete document");
} finally {
setIsDeleting(false);
- setIsOpen(false);
+ setIsDeleteOpen(false);
}
};
+ const handleEdit = () => {
+ router.push(`/dashboard/${searchSpaceId}/editor/${document.id}`);
+ };
+
return (
-
-
-
-
-
-
- e.preventDefault()}>
- View Metadata
-
- }
- />
-
-
-
- {
- e.preventDefault();
- setIsOpen(true);
- }}
- >
- Delete
-
-
-
-
- Are you sure?
-
-
- Cancel
- {
- e.preventDefault();
- handleDelete();
- }}
- disabled={isDeleting}
- >
- {isDeleting ? "Deleting..." : "Delete"}
-
-
-
-
-
-
+
+ {/* Edit Button */}
+
+
+
+
+
+
+
+ Edit Document
+
+
+
+ {/* View Metadata Button */}
+
+
+
+
+
+
+
+ View Metadata
+
+
+
+
+ {/* Delete Button */}
+
+
+
+
+
+
+
+ Delete
+
+
+
+
+
+ Are you sure?
+
+
+ Cancel
+ {
+ e.preventDefault();
+ handleDelete();
+ }}
+ disabled={isDeleting}
+ className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
+ >
+ {isDeleting ? "Deleting..." : "Delete"}
+
+
+
+
);
}
diff --git a/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
new file mode 100644
index 000000000..71ce2fa2f
--- /dev/null
+++ b/surfsense_web/app/dashboard/[search_space_id]/editor/[documentId]/page.tsx
@@ -0,0 +1,263 @@
+"use client";
+
+import { AlertCircle, FileText, Loader2, Save, X } from "lucide-react";
+import { motion } from "motion/react";
+import { useParams, useRouter } from "next/navigation";
+import { useEffect, useState } from "react";
+import { toast } from "sonner";
+import { BlockNoteEditor } from "@/components/DynamicBlockNoteEditor";
+import { Button } from "@/components/ui/button";
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
+import { Separator } from "@/components/ui/separator";
+
+interface EditorContent {
+ document_id: number;
+ title: string;
+ blocknote_document: any;
+ last_edited_at: string | null;
+}
+
+export default function EditorPage() {
+ const params = useParams();
+ const router = useRouter();
+ const documentId = params.documentId as string;
+
+ const [document, setDocument] = useState
(null);
+ const [loading, setLoading] = useState(true);
+ const [saving, setSaving] = useState(false);
+ const [editorContent, setEditorContent] = useState(null);
+ const [error, setError] = useState(null);
+ const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false);
+
+ // Get auth token
+ const token =
+ typeof window !== "undefined" ? localStorage.getItem("surfsense_bearer_token") : null;
+
+ // Fetch document content - DIRECT CALL TO FASTAPI
+ useEffect(() => {
+ async function fetchDocument() {
+ if (!token) {
+ console.error("No auth token found");
+ setError("Please login to access the editor");
+ setLoading(false);
+ return;
+ }
+
+ try {
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${params.search_space_id}/documents/${documentId}/editor-content`,
+ {
+ headers: {
+ Authorization: `Bearer ${token}`,
+ },
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response
+ .json()
+ .catch(() => ({ detail: "Failed to fetch document" }));
+ throw new Error(errorData.detail || "Failed to fetch document");
+ }
+
+ const data = await response.json();
+
+ // Check if blocknote_document exists
+ if (!data.blocknote_document) {
+ setError(
+ "This document does not have BlockNote content. Please re-upload the document to enable editing."
+ );
+ setLoading(false);
+ return;
+ }
+
+ setDocument(data);
+ setEditorContent(data.blocknote_document);
+ setError(null);
+ } catch (error) {
+ console.error("Error fetching document:", error);
+ setError(
+ error instanceof Error ? error.message : "Failed to fetch document. Please try again."
+ );
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ if (documentId && token) {
+ fetchDocument();
+ }
+ }, [documentId, token]);
+
+ // Track changes to mark as unsaved
+ useEffect(() => {
+ if (editorContent && document) {
+ setHasUnsavedChanges(true);
+ }
+ }, [editorContent, document]);
+
+ // TODO: Maybe add Auto-save every 30 seconds - DIRECT CALL TO FASTAPI
+
+ // Save and exit - DIRECT CALL TO FASTAPI
+ const handleSave = async () => {
+ if (!token) {
+ toast.error("Please login to save");
+ return;
+ }
+
+ if (!editorContent) {
+ toast.error("No content to save");
+ return;
+ }
+
+ setSaving(true);
+ try {
+ // Save blocknote_document and trigger reindexing in background
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${params.search_space_id}/documents/${documentId}/save`,
+ {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${token}`,
+ },
+ body: JSON.stringify({ blocknote_document: editorContent }),
+ }
+ );
+
+ if (!response.ok) {
+ const errorData = await response
+ .json()
+ .catch(() => ({ detail: "Failed to save document" }));
+ throw new Error(errorData.detail || "Failed to save document");
+ }
+
+ setHasUnsavedChanges(false);
+ toast.success("Document saved! Reindexing in background...");
+
+ // Small delay before redirect to show success message
+ setTimeout(() => {
+ router.push(`/dashboard/${params.search_space_id}/documents`);
+ }, 500);
+ } catch (error) {
+ console.error("Error saving document:", error);
+ toast.error(
+ error instanceof Error ? error.message : "Failed to save document. Please try again."
+ );
+ } finally {
+ setSaving(false);
+ }
+ };
+
+ const handleCancel = () => {
+ if (hasUnsavedChanges) {
+ if (confirm("You have unsaved changes. Are you sure you want to leave?")) {
+ router.back();
+ }
+ } else {
+ router.back();
+ }
+ };
+
+ if (loading) {
+ return (
+
+
+
+
+ Loading editor...
+
+
+
+ );
+ }
+
+ if (error) {
+ return (
+
+
+
+
+
+ {error}
+
+
+
+
+
+
+
+ );
+ }
+
+ if (!document) {
+ return (
+
+
+
+
+ Document not found
+
+
+
+ );
+ }
+
+ return (
+
+ {/* Toolbar */}
+
+
+
+
+
{document.title}
+ {hasUnsavedChanges &&
Unsaved changes
}
+
+
+
+
+
+
+
+
+
+ {/* Editor Container */}
+
+
+ );
+}
diff --git a/surfsense_web/app/globals.css b/surfsense_web/app/globals.css
index a1ee277c6..5aee982bb 100644
--- a/surfsense_web/app/globals.css
+++ b/surfsense_web/app/globals.css
@@ -27,7 +27,7 @@
--accent: oklch(0.97 0 0);
--accent-foreground: oklch(0.205 0 0);
--destructive: oklch(0.577 0.245 27.325);
- --destructive-foreground: oklch(0.577 0.245 27.325);
+ --destructive-foreground: oklch(0.985 0 0);
--border: oklch(0.922 0 0);
--input: oklch(0.922 0 0);
--ring: oklch(0.708 0 0);
@@ -63,8 +63,8 @@
--muted-foreground: oklch(0.708 0 0);
--accent: oklch(0.269 0 0);
--accent-foreground: oklch(0.985 0 0);
- --destructive: oklch(0.396 0.141 25.723);
- --destructive-foreground: oklch(0.637 0.237 25.331);
+ --destructive: oklch(0.577 0.245 27.325);
+ --destructive-foreground: oklch(0.985 0 0);
--border: oklch(0.269 0 0);
--input: oklch(0.269 0 0);
--ring: oklch(0.439 0 0);
diff --git a/surfsense_web/components/BlockNoteEditor.tsx b/surfsense_web/components/BlockNoteEditor.tsx
new file mode 100644
index 000000000..8064a0dc4
--- /dev/null
+++ b/surfsense_web/components/BlockNoteEditor.tsx
@@ -0,0 +1,68 @@
+"use client";
+
+import { useTheme } from "next-themes";
+import { useEffect, useMemo, useRef } from "react";
+import "@blocknote/core/fonts/inter.css";
+import "@blocknote/mantine/style.css";
+import { BlockNoteView } from "@blocknote/mantine";
+import { useCreateBlockNote } from "@blocknote/react";
+
+interface BlockNoteEditorProps {
+ initialContent?: any;
+ onChange?: (content: any) => void;
+}
+
+export default function BlockNoteEditor({ initialContent, onChange }: BlockNoteEditorProps) {
+ const { resolvedTheme } = useTheme();
+
+ // Track the initial content to prevent re-initialization
+ const initialContentRef = useRef(null);
+ const isInitializedRef = useRef(false);
+
+ // Creates a new editor instance - only use initialContent on first render
+ const editor = useCreateBlockNote({
+ initialContent: initialContentRef.current === null ? initialContent || undefined : undefined,
+ });
+
+ // Store initial content on first render only
+ useEffect(() => {
+ if (initialContent && initialContentRef.current === null) {
+ initialContentRef.current = initialContent;
+ isInitializedRef.current = true;
+ }
+ }, [initialContent]);
+
+ // Call onChange when document changes (but don't update from props)
+ useEffect(() => {
+ if (!onChange || !editor || !isInitializedRef.current) return;
+
+ const handleChange = () => {
+ onChange(editor.document);
+ };
+
+ // Subscribe to document changes
+ const unsubscribe = editor.onChange(handleChange);
+
+ return () => {
+ unsubscribe();
+ };
+ }, [editor, onChange]);
+
+ // Determine theme for BlockNote with custom dark mode background
+ const blockNoteTheme = useMemo(() => {
+ if (resolvedTheme === "dark") {
+ // Custom dark theme - only override editor background, let BlockNote handle the rest
+ return {
+ colors: {
+ editor: {
+ background: "#0A0A0A", // Custom dark background
+ },
+ },
+ };
+ }
+ return "light" as const;
+ }, [resolvedTheme]);
+
+ // Renders the editor instance
+ return ;
+}
diff --git a/surfsense_web/components/DynamicBlockNoteEditor.tsx b/surfsense_web/components/DynamicBlockNoteEditor.tsx
new file mode 100644
index 000000000..60fc6b11c
--- /dev/null
+++ b/surfsense_web/components/DynamicBlockNoteEditor.tsx
@@ -0,0 +1,6 @@
+"use client";
+
+import dynamic from "next/dynamic";
+
+// Dynamically import BlockNote editor with SSR disabled
+export const BlockNoteEditor = dynamic(() => import("./BlockNoteEditor"), { ssr: false });
diff --git a/surfsense_web/components/dashboard-breadcrumb.tsx b/surfsense_web/components/dashboard-breadcrumb.tsx
index 3e2e5199e..a05368eba 100644
--- a/surfsense_web/components/dashboard-breadcrumb.tsx
+++ b/surfsense_web/components/dashboard-breadcrumb.tsx
@@ -3,7 +3,7 @@
import { useAtomValue } from "jotai";
import { usePathname } from "next/navigation";
import { useTranslations } from "next-intl";
-import React, { useEffect } from "react";
+import React, { useEffect, useState } from "react";
import { activeChatAtom } from "@/atoms/chats/chat-query.atoms";
import {
Breadcrumb,
@@ -34,6 +34,41 @@ export function DashboardBreadcrumb() {
autoFetch: !!searchSpaceId,
});
+ // State to store document title for editor breadcrumb
+ const [documentTitle, setDocumentTitle] = useState(null);
+
+ // Fetch document title when on editor page
+ useEffect(() => {
+ if (segments[2] === "editor" && segments[3] && searchSpaceId) {
+ const documentId = segments[3];
+ const token =
+ typeof window !== "undefined" ? localStorage.getItem("surfsense_bearer_token") : null;
+
+ if (token) {
+ fetch(
+ `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/editor-content`,
+ {
+ headers: {
+ Authorization: `Bearer ${token}`,
+ },
+ }
+ )
+ .then((res) => res.json())
+ .then((data) => {
+ if (data.title) {
+ setDocumentTitle(data.title);
+ }
+ })
+ .catch(() => {
+ // If fetch fails, just use the document ID
+ setDocumentTitle(null);
+ });
+ }
+ } else {
+ setDocumentTitle(null);
+ }
+ }, [segments, searchSpaceId]);
+
// Parse the pathname to create breadcrumb items
const generateBreadcrumbs = (path: string): BreadcrumbItemInterface[] => {
const segments = path.split("/").filter(Boolean);
@@ -66,6 +101,7 @@ export function DashboardBreadcrumb() {
logs: t("logs"),
chats: t("chats"),
settings: t("settings"),
+ editor: t("editor"),
};
sectionLabel = sectionLabels[section] || sectionLabel;
@@ -73,7 +109,21 @@ export function DashboardBreadcrumb() {
// Handle sub-sections
if (segments[3]) {
const subSection = segments[3];
- let subSectionLabel = subSection.charAt(0).toUpperCase() + subSection.slice(1);
+
+ // Handle editor sub-sections (document ID)
+ if (section === "editor") {
+ const documentLabel = documentTitle || subSection;
+ breadcrumbs.push({
+ label: t("documents"),
+ href: `/dashboard/${segments[1]}/documents`,
+ });
+ breadcrumbs.push({
+ label: sectionLabel,
+ href: `/dashboard/${segments[1]}/documents`,
+ });
+ breadcrumbs.push({ label: documentLabel });
+ return breadcrumbs;
+ }
// Handle sources sub-sections
if (section === "sources") {
@@ -81,7 +131,7 @@ export function DashboardBreadcrumb() {
add: "Add Sources",
};
- const sourceLabel = sourceLabels[subSection] || subSectionLabel;
+ const sourceLabel = sourceLabels[subSection] || subSection;
breadcrumbs.push({
label: "Sources",
href: `/dashboard/${segments[1]}/sources`,
@@ -98,7 +148,7 @@ export function DashboardBreadcrumb() {
webpage: t("add_webpages"),
};
- const documentLabel = documentLabels[subSection] || subSectionLabel;
+ const documentLabel = documentLabels[subSection] || subSection;
breadcrumbs.push({
label: t("documents"),
href: `/dashboard/${segments[1]}/documents`,
@@ -159,7 +209,7 @@ export function DashboardBreadcrumb() {
manage: t("manage_connectors"),
};
- const connectorLabel = connectorLabels[subSection] || subSectionLabel;
+ const connectorLabel = connectorLabels[subSection] || subSection;
breadcrumbs.push({
label: t("connectors"),
href: `/dashboard/${segments[1]}/connectors`,
@@ -169,6 +219,7 @@ export function DashboardBreadcrumb() {
}
// Handle other sub-sections
+ let subSectionLabel = subSection.charAt(0).toUpperCase() + subSection.slice(1);
const subSectionLabels: Record = {
upload: t("upload_documents"),
youtube: t("add_youtube"),
diff --git a/surfsense_web/components/json-metadata-viewer.tsx b/surfsense_web/components/json-metadata-viewer.tsx
index 11dd71581..8fe1b10ae 100644
--- a/surfsense_web/components/json-metadata-viewer.tsx
+++ b/surfsense_web/components/json-metadata-viewer.tsx
@@ -15,9 +15,17 @@ interface JsonMetadataViewerProps {
title: string;
metadata: any;
trigger?: React.ReactNode;
+ open?: boolean;
+ onOpenChange?: (open: boolean) => void;
}
-export function JsonMetadataViewer({ title, metadata, trigger }: JsonMetadataViewerProps) {
+export function JsonMetadataViewer({
+ title,
+ metadata,
+ trigger,
+ open,
+ onOpenChange,
+}: JsonMetadataViewerProps) {
// Ensure metadata is a valid object
const jsonData = React.useMemo(() => {
if (!metadata) return {};
@@ -35,6 +43,23 @@ export function JsonMetadataViewer({ title, metadata, trigger }: JsonMetadataVie
}
}, [metadata]);
+ // Controlled mode: when open and onOpenChange are provided
+ if (open !== undefined && onOpenChange !== undefined) {
+ return (
+
+ );
+ }
+
+ // Uncontrolled mode: when using trigger
return (