Merge pull request #513 from MODSetter/dev

feat: added blocknode editor
This commit is contained in:
Rohan Verma 2025-11-30 15:13:13 -08:00 committed by GitHub
commit 77155488ea
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
27 changed files with 2687 additions and 79 deletions

View file

@ -0,0 +1,75 @@
"""43_add_blocknote_fields_to_documents
Revision ID: 43
Revises: 42
Create Date: 2025-11-30
Adds fields for live document editing:
- blocknote_document: JSONB editor state
- content_needs_reindexing: Flag for regenerating chunks/summary
- last_edited_at: Last edit timestamp
"""
from collections.abc import Sequence
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "43"
down_revision: str | None = "42"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Upgrade schema - add BlockNote fields and queue backfill.

    Adds three columns to ``documents``:
      - blocknote_document: JSONB editor state (NULL = never edited)
      - content_needs_reindexing: flag for regenerating chunks/summary
      - last_edited_at: timestamp of the last edit

    Then queues a Celery task to backfill blocknote_document for existing
    rows. Queueing is best-effort: a failure to enqueue does not fail the
    migration.
    """
    # Add the columns
    op.add_column(
        "documents",
        sa.Column(
            "blocknote_document", postgresql.JSONB(astext_type=sa.Text()), nullable=True
        ),
    )
    op.add_column(
        "documents",
        sa.Column(
            "content_needs_reindexing",
            sa.Boolean(),
            nullable=False,
            server_default=sa.false(),
        ),
    )
    op.add_column(
        "documents",
        sa.Column("last_edited_at", sa.TIMESTAMP(timezone=True), nullable=True),
    )

    # Trigger the Celery task to populate blocknote_document for existing documents
    try:
        from app.tasks.celery_tasks.blocknote_migration_tasks import (
            populate_blocknote_for_documents_task,
        )

        # Delay execution: on PostgreSQL this DDL runs inside the migration
        # transaction, so a worker that picks the task up immediately could
        # observe the schema WITHOUT the new columns and fail. A short
        # countdown lets the migration commit first.
        populate_blocknote_for_documents_task.apply_async(countdown=30)
        print(
            "✓ Queued Celery task to populate blocknote_document for existing documents"
        )
    except Exception as e:
        # If Celery is not available or task queueing fails, log but don't fail the migration
        print(f"⚠ Warning: Could not queue blocknote population task: {e}")
        print(" You can manually trigger it later with:")
        print(
            " celery -A app.celery_app call app.tasks.celery_tasks.blocknote_migration_tasks.populate_blocknote_for_documents_task"
        )
def downgrade() -> None:
    """Downgrade schema - Remove BlockNote fields."""
    # Drop in reverse order of creation.
    for column_name in ("last_edited_at", "content_needs_reindexing", "blocknote_document"):
        op.drop_column("documents", column_name)

View file

@ -63,6 +63,8 @@ celery_app = Celery(
"app.tasks.celery_tasks.podcast_tasks",
"app.tasks.celery_tasks.connector_tasks",
"app.tasks.celery_tasks.schedule_checker_task",
"app.tasks.celery_tasks.blocknote_migration_tasks",
"app.tasks.celery_tasks.document_reindex_tasks",
],
)

View file

@ -20,7 +20,7 @@ from sqlalchemy import (
UniqueConstraint,
text,
)
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, declared_attr, relationship
@ -343,6 +343,17 @@ class Document(BaseModel, TimestampMixin):
unique_identifier_hash = Column(String, nullable=True, index=True, unique=True)
embedding = Column(Vector(config.embedding_model_instance.dimension))
# BlockNote live editing state (NULL when never edited)
blocknote_document = Column(JSONB, nullable=True)
# Set True after an edit until the background reindex task regenerates
# chunks/summary; both the ORM default and the server default are false.
content_needs_reindexing = Column(
    Boolean, nullable=False, default=False, server_default=text("false")
)
# Timestamp of the last BlockNote edit (NULL when never edited)
last_edited_at = Column(TIMESTAMP(timezone=True), nullable=True)
# Owning search space; rows are removed when the search space is deleted.
search_space_id = Column(
    Integer, ForeignKey("searchspaces.id", ondelete="CASCADE"), nullable=False
)

View file

@ -5,6 +5,7 @@ from .airtable_add_connector_route import (
)
from .chats_routes import router as chats_router
from .documents_routes import router as documents_router
from .editor_routes import router as editor_router
from .google_calendar_add_connector_route import (
router as google_calendar_add_connector_router,
)
@ -23,6 +24,7 @@ router = APIRouter()
router.include_router(search_spaces_router)
router.include_router(rbac_router) # RBAC routes for roles, members, invites
router.include_router(editor_router)
router.include_router(documents_router)
router.include_router(podcasts_router)
router.include_router(chats_router)

View file

@ -0,0 +1,166 @@
"""
Editor routes for BlockNote document editing.
"""
from datetime import UTC, datetime
from typing import Any
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from app.db import Document, Permission, User, get_async_session
from app.users import current_active_user
from app.utils.rbac import check_permission
router = APIRouter()
@router.get("/search-spaces/{search_space_id}/documents/{document_id}/editor-content")
async def get_editor_content(
    search_space_id: int,
    document_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """
    Get document content for editing.

    Returns BlockNote JSON document. If blocknote_document is NULL,
    attempts to generate it from chunks (lazy migration) and persists the
    result so the conversion only happens once per document.

    Raises:
        HTTPException 404: document not found in this search space.
        HTTPException 400: document has no chunks or empty content.
        HTTPException 500: markdown -> BlockNote conversion failed.

    Requires DOCUMENTS_READ permission.
    """
    # Check RBAC permission (raises if the user lacks access)
    await check_permission(
        session,
        user,
        search_space_id,
        Permission.DOCUMENTS_READ.value,
        "You don't have permission to read documents in this search space",
    )

    # Eager-load chunks: they are needed below for the lazy-migration path.
    result = await session.execute(
        select(Document)
        .options(selectinload(Document.chunks))
        .filter(
            Document.id == document_id,
            Document.search_space_id == search_space_id,
        )
    )
    document = result.scalars().first()

    if not document:
        raise HTTPException(status_code=404, detail="Document not found")

    # If blocknote_document exists, return it.
    # NOTE(review): truthiness check means an empty-but-saved document would
    # be regenerated from chunks — presumably intentional; confirm.
    if document.blocknote_document:
        return {
            "document_id": document.id,
            "title": document.title,
            "blocknote_document": document.blocknote_document,
            "last_edited_at": document.last_edited_at.isoformat()
            if document.last_edited_at
            else None,
        }

    # Lazy migration: Try to generate blocknote_document from chunks
    from app.utils.blocknote_converter import convert_markdown_to_blocknote

    # Deterministic order: chunk ids increase in insertion order.
    chunks = sorted(document.chunks, key=lambda c: c.id)

    if not chunks:
        raise HTTPException(
            status_code=400,
            detail="This document has no chunks and cannot be edited. Please re-upload to enable editing.",
        )

    # Reconstruct markdown from chunks
    markdown_content = "\n\n".join(chunk.content for chunk in chunks)

    if not markdown_content.strip():
        raise HTTPException(
            status_code=400,
            detail="This document has empty content and cannot be edited.",
        )

    # Convert to BlockNote
    blocknote_json = await convert_markdown_to_blocknote(markdown_content)

    if not blocknote_json:
        raise HTTPException(
            status_code=500,
            detail="Failed to convert document to editable format. Please try again later.",
        )

    # Save the generated blocknote_document (lazy migration) so the
    # conversion is not repeated on the next open.
    document.blocknote_document = blocknote_json
    document.content_needs_reindexing = False
    document.last_edited_at = None
    await session.commit()

    return {
        "document_id": document.id,
        "title": document.title,
        "blocknote_document": blocknote_json,
        "last_edited_at": None,
    }
@router.post("/search-spaces/{search_space_id}/documents/{document_id}/save")
async def save_document(
    search_space_id: int,
    document_id: int,
    data: dict[str, Any],
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """
    Save BlockNote document and trigger reindexing.

    Called when user clicks 'Save & Exit'.
    Requires DOCUMENTS_UPDATE permission.
    """
    from app.tasks.celery_tasks.document_reindex_tasks import reindex_document_task

    # RBAC: raises if the user may not update documents in this space.
    await check_permission(
        session,
        user,
        search_space_id,
        Permission.DOCUMENTS_UPDATE.value,
        "You don't have permission to update documents in this search space",
    )

    doc_query = select(Document).filter(
        Document.id == document_id,
        Document.search_space_id == search_space_id,
    )
    doc = (await session.execute(doc_query)).scalars().first()

    if doc is None:
        raise HTTPException(status_code=404, detail="Document not found")

    incoming_blocks = data.get("blocknote_document")
    if not incoming_blocks:
        raise HTTPException(status_code=400, detail="blocknote_document is required")

    # Persist the editor state and mark the document for background reindexing.
    doc.blocknote_document = incoming_blocks
    doc.last_edited_at = datetime.now(UTC)
    doc.content_needs_reindexing = True
    await session.commit()

    # Regenerate chunks/summary asynchronously.
    reindex_document_task.delay(document_id, str(user.id))

    return {
        "status": "saved",
        "document_id": document_id,
        "message": "Document saved and will be reindexed in the background",
        "last_edited_at": doc.last_edited_at.isoformat(),
    }

View file

@ -0,0 +1,168 @@
"""Celery tasks for populating blocknote_document for existing documents."""
import logging
from sqlalchemy import select
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from sqlalchemy.orm import selectinload
from sqlalchemy.pool import NullPool
from app.celery_app import celery_app
from app.config import config
from app.db import Document
from app.utils.blocknote_converter import convert_markdown_to_blocknote
logger = logging.getLogger(__name__)
def get_celery_session_maker():
    """
    Build a fresh async session maker for use inside Celery tasks.

    Celery tasks run in their own event loop, so the application's default
    session maker (bound to the main app's loop) cannot be reused here.
    NullPool keeps short-lived tasks from holding pooled connections.
    """
    task_engine = create_async_engine(
        config.DATABASE_URL, echo=False, poolclass=NullPool
    )
    return async_sessionmaker(task_engine, expire_on_commit=False)
@celery_app.task(name="populate_blocknote_for_documents", bind=True)
def populate_blocknote_for_documents_task(
    self, document_ids: list[int] | None = None, batch_size: int = 50
):
    """
    Celery task to populate blocknote_document for existing documents.

    Args:
        document_ids: Optional list of specific document IDs to process.
            If None, processes all documents with blocknote_document IS NULL.
        batch_size: Number of documents to process in each batch (default: 50)
    """
    import asyncio

    # asyncio.run creates a fresh event loop and guarantees it is closed,
    # replacing the manual new_event_loop/run_until_complete/close dance.
    asyncio.run(_populate_blocknote_for_documents(document_ids, batch_size))
async def _populate_blocknote_for_documents(
    document_ids: list[int] | None = None, batch_size: int = 50
):
    """
    Async function to populate blocknote_document for documents.

    Selects all documents whose blocknote_document is NULL (optionally
    restricted to ``document_ids``), rebuilds markdown from their chunks,
    converts it to BlockNote JSON, and commits in batches. Per-document
    failures are logged and skipped so one bad document does not abort
    the whole run.

    Args:
        document_ids: Optional list of specific document IDs to process
        batch_size: Number of documents to process per batch
    """
    async with get_celery_session_maker()() as session:
        try:
            # Build query for documents that need blocknote_document populated
            query = select(Document).where(Document.blocknote_document.is_(None))

            # If specific document IDs provided, filter by them
            if document_ids:
                query = query.where(Document.id.in_(document_ids))

            # Load chunks relationship to avoid N+1 queries
            query = query.options(selectinload(Document.chunks))

            # Execute query
            result = await session.execute(query)
            documents = result.scalars().all()

            total_documents = len(documents)
            logger.info(f"Found {total_documents} documents to process")

            if total_documents == 0:
                logger.info("No documents to process")
                return

            # Process documents in batches
            processed = 0
            failed = 0

            for i in range(0, total_documents, batch_size):
                batch = documents[i : i + batch_size]
                logger.info(
                    f"Processing batch {i // batch_size + 1}: documents {i + 1}-{min(i + batch_size, total_documents)}"
                )

                for document in batch:
                    try:
                        # Use preloaded chunks from selectinload - no need to query again.
                        # Sort by id for a deterministic reconstruction order.
                        chunks = sorted(document.chunks, key=lambda c: c.id)

                        if not chunks:
                            logger.warning(
                                f"Document {document.id} ({document.title}) has no chunks, skipping"
                            )
                            failed += 1
                            continue

                        # Reconstruct markdown by concatenating chunk contents
                        markdown_content = "\n\n".join(
                            chunk.content for chunk in chunks
                        )

                        if not markdown_content or not markdown_content.strip():
                            logger.warning(
                                f"Document {document.id} ({document.title}) has empty markdown content, skipping"
                            )
                            failed += 1
                            continue

                        # Convert markdown to BlockNote JSON
                        blocknote_json = await convert_markdown_to_blocknote(
                            markdown_content
                        )

                        if not blocknote_json:
                            logger.warning(
                                f"Failed to convert markdown to BlockNote for document {document.id} ({document.title})"
                            )
                            failed += 1
                            continue

                        # Update document with blocknote_document (other fields already have correct defaults)
                        document.blocknote_document = blocknote_json
                        processed += 1

                        # Commit every batch_size documents to avoid long transactions.
                        # NOTE(review): this triggers only when the running success
                        # count is an exact multiple of batch_size (skips delay it);
                        # the per-batch commit below is the real safety net.
                        if processed % batch_size == 0:
                            await session.commit()
                            logger.info(
                                f"Committed batch: {processed} documents processed so far"
                            )

                    except Exception as e:
                        logger.error(
                            f"Error processing document {document.id} ({document.title}): {e}",
                            exc_info=True,
                        )
                        failed += 1
                        # Continue with next document instead of failing entire batch
                        continue

                # Commit remaining changes in the batch
                await session.commit()
                logger.info(f"Completed batch {i // batch_size + 1}")

            logger.info(
                f"Migration complete: {processed} documents processed, {failed} failed"
            )

        except Exception as e:
            await session.rollback()
            logger.error(f"Error in blocknote migration task: {e}", exc_info=True)
            raise

View file

@ -0,0 +1,126 @@
"""Celery tasks for reindexing edited documents."""
import logging
from sqlalchemy import delete, select
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from sqlalchemy.orm import selectinload
from sqlalchemy.pool import NullPool
from app.celery_app import celery_app
from app.config import config
from app.db import Document
from app.services.llm_service import get_user_long_context_llm
from app.utils.blocknote_converter import convert_blocknote_to_markdown
from app.utils.document_converters import (
create_document_chunks,
generate_document_summary,
)
logger = logging.getLogger(__name__)
def get_celery_session_maker():
    """Create async session maker for Celery tasks.

    A dedicated engine (NullPool, no connection reuse) is built per call so
    the session works inside the task's own event loop.
    """
    reindex_engine = create_async_engine(
        config.DATABASE_URL, echo=False, poolclass=NullPool
    )
    return async_sessionmaker(reindex_engine, expire_on_commit=False)
@celery_app.task(name="reindex_document", bind=True)
def reindex_document_task(self, document_id: int, user_id: str):
    """
    Celery task to reindex a document after editing.

    Args:
        document_id: ID of document to reindex
        user_id: ID of user who edited the document
    """
    import asyncio

    # asyncio.run creates a fresh event loop and guarantees it is closed,
    # replacing the manual new_event_loop/run_until_complete/close dance.
    asyncio.run(_reindex_document(document_id, user_id))
async def _reindex_document(document_id: int, user_id: str):
    """Async function to reindex a document.

    Pipeline: BlockNote JSON -> markdown -> fresh chunks -> fresh summary +
    embedding. Commits once at the end; any failure rolls back the whole
    reindex and re-raises so Celery records the task as failed.

    Args:
        document_id: ID of the document to reindex
        user_id: ID of the editing user (used to resolve their LLM config)
    """
    async with get_celery_session_maker()() as session:
        try:
            # Get document
            result = await session.execute(
                select(Document)
                .options(selectinload(Document.chunks))  # Eagerly load chunks
                .where(Document.id == document_id)
            )
            document = result.scalars().first()

            if not document:
                logger.error(f"Document {document_id} not found")
                return

            if not document.blocknote_document:
                logger.warning(f"Document {document_id} has no BlockNote content")
                return

            logger.info(f"Reindexing document {document_id} ({document.title})")

            # 1. Convert BlockNote → Markdown
            markdown_content = await convert_blocknote_to_markdown(
                document.blocknote_document
            )

            if not markdown_content:
                logger.error(f"Failed to convert document {document_id} to markdown")
                return

            # 2. Delete old chunks explicitly (bulk delete; the ORM
            # relationship is bypassed, hence the flush below).
            from app.db import Chunk

            await session.execute(delete(Chunk).where(Chunk.document_id == document_id))
            await session.flush()  # Ensure old chunks are deleted

            # 3. Create new chunks from the regenerated markdown
            new_chunks = await create_document_chunks(markdown_content)

            # 4. Add new chunks to session, attached to this document
            for chunk in new_chunks:
                chunk.document_id = document_id
                session.add(chunk)

            logger.info(f"Created {len(new_chunks)} chunks for document {document_id}")

            # 5. Regenerate summary with the editing user's configured LLM
            user_llm = await get_user_long_context_llm(
                session, user_id, document.search_space_id
            )

            document_metadata = {
                "title": document.title,
                "document_type": document.document_type.value,
            }

            summary_content, summary_embedding = await generate_document_summary(
                markdown_content, user_llm, document_metadata
            )

            # 6. Update document and clear the reindex flag
            document.content = summary_content
            document.embedding = summary_embedding
            document.content_needs_reindexing = False

            await session.commit()
            logger.info(f"Successfully reindexed document {document_id}")

        except Exception as e:
            await session.rollback()
            logger.error(f"Error reindexing document {document_id}: {e}", exc_info=True)
            raise

View file

@ -145,6 +145,16 @@ async def add_extension_received_document(
# Process chunks
chunks = await create_document_chunks(content.pageContent)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(combined_document_string)
if not blocknote_json:
logging.warning(
f"Failed to convert extension document '{content.metadata.VisitedWebPageTitle}' "
f"to BlockNote JSON, document will not be editable"
)
# Update or create document
if existing_document:
# Update existing document
@ -154,6 +164,7 @@ async def add_extension_received_document(
existing_document.embedding = summary_embedding
existing_document.document_metadata = content.metadata.model_dump()
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@ -170,6 +181,7 @@ async def add_extension_received_document(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
)
session.add(document)

View file

@ -100,6 +100,15 @@ async def add_received_file_document_using_unstructured(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
logging.warning(
f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
)
# Update or create document
if existing_document:
# Update existing document
@ -112,6 +121,9 @@ async def add_received_file_document_using_unstructured(
"ETL_SERVICE": "UNSTRUCTURED",
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
existing_document.content_needs_reindexing = False
existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@ -131,6 +143,9 @@ async def add_received_file_document_using_unstructured(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
content_needs_reindexing=False,
last_edited_at=None,
)
session.add(document)
@ -214,6 +229,15 @@ async def add_received_file_document_using_llamacloud(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
logging.warning(
f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
)
# Update or create document
if existing_document:
# Update existing document
@ -226,6 +250,9 @@ async def add_received_file_document_using_llamacloud(
"ETL_SERVICE": "LLAMACLOUD",
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
existing_document.content_needs_reindexing = False
existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@ -245,6 +272,9 @@ async def add_received_file_document_using_llamacloud(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
content_needs_reindexing=False,
last_edited_at=None,
)
session.add(document)
@ -353,6 +383,15 @@ async def add_received_file_document_using_docling(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert markdown to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
logging.warning(
f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
)
# Update or create document
if existing_document:
# Update existing document
@ -365,6 +404,9 @@ async def add_received_file_document_using_docling(
"ETL_SERVICE": "DOCLING",
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
existing_document.content_needs_reindexing = False
existing_document.last_edited_at = None
await session.commit()
await session.refresh(existing_document)
@ -384,6 +426,9 @@ async def add_received_file_document_using_docling(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
content_needs_reindexing=False,
last_edited_at=None,
)
session.add(document)

View file

@ -110,6 +110,15 @@ async def add_received_markdown_file_document(
# Process chunks
chunks = await create_document_chunks(file_in_markdown)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(file_in_markdown)
if not blocknote_json:
logging.warning(
f"Failed to convert {file_name} to BlockNote JSON, document will not be editable"
)
# Update or create document
if existing_document:
# Update existing document
@ -121,6 +130,7 @@ async def add_received_markdown_file_document(
"FILE_NAME": file_name,
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@ -139,6 +149,7 @@ async def add_received_markdown_file_document(
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
)
session.add(document)

View file

@ -291,6 +291,16 @@ async def add_youtube_video_document(
{"stage": "chunk_processing"},
)
from app.utils.blocknote_converter import convert_markdown_to_blocknote
# Convert transcript to BlockNote JSON
blocknote_json = await convert_markdown_to_blocknote(combined_document_string)
if not blocknote_json:
logging.warning(
f"Failed to convert YouTube video '{video_id}' to BlockNote JSON, "
"document will not be editable"
)
chunks = await create_document_chunks(combined_document_string)
# Update or create document
@ -314,6 +324,7 @@ async def add_youtube_video_document(
"thumbnail": video_data.get("thumbnail_url", ""),
}
existing_document.chunks = chunks
existing_document.blocknote_document = blocknote_json
await session.commit()
await session.refresh(existing_document)
@ -342,6 +353,7 @@ async def add_youtube_video_document(
search_space_id=search_space_id,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
blocknote_document=blocknote_json,
)
session.add(document)

View file

@ -0,0 +1,123 @@
import logging
from typing import Any
import httpx
from app.config import config
logger = logging.getLogger(__name__)
async def convert_markdown_to_blocknote(
    markdown: str,
) -> list[dict[str, Any]] | dict[str, Any] | None:
    """
    Convert markdown to BlockNote JSON via Next.js API.

    Args:
        markdown: Markdown string to convert

    Returns:
        The BlockNote document (a list of block dicts as returned by the
        Next.js endpoint), a minimal placeholder document for very short
        input, or None if the input is empty or conversion fails.
    """
    if not markdown or not markdown.strip():
        logger.warning("Empty markdown provided for conversion")
        return None

    # Too short to be worth a round-trip to the converter; return a minimal
    # BlockNote document so the editor can still open.
    if len(markdown) < 10:
        logger.warning("Markdown too short to convert; returning placeholder document")
        return [
            {
                "type": "paragraph",
                "content": [
                    {
                        "type": "text",
                        "text": "Document content could not be converted for editing.",
                        "styles": {},
                    }
                ],
                "children": [],
            }
        ]

    async with httpx.AsyncClient() as client:
        try:
            response = await client.post(
                f"{config.NEXT_FRONTEND_URL}/api/convert-to-blocknote",
                json={"markdown": markdown},
                timeout=30.0,
            )
            response.raise_for_status()

            data = response.json()
            blocknote_document = data.get("blocknote_document")

            if blocknote_document:
                logger.info(
                    f"Successfully converted markdown to BlockNote ({len(markdown)} chars)"
                )
                return blocknote_document
            else:
                logger.warning("Next.js API returned empty blocknote_document")
                return None

        except httpx.TimeoutException:
            logger.error("Timeout converting markdown to BlockNote after 30s")
            return None
        except httpx.HTTPStatusError as e:
            logger.error(
                f"HTTP error converting markdown to BlockNote: {e.response.status_code} - {e.response.text}"
            )
            # Log first 1000 chars of problematic markdown for debugging
            logger.debug(f"Problematic markdown sample: {markdown[:1000]}")
            return None
        except Exception as e:
            logger.error(f"Failed to convert markdown to BlockNote: {e}", exc_info=True)
            return None
async def convert_blocknote_to_markdown(
    blocknote_document: dict[str, Any] | list[dict[str, Any]],
) -> str | None:
    """
    Convert a BlockNote JSON document to markdown via the Next.js API.

    Args:
        blocknote_document: BlockNote document as dict or list of blocks

    Returns:
        Markdown string, or None if conversion fails
    """
    if not blocknote_document:
        logger.warning("Empty BlockNote document provided for conversion")
        return None

    async with httpx.AsyncClient() as client:
        try:
            response = await client.post(
                f"{config.NEXT_FRONTEND_URL}/api/convert-to-markdown",
                json={"blocknote_document": blocknote_document},
                timeout=30.0,
            )
            response.raise_for_status()

            converted = response.json().get("markdown")
            if converted:
                logger.info(
                    f"Successfully converted BlockNote to markdown ({len(converted)} chars)"
                )
                return converted

            logger.warning("Next.js API returned empty markdown")
            return None

        except httpx.TimeoutException:
            logger.error("Timeout converting BlockNote to markdown after 30s")
            return None
        except httpx.HTTPStatusError as e:
            logger.error(
                f"HTTP error converting BlockNote to markdown: {e.response.status_code} - {e.response.text}"
            )
            return None
        except Exception as e:
            logger.error(f"Failed to convert BlockNote to markdown: {e}", exc_info=True)
            return None

View file

@ -0,0 +1,40 @@
import { ServerBlockNoteEditor } from "@blocknote/server-util";
import { type NextRequest, NextResponse } from "next/server";
/**
 * Convert a markdown string to BlockNote blocks.
 *
 * Request body:  { markdown: string }
 * Response 200:  { blocknote_document: Block[] }
 * Response 400:  markdown missing or not a string
 * Response 500:  parsing failed or produced no blocks
 */
export async function POST(request: NextRequest) {
  try {
    const { markdown } = await request.json();

    if (!markdown || typeof markdown !== "string") {
      return NextResponse.json({ error: "Markdown string is required" }, { status: 400 });
    }

    // Create server-side editor instance (used purely for parsing)
    const editor = ServerBlockNoteEditor.create();

    // Convert markdown directly to BlockNote blocks
    const blocks = await editor.tryParseMarkdownToBlocks(markdown);

    if (!blocks || blocks.length === 0) {
      throw new Error("Markdown parsing returned no blocks");
    }

    return NextResponse.json({ blocknote_document: blocks });
  } catch (error) {
    console.error("Failed to convert markdown to BlockNote:", error);
    return NextResponse.json(
      {
        error: "Failed to convert markdown to BlockNote blocks",
        // Guarded access: `error` is `unknown`, not every throw is an Error.
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View file

@ -0,0 +1,28 @@
import { ServerBlockNoteEditor } from "@blocknote/server-util";
import { type NextRequest, NextResponse } from "next/server";
/**
 * Convert BlockNote blocks back to markdown.
 *
 * Request body:  { blocknote_document: Block[] }
 * Response 200:  { markdown: string }
 * Response 400:  blocknote_document missing or not an array
 * Response 500:  conversion failed
 */
export async function POST(request: NextRequest) {
  try {
    const { blocknote_document } = await request.json();

    if (!blocknote_document || !Array.isArray(blocknote_document)) {
      return NextResponse.json({ error: "BlockNote document array is required" }, { status: 400 });
    }

    // Create server-side editor instance (used purely for serialization)
    const editor = ServerBlockNoteEditor.create();

    // "Lossy": block metadata with no markdown equivalent is dropped.
    const markdown = await editor.blocksToMarkdownLossy(blocknote_document);

    return NextResponse.json({
      markdown,
    });
  } catch (error) {
    console.error("Failed to convert BlockNote to markdown:", error);
    return NextResponse.json(
      {
        error: "Failed to convert BlockNote blocks to markdown",
        // Consistent with the convert-to-blocknote route: surface details.
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View file

@ -309,6 +309,7 @@ export function DocumentsTableShell({
refreshDocuments={async () => {
await onRefresh();
}}
searchSpaceId={searchSpaceId as string}
/>
</TableCell>
</motion.tr>
@ -340,6 +341,7 @@ export function DocumentsTableShell({
refreshDocuments={async () => {
await onRefresh();
}}
searchSpaceId={searchSpaceId as string}
/>
</div>
<div className="mt-1 flex flex-wrap items-center gap-2">

View file

@ -1,6 +1,8 @@
"use client";
import { MoreHorizontal } from "lucide-react";
import { FileText, Pencil, Trash2 } from "lucide-react";
import { motion } from "motion/react";
import { useRouter } from "next/navigation";
import { useState } from "react";
import { toast } from "sonner";
import { JsonMetadataViewer } from "@/components/json-metadata-viewer";
@ -12,29 +14,26 @@ import {
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import { Button } from "@/components/ui/button";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import type { Document } from "./types";
export function RowActions({
document,
deleteDocument,
refreshDocuments,
searchSpaceId,
}: {
document: Document;
deleteDocument: (id: number) => Promise<boolean>;
refreshDocuments: () => Promise<void>;
searchSpaceId: string;
}) {
const [isOpen, setIsOpen] = useState(false);
const [isDeleteOpen, setIsDeleteOpen] = useState(false);
const [isMetadataOpen, setIsMetadataOpen] = useState(false);
const [isDeleting, setIsDeleting] = useState(false);
const router = useRouter();
const handleDelete = async () => {
setIsDeleting(true);
@ -48,62 +47,114 @@ export function RowActions({
toast.error("Failed to delete document");
} finally {
setIsDeleting(false);
setIsOpen(false);
setIsDeleteOpen(false);
}
};
const handleEdit = () => {
router.push(`/dashboard/${searchSpaceId}/editor/${document.id}`);
};
return (
<div className="flex justify-end">
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button variant="ghost" className="h-8 w-8 p-0">
<span className="sr-only">Open menu</span>
<MoreHorizontal className="h-4 w-4" />
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
<JsonMetadataViewer
title={document.title}
metadata={document.document_metadata}
trigger={
<DropdownMenuItem onSelect={(e) => e.preventDefault()}>
View Metadata
</DropdownMenuItem>
}
/>
<DropdownMenuSeparator />
<AlertDialog open={isOpen} onOpenChange={setIsOpen}>
<AlertDialogTrigger asChild>
<DropdownMenuItem
className="text-destructive focus:text-destructive"
onSelect={(e) => {
e.preventDefault();
setIsOpen(true);
}}
>
Delete
</DropdownMenuItem>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Are you sure?</AlertDialogTitle>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>Cancel</AlertDialogCancel>
<AlertDialogAction
onClick={(e) => {
e.preventDefault();
handleDelete();
}}
disabled={isDeleting}
>
{isDeleting ? "Deleting..." : "Delete"}
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
</DropdownMenuContent>
</DropdownMenu>
<div className="flex items-center justify-end gap-1">
{/* Edit Button */}
<Tooltip>
<TooltipTrigger asChild>
<motion.div
whileHover={{ scale: 1.1 }}
whileTap={{ scale: 0.95 }}
transition={{ type: "spring", stiffness: 400, damping: 17 }}
>
<Button
variant="ghost"
size="icon"
className="h-8 w-8 text-muted-foreground hover:text-foreground hover:bg-muted/80"
onClick={handleEdit}
>
<Pencil className="h-4 w-4" />
<span className="sr-only">Edit Document</span>
</Button>
</motion.div>
</TooltipTrigger>
<TooltipContent side="top">
<p>Edit Document</p>
</TooltipContent>
</Tooltip>
{/* View Metadata Button */}
<Tooltip>
<TooltipTrigger asChild>
<motion.div
whileHover={{ scale: 1.1 }}
whileTap={{ scale: 0.95 }}
transition={{ type: "spring", stiffness: 400, damping: 17 }}
>
<Button
variant="ghost"
size="icon"
className="h-8 w-8 text-muted-foreground hover:text-foreground hover:bg-muted/80"
onClick={() => setIsMetadataOpen(true)}
>
<FileText className="h-4 w-4" />
<span className="sr-only">View Metadata</span>
</Button>
</motion.div>
</TooltipTrigger>
<TooltipContent side="top">
<p>View Metadata</p>
</TooltipContent>
</Tooltip>
<JsonMetadataViewer
title={document.title}
metadata={document.document_metadata}
open={isMetadataOpen}
onOpenChange={setIsMetadataOpen}
/>
{/* Delete Button */}
<Tooltip>
<TooltipTrigger asChild>
<motion.div
whileHover={{ scale: 1.1 }}
whileTap={{ scale: 0.95 }}
transition={{ type: "spring", stiffness: 400, damping: 17 }}
>
<Button
variant="ghost"
size="icon"
className="h-8 w-8 text-muted-foreground hover:text-destructive hover:bg-destructive/10"
onClick={() => setIsDeleteOpen(true)}
disabled={isDeleting}
>
<Trash2 className="h-4 w-4" />
<span className="sr-only">Delete</span>
</Button>
</motion.div>
</TooltipTrigger>
<TooltipContent side="top">
<p>Delete</p>
</TooltipContent>
</Tooltip>
<AlertDialog open={isDeleteOpen} onOpenChange={setIsDeleteOpen}>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Are you sure?</AlertDialogTitle>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>Cancel</AlertDialogCancel>
<AlertDialogAction
onClick={(e) => {
e.preventDefault();
handleDelete();
}}
disabled={isDeleting}
className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
>
{isDeleting ? "Deleting..." : "Delete"}
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
</div>
);
}

View file

@ -0,0 +1,263 @@
"use client";
import { AlertCircle, FileText, Loader2, Save, X } from "lucide-react";
import { motion } from "motion/react";
import { useParams, useRouter } from "next/navigation";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import { BlockNoteEditor } from "@/components/DynamicBlockNoteEditor";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Separator } from "@/components/ui/separator";
// Payload returned by GET .../documents/{id}/editor-content on the FastAPI backend.
interface EditorContent {
	// Primary key of the document row.
	document_id: number;
	// Human-readable document title, shown in the toolbar.
	title: string;
	// BlockNote editor state (JSONB on the backend); null/absent for documents
	// ingested before the BlockNote migration.
	blocknote_document: any;
	// ISO timestamp of the last save, or null if never edited.
	last_edited_at: string | null;
}
/**
 * Full-page BlockNote editor for a single document.
 *
 * Fetches the document's `blocknote_document` state directly from the FastAPI
 * backend, lets the user edit it, and saves it back — the save endpoint also
 * queues background re-indexing server-side.
 */
export default function EditorPage() {
	const params = useParams();
	const router = useRouter();
	const documentId = params.documentId as string;

	// NOTE: this state intentionally shadows the global DOM `document`; nothing
	// in this component touches the DOM document directly.
	const [document, setDocument] = useState<EditorContent | null>(null);
	const [loading, setLoading] = useState(true);
	const [saving, setSaving] = useState(false);
	const [editorContent, setEditorContent] = useState<any>(null);
	const [error, setError] = useState<string | null>(null);
	const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false);

	// Get auth token (guarded because `window` is undefined during SSR).
	const token =
		typeof window !== "undefined" ? localStorage.getItem("surfsense_bearer_token") : null;

	// Fetch document content - DIRECT CALL TO FASTAPI
	useEffect(() => {
		async function fetchDocument() {
			if (!token) {
				console.error("No auth token found");
				setError("Please login to access the editor");
				setLoading(false);
				return;
			}
			try {
				const response = await fetch(
					`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${params.search_space_id}/documents/${documentId}/editor-content`,
					{
						headers: {
							Authorization: `Bearer ${token}`,
						},
					}
				);
				if (!response.ok) {
					const errorData = await response
						.json()
						.catch(() => ({ detail: "Failed to fetch document" }));
					throw new Error(errorData.detail || "Failed to fetch document");
				}
				const data = await response.json();
				// Documents ingested before the BlockNote migration have no editor state.
				if (!data.blocknote_document) {
					setError(
						"This document does not have BlockNote content. Please re-upload the document to enable editing."
					);
					setLoading(false);
					return;
				}
				setDocument(data);
				setEditorContent(data.blocknote_document);
				setError(null);
			} catch (error) {
				console.error("Error fetching document:", error);
				setError(
					error instanceof Error ? error.message : "Failed to fetch document. Please try again."
				);
			} finally {
				setLoading(false);
			}
		}
		if (documentId && token) {
			fetchDocument();
		}
	}, [documentId, token]);

	// Track changes to mark as unsaved.
	// BUG FIX: the initial `setEditorContent(data.blocknote_document)` right
	// after the fetch used to trip this effect and flag the page as "Unsaved
	// changes" before the user typed anything. Immediately after load,
	// `editorContent` and `document.blocknote_document` are the same object
	// reference; any real edit produces a new document object from the editor's
	// onChange, so a reference-inequality check flags only genuine edits.
	useEffect(() => {
		if (editorContent && document && editorContent !== document.blocknote_document) {
			setHasUnsavedChanges(true);
		}
	}, [editorContent, document]);

	// TODO: Maybe add Auto-save every 30 seconds - DIRECT CALL TO FASTAPI

	// Save and exit - DIRECT CALL TO FASTAPI
	const handleSave = async () => {
		if (!token) {
			toast.error("Please login to save");
			return;
		}
		if (!editorContent) {
			toast.error("No content to save");
			return;
		}
		setSaving(true);
		try {
			// Save blocknote_document and trigger reindexing in background
			const response = await fetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${params.search_space_id}/documents/${documentId}/save`,
				{
					method: "POST",
					headers: {
						"Content-Type": "application/json",
						Authorization: `Bearer ${token}`,
					},
					body: JSON.stringify({ blocknote_document: editorContent }),
				}
			);
			if (!response.ok) {
				const errorData = await response
					.json()
					.catch(() => ({ detail: "Failed to save document" }));
				throw new Error(errorData.detail || "Failed to save document");
			}
			setHasUnsavedChanges(false);
			toast.success("Document saved! Reindexing in background...");
			// Small delay before redirect to show success message
			setTimeout(() => {
				router.push(`/dashboard/${params.search_space_id}/documents`);
			}, 500);
		} catch (error) {
			console.error("Error saving document:", error);
			toast.error(
				error instanceof Error ? error.message : "Failed to save document. Please try again."
			);
		} finally {
			setSaving(false);
		}
	};

	// Leave the editor, confirming first if there are unsaved edits.
	const handleCancel = () => {
		if (hasUnsavedChanges) {
			if (confirm("You have unsaved changes. Are you sure you want to leave?")) {
				router.back();
			}
		} else {
			router.back();
		}
	};

	// --- Render states: loading spinner, error card, not-found card, editor ---

	if (loading) {
		return (
			<div className="flex items-center justify-center min-h-[400px] p-6">
				<Card className="w-full max-w-md">
					<CardContent className="flex flex-col items-center justify-center py-12">
						<Loader2 className="h-12 w-12 text-primary animate-spin mb-4" />
						<p className="text-muted-foreground">Loading editor...</p>
					</CardContent>
				</Card>
			</div>
		);
	}

	if (error) {
		return (
			<div className="flex items-center justify-center min-h-[400px] p-6">
				<motion.div
					initial={{ opacity: 0, y: 20 }}
					animate={{ opacity: 1, y: 0 }}
					className="w-full max-w-md"
				>
					<Card className="border-destructive/50">
						<CardHeader>
							<div className="flex items-center gap-2">
								<AlertCircle className="h-5 w-5 text-destructive" />
								<CardTitle className="text-destructive">Error</CardTitle>
							</div>
							<CardDescription>{error}</CardDescription>
						</CardHeader>
						<CardContent>
							<Button onClick={() => router.back()} variant="outline" className="w-full">
								<X className="mr-2 h-4 w-4" />
								Go Back
							</Button>
						</CardContent>
					</Card>
				</motion.div>
			</div>
		);
	}

	if (!document) {
		return (
			<div className="flex items-center justify-center min-h-[400px] p-6">
				<Card className="w-full max-w-md">
					<CardContent className="flex flex-col items-center justify-center py-12">
						<FileText className="h-12 w-12 text-muted-foreground mb-4" />
						<p className="text-muted-foreground">Document not found</p>
					</CardContent>
				</Card>
			</div>
		);
	}

	return (
		<motion.div
			initial={{ opacity: 0 }}
			animate={{ opacity: 1 }}
			className="flex flex-col h-full w-full"
		>
			{/* Toolbar */}
			<div className="sticky top-0 z-40 flex h-16 shrink-0 items-center gap-4 border-b bg-background/95 backdrop-blur supports-backdrop-filter:bg-background/60 px-6">
				<div className="flex items-center gap-3 flex-1 min-w-0">
					<FileText className="h-5 w-5 text-muted-foreground shrink-0" />
					<div className="flex flex-col min-w-0">
						<h1 className="text-lg font-semibold truncate">{document.title}</h1>
						{hasUnsavedChanges && <p className="text-xs text-muted-foreground">Unsaved changes</p>}
					</div>
				</div>
				<Separator orientation="vertical" className="h-6" />
				<div className="flex items-center gap-2">
					<Button variant="outline" onClick={handleCancel} disabled={saving} className="gap-2">
						<X className="h-4 w-4" />
						Cancel
					</Button>
					<Button onClick={handleSave} disabled={saving} className="gap-2">
						{saving ? (
							<>
								<Loader2 className="h-4 w-4 animate-spin" />
								Saving...
							</>
						) : (
							<>
								<Save className="h-4 w-4" />
								Save & Exit
							</>
						)}
					</Button>
				</div>
			</div>
			{/* Editor Container */}
			<div className="flex-1 overflow-hidden relative">
				<div className="h-full w-full overflow-auto p-6">
					<div className="max-w-4xl mx-auto">
						<BlockNoteEditor initialContent={editorContent} onChange={setEditorContent} />
					</div>
				</div>
			</div>
		</motion.div>
	);
}

View file

@ -27,7 +27,7 @@
--accent: oklch(0.97 0 0);
--accent-foreground: oklch(0.205 0 0);
--destructive: oklch(0.577 0.245 27.325);
--destructive-foreground: oklch(0.577 0.245 27.325);
--destructive-foreground: oklch(0.985 0 0);
--border: oklch(0.922 0 0);
--input: oklch(0.922 0 0);
--ring: oklch(0.708 0 0);
@ -63,8 +63,8 @@
--muted-foreground: oklch(0.708 0 0);
--accent: oklch(0.269 0 0);
--accent-foreground: oklch(0.985 0 0);
--destructive: oklch(0.396 0.141 25.723);
--destructive-foreground: oklch(0.637 0.237 25.331);
--destructive: oklch(0.577 0.245 27.325);
--destructive-foreground: oklch(0.985 0 0);
--border: oklch(0.269 0 0);
--input: oklch(0.269 0 0);
--ring: oklch(0.439 0 0);

View file

@ -0,0 +1,68 @@
"use client";
import { useTheme } from "next-themes";
import { useEffect, useMemo, useRef } from "react";
import "@blocknote/core/fonts/inter.css";
import "@blocknote/mantine/style.css";
import { BlockNoteView } from "@blocknote/mantine";
import { useCreateBlockNote } from "@blocknote/react";
// Props for the BlockNote editor wrapper.
interface BlockNoteEditorProps {
	// BlockNote document (block array) used to seed the editor on first render
	// only; later changes to this prop are ignored.
	initialContent?: any;
	// Called with the full BlockNote document after every edit.
	onChange?: (content: any) => void;
}
/**
 * Thin wrapper around BlockNote's React editor.
 *
 * `initialContent` seeds the editor exactly once — useCreateBlockNote only
 * reads its options on the first render and memoizes the editor instance, so
 * later prop changes are intentionally ignored. `onChange` receives the full
 * BlockNote document on every edit.
 */
export default function BlockNoteEditor({ initialContent, onChange }: BlockNoteEditorProps) {
	const { resolvedTheme } = useTheme();

	// Creates a new editor instance. Passing `initialContent` directly is safe:
	// the hook reads options only on the first render, so the previous
	// ref-juggling (`initialContentRef.current === null ? ... : undefined`) was
	// dead logic — the ref was always null on the only render that mattered.
	const editor = useCreateBlockNote({
		initialContent: initialContent || undefined,
	});

	// Forward document changes to the parent.
	// BUG FIX: the subscription used to be gated on an `isInitializedRef` flag
	// that was only set when `initialContent` was truthy, so `onChange` silently
	// never fired for an editor that started empty. Subscribe unconditionally.
	useEffect(() => {
		if (!onChange || !editor) return;
		const handleChange = () => {
			onChange(editor.document);
		};
		const unsubscribe = editor.onChange(handleChange);
		return () => {
			// Guard: some BlockNote versions return undefined from onChange.
			unsubscribe?.();
		};
	}, [editor, onChange]);

	// Determine theme for BlockNote with custom dark mode background
	const blockNoteTheme = useMemo(() => {
		if (resolvedTheme === "dark") {
			// Custom dark theme - only override editor background, let BlockNote handle the rest
			return {
				colors: {
					editor: {
						background: "#0A0A0A", // Custom dark background
					},
				},
			};
		}
		return "light" as const;
	}, [resolvedTheme]);

	// Renders the editor instance
	return <BlockNoteView editor={editor} theme={blockNoteTheme} />;
}

View file

@ -0,0 +1,6 @@
"use client";
import dynamic from "next/dynamic";
// Dynamically import BlockNote editor with SSR disabled
export const BlockNoteEditor = dynamic(() => import("./BlockNoteEditor"), { ssr: false });

View file

@ -3,7 +3,7 @@
import { useAtomValue } from "jotai";
import { usePathname } from "next/navigation";
import { useTranslations } from "next-intl";
import React, { useEffect } from "react";
import React, { useEffect, useState } from "react";
import { activeChatAtom } from "@/atoms/chats/chat-query.atoms";
import {
Breadcrumb,
@ -34,6 +34,41 @@ export function DashboardBreadcrumb() {
autoFetch: !!searchSpaceId,
});
// State to store document title for editor breadcrumb
const [documentTitle, setDocumentTitle] = useState<string | null>(null);
// Fetch document title when on editor page
useEffect(() => {
if (segments[2] === "editor" && segments[3] && searchSpaceId) {
const documentId = segments[3];
const token =
typeof window !== "undefined" ? localStorage.getItem("surfsense_bearer_token") : null;
if (token) {
fetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/documents/${documentId}/editor-content`,
{
headers: {
Authorization: `Bearer ${token}`,
},
}
)
.then((res) => res.json())
.then((data) => {
if (data.title) {
setDocumentTitle(data.title);
}
})
.catch(() => {
// If fetch fails, just use the document ID
setDocumentTitle(null);
});
}
} else {
setDocumentTitle(null);
}
}, [segments, searchSpaceId]);
// Parse the pathname to create breadcrumb items
const generateBreadcrumbs = (path: string): BreadcrumbItemInterface[] => {
const segments = path.split("/").filter(Boolean);
@ -66,6 +101,7 @@ export function DashboardBreadcrumb() {
logs: t("logs"),
chats: t("chats"),
settings: t("settings"),
editor: t("editor"),
};
sectionLabel = sectionLabels[section] || sectionLabel;
@ -73,7 +109,21 @@ export function DashboardBreadcrumb() {
// Handle sub-sections
if (segments[3]) {
const subSection = segments[3];
let subSectionLabel = subSection.charAt(0).toUpperCase() + subSection.slice(1);
// Handle editor sub-sections (document ID)
if (section === "editor") {
const documentLabel = documentTitle || subSection;
breadcrumbs.push({
label: t("documents"),
href: `/dashboard/${segments[1]}/documents`,
});
breadcrumbs.push({
label: sectionLabel,
href: `/dashboard/${segments[1]}/documents`,
});
breadcrumbs.push({ label: documentLabel });
return breadcrumbs;
}
// Handle sources sub-sections
if (section === "sources") {
@ -81,7 +131,7 @@ export function DashboardBreadcrumb() {
add: "Add Sources",
};
const sourceLabel = sourceLabels[subSection] || subSectionLabel;
const sourceLabel = sourceLabels[subSection] || subSection;
breadcrumbs.push({
label: "Sources",
href: `/dashboard/${segments[1]}/sources`,
@ -98,7 +148,7 @@ export function DashboardBreadcrumb() {
webpage: t("add_webpages"),
};
const documentLabel = documentLabels[subSection] || subSectionLabel;
const documentLabel = documentLabels[subSection] || subSection;
breadcrumbs.push({
label: t("documents"),
href: `/dashboard/${segments[1]}/documents`,
@ -159,7 +209,7 @@ export function DashboardBreadcrumb() {
manage: t("manage_connectors"),
};
const connectorLabel = connectorLabels[subSection] || subSectionLabel;
const connectorLabel = connectorLabels[subSection] || subSection;
breadcrumbs.push({
label: t("connectors"),
href: `/dashboard/${segments[1]}/connectors`,
@ -169,6 +219,7 @@ export function DashboardBreadcrumb() {
}
// Handle other sub-sections
let subSectionLabel = subSection.charAt(0).toUpperCase() + subSection.slice(1);
const subSectionLabels: Record<string, string> = {
upload: t("upload_documents"),
youtube: t("add_youtube"),

View file

@ -15,9 +15,17 @@ interface JsonMetadataViewerProps {
title: string;
metadata: any;
trigger?: React.ReactNode;
open?: boolean;
onOpenChange?: (open: boolean) => void;
}
export function JsonMetadataViewer({ title, metadata, trigger }: JsonMetadataViewerProps) {
export function JsonMetadataViewer({
title,
metadata,
trigger,
open,
onOpenChange,
}: JsonMetadataViewerProps) {
// Ensure metadata is a valid object
const jsonData = React.useMemo(() => {
if (!metadata) return {};
@ -35,6 +43,23 @@ export function JsonMetadataViewer({ title, metadata, trigger }: JsonMetadataVie
}
}, [metadata]);
// Controlled mode: when open and onOpenChange are provided
if (open !== undefined && onOpenChange !== undefined) {
return (
<Dialog open={open} onOpenChange={onOpenChange}>
<DialogContent className="max-w-4xl max-h-[80vh] overflow-y-auto">
<DialogHeader>
<DialogTitle>{title} - Metadata</DialogTitle>
</DialogHeader>
<div className="mt-4 p-4 bg-muted/30 rounded-md">
<JsonView data={jsonData} style={defaultStyles} />
</div>
</DialogContent>
</Dialog>
);
}
// Uncontrolled mode: when using trigger
return (
<Dialog>
<DialogTrigger asChild>

View file

@ -621,6 +621,7 @@
"documents": "Documents",
"connectors": "Connectors",
"podcasts": "Podcasts",
"editor": "Editor",
"logs": "Logs",
"chats": "Chats",
"settings": "Settings",

View file

@ -621,6 +621,7 @@
"documents": "文档",
"connectors": "连接器",
"podcasts": "播客",
"editor": "编辑器",
"logs": "日志",
"chats": "聊天",
"settings": "设置",

View file

@ -7,6 +7,8 @@ const withNextIntl = createNextIntlPlugin("./i18n/request.ts");
const nextConfig: NextConfig = {
output: "standalone",
// Disable StrictMode for BlockNote compatibility with React 19/Next 15
reactStrictMode: false,
typescript: {
ignoreBuildErrors: true,
},
@ -21,6 +23,17 @@ const nextConfig: NextConfig = {
},
],
},
// Mark BlockNote server packages as external
serverExternalPackages: ["@blocknote/server-util"],
// Configure webpack to handle blocknote packages
webpack: (config, { isServer }) => {
if (isServer) {
// Don't bundle these packages on the server
config.externals = [...(config.externals || []), "@blocknote/server-util"];
}
return config;
},
};
// Wrap the config with MDX and next-intl plugins

View file

@ -22,6 +22,10 @@
},
"dependencies": {
"@ai-sdk/react": "^1.2.12",
"@blocknote/core": "^0.42.3",
"@blocknote/mantine": "^0.42.3",
"@blocknote/react": "^0.42.3",
"@blocknote/server-util": "^0.42.3",
"@hookform/resolvers": "^4.1.3",
"@llamaindex/chat-ui": "^0.5.17",
"@next/third-parties": "^15.5.6",

File diff suppressed because it is too large Load diff