diff --git a/docker/.env.example b/docker/.env.example index e67887840..7025cac52 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -5,7 +5,7 @@ # ============================================================================== # SurfSense version (use "latest", a clean version like "0.0.14", or a specific build like "0.0.14.1") -SURFSENSE_VERSION=0.0.13.9 +SURFSENSE_VERSION=latest # ------------------------------------------------------------------------------ # Core Settings diff --git a/docker/scripts/install.ps1 b/docker/scripts/install.ps1 index d9719d4ab..fc9c75a28 100644 --- a/docker/scripts/install.ps1 +++ b/docker/scripts/install.ps1 @@ -24,7 +24,7 @@ $ErrorActionPreference = 'Stop' # ── Configuration ─────────────────────────────────────────────────────────── -$RepoRaw = "https://raw.githubusercontent.com/MODSetter/SurfSense/dev" +$RepoRaw = "https://raw.githubusercontent.com/MODSetter/SurfSense/main" $InstallDir = ".\surfsense" $OldVolume = "surfsense-data" $DumpFile = ".\surfsense_migration_backup.sql" @@ -208,11 +208,12 @@ if ($MigrationMode) { if (-not (Test-Path $DumpFile)) { Write-Err "Dump file '$DumpFile' not found. The migration script may have failed." } + $DumpFilePath = (Resolve-Path $DumpFile).Path Write-Info "Restoring dump into PostgreSQL 17 - this may take a while for large databases..." $restoreErrFile = Join-Path $env:TEMP "surfsense_restore_err.log" Push-Location $InstallDir - Invoke-NativeSafe { Get-Content $DumpFile | docker compose exec -T -e "PGPASSWORD=$DbPass" db psql -U $DbUser -d $DbName 2>$restoreErrFile | Out-Null } | Out-Null + Invoke-NativeSafe { Get-Content -LiteralPath $DumpFilePath | docker compose exec -T -e "PGPASSWORD=$DbPass" db psql -U $DbUser -d $DbName 2>$restoreErrFile | Out-Null } | Out-Null Pop-Location $fatalErrors = @() @@ -246,7 +247,7 @@ if ($MigrationMode) { Write-Step "Starting all SurfSense services" Push-Location $InstallDir - Invoke-NativeSafe { docker compose up -d } | Out-Null + Invoke-NativeSafe { docker compose up -d } Pop-Location Write-Ok "All services started." @@ -255,7 +256,7 @@ if ($MigrationMode) { } else { Write-Step "Starting SurfSense" Push-Location $InstallDir - Invoke-NativeSafe { docker compose up -d } | Out-Null + Invoke-NativeSafe { docker compose up -d } Pop-Location Write-Ok "All services started." } @@ -316,7 +317,7 @@ Y88b d88P Y88b 888 888 888 Y88b d88P Y8b. 888 888 X88 Y8b. $versionDisplay = (Get-Content $envPath | Select-String '^SURFSENSE_VERSION=' | ForEach-Object { ($_ -split '=',2)[1].Trim('"') }) | Select-Object -First 1 if (-not $versionDisplay) { $versionDisplay = "latest" } -Write-Host " Your personal AI-powered search engine [$versionDisplay]" -ForegroundColor Yellow +Write-Host " OSS Alternative to NotebookLM for Teams [$versionDisplay]" -ForegroundColor Yellow Write-Host ("=" * 62) -ForegroundColor Cyan Write-Host "" diff --git a/docker/scripts/install.sh b/docker/scripts/install.sh index f7729be00..c4a0d5c9f 100644 --- a/docker/scripts/install.sh +++ b/docker/scripts/install.sh @@ -25,7 +25,7 @@ set -euo pipefail main() { -REPO_RAW="https://raw.githubusercontent.com/MODSetter/SurfSense/dev" +REPO_RAW="https://raw.githubusercontent.com/MODSetter/SurfSense/main" INSTALL_DIR="./surfsense" OLD_VOLUME="surfsense-data" DUMP_FILE="./surfsense_migration_backup.sql" @@ -301,7 +301,7 @@ Y88b d88P Y88b 888 888 888 Y88b d88P Y8b. 888 888 X88 Y8b. 
EOF _version_display=$(grep '^SURFSENSE_VERSION=' "${INSTALL_DIR}/.env" 2>/dev/null | cut -d= -f2 | tr -d '"' | head -1 || true) _version_display="${_version_display:-latest}" -printf " Your personal AI-powered search engine ${YELLOW}[%s]${NC}\n" "${_version_display}" +printf " OSS Alternative to NotebookLM for Teams ${YELLOW}[%s]${NC}\n" "${_version_display}" printf "${CYAN}══════════════════════════════════════════════════════════════${NC}\n\n" info " Frontend: http://localhost:3000" diff --git a/surfsense_backend/app/agents/podcaster/nodes.py b/surfsense_backend/app/agents/podcaster/nodes.py index 3f908737a..4bdfdfc48 100644 --- a/surfsense_backend/app/agents/podcaster/nodes.py +++ b/surfsense_backend/app/agents/podcaster/nodes.py @@ -12,7 +12,7 @@ from litellm import aspeech from app.config import config as app_config from app.services.kokoro_tts_service import get_kokoro_tts_service -from app.services.llm_service import get_document_summary_llm +from app.services.llm_service import get_agent_llm from .configuration import Configuration from .prompts import get_podcast_generation_prompt @@ -31,7 +31,7 @@ async def create_podcast_transcript( user_prompt = configuration.user_prompt - # Get search space's document summary LLM - llm = await get_document_summary_llm(state.db_session, search_space_id) + # Get search space's agent LLM + llm = await get_agent_llm(state.db_session, search_space_id) if not llm: error_message = ( - f"No document summary LLM configured for search space {search_space_id}" + f"No agent LLM configured for search space {search_space_id}" diff --git a/surfsense_backend/app/indexing_pipeline/adapters/file_upload_adapter.py b/surfsense_backend/app/indexing_pipeline/adapters/file_upload_adapter.py index ab1095ee3..0bbb67105 100644 --- a/surfsense_backend/app/indexing_pipeline/adapters/file_upload_adapter.py +++ b/surfsense_backend/app/indexing_pipeline/adapters/file_upload_adapter.py @@ -1,47 +1,83 @@ from sqlalchemy.ext.asyncio import AsyncSession -from app.db import DocumentStatus, DocumentType +from app.db import Document, DocumentStatus, DocumentType from app.indexing_pipeline.connector_document import ConnectorDocument +from app.indexing_pipeline.document_hashing import compute_content_hash from app.indexing_pipeline.indexing_pipeline_service import IndexingPipelineService -async def index_uploaded_file( - markdown_content: str, - filename: str, - etl_service: str, - search_space_id: int, - user_id: str, - session: AsyncSession, - llm, - should_summarize: bool = False, -) -> None: - connector_doc = ConnectorDocument( - title=filename, - source_markdown=markdown_content, - unique_id=filename, - document_type=DocumentType.FILE, - search_space_id=search_space_id, - created_by_id=user_id, - connector_id=None, - should_summarize=should_summarize, - should_use_code_chunker=False, - fallback_summary=markdown_content[:4000], - metadata={ - "FILE_NAME": filename, - "ETL_SERVICE": etl_service, - }, - ) +class UploadDocumentAdapter: + def __init__(self, session: AsyncSession) -> None: + self._session = session + self._service = IndexingPipelineService(session) - service = IndexingPipelineService(session) - documents = await service.prepare_for_indexing([connector_doc]) + async def index( + self, + markdown_content: str, + filename: str, + etl_service: str, + search_space_id: int, + user_id: str, + llm, + should_summarize: bool = False, + ) -> None: + connector_doc = ConnectorDocument( + title=filename, + source_markdown=markdown_content, + unique_id=filename, + document_type=DocumentType.FILE, + search_space_id=search_space_id, + created_by_id=user_id, + connector_id=None, +
should_summarize=should_summarize, + should_use_code_chunker=False, + fallback_summary=markdown_content[:4000], + metadata={ + "FILE_NAME": filename, + "ETL_SERVICE": etl_service, + }, + ) - if not documents: - raise RuntimeError("prepare_for_indexing returned no documents") + documents = await self._service.prepare_for_indexing([connector_doc]) - indexed = await service.index(documents[0], connector_doc, llm) + if not documents: + raise RuntimeError("prepare_for_indexing returned no documents") - if not DocumentStatus.is_state(indexed.status, DocumentStatus.READY): - raise RuntimeError(indexed.status.get("reason", "Indexing failed")) + indexed = await self._service.index(documents[0], connector_doc, llm) - indexed.content_needs_reindexing = False - await session.commit() + if not DocumentStatus.is_state(indexed.status, DocumentStatus.READY): + raise RuntimeError(indexed.status.get("reason", "Indexing failed")) + + indexed.content_needs_reindexing = False + await self._session.commit() + + async def reindex(self, document: Document, llm) -> None: + """Re-index an existing document after its source_markdown has been updated.""" + if not document.source_markdown: + raise RuntimeError("Document has no source_markdown to reindex") + + metadata = document.document_metadata or {} + + connector_doc = ConnectorDocument( + title=document.title, + source_markdown=document.source_markdown, + unique_id=document.title, + document_type=document.document_type, + search_space_id=document.search_space_id, + created_by_id=str(document.created_by_id), + connector_id=document.connector_id, + should_summarize=True, + should_use_code_chunker=False, + fallback_summary=document.source_markdown[:4000], + metadata=metadata, + ) + + document.content_hash = compute_content_hash(connector_doc) + + indexed = await self._service.index(document, connector_doc, llm) + + if not DocumentStatus.is_state(indexed.status, DocumentStatus.READY): + raise RuntimeError(indexed.status.get("reason", "Reindexing failed")) + + indexed.content_needs_reindexing = False + await self._session.commit() diff --git a/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py b/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py index a1fca469e..c2dbe7700 100644 --- a/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py +++ b/surfsense_backend/app/tasks/celery_tasks/document_reindex_tasks.py @@ -2,19 +2,16 @@ import logging -from sqlalchemy import delete, select +from sqlalchemy import select from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import selectinload from app.celery_app import celery_app from app.db import Document +from app.indexing_pipeline.adapters.file_upload_adapter import UploadDocumentAdapter from app.services.llm_service import get_user_long_context_llm from app.services.task_logging_service import TaskLoggingService from app.tasks.celery_tasks import get_celery_session_maker -from app.utils.document_converters import ( - create_document_chunks, - generate_document_summary, -) logger = logging.getLogger(__name__) @@ -42,7 +39,6 @@ def reindex_document_task(self, document_id: int, user_id: str): async def _reindex_document(document_id: int, user_id: str): """Async function to reindex a document.""" async with get_celery_session_maker()() as session: - # First, get the document to get search_space_id for logging result = await session.execute( select(Document) .options(selectinload(Document.chunks)) @@ -54,10 +50,8 @@ async def _reindex_document(document_id: int, user_id: str): 
logger.error(f"Document {document_id} not found") return - # Initialize task logger task_logger = TaskLoggingService(session, document.search_space_id) - # Log task start log_entry = await task_logger.log_task_start( task_name="document_reindex", source="editor", @@ -71,10 +65,7 @@ async def _reindex_document(document_id: int, user_id: str): ) try: - # Read markdown directly from source_markdown - markdown_content = document.source_markdown - - if not markdown_content: + if not document.source_markdown: await task_logger.log_task_failure( log_entry, f"Document {document_id} has no source_markdown to reindex", @@ -85,51 +76,17 @@ async def _reindex_document(document_id: int, user_id: str): logger.info(f"Reindexing document {document_id} ({document.title})") - # 1. Delete old chunks explicitly - from app.db import Chunk - - await session.execute(delete(Chunk).where(Chunk.document_id == document_id)) - await session.flush() # Ensure old chunks are deleted - - # 2. Create new chunks from source_markdown - new_chunks = await create_document_chunks(markdown_content) - - # 3. Add new chunks to session - for chunk in new_chunks: - chunk.document_id = document_id - session.add(chunk) - - logger.info(f"Created {len(new_chunks)} chunks for document {document_id}") - - # 4. Regenerate summary user_llm = await get_user_long_context_llm( session, user_id, document.search_space_id ) - document_metadata = { - "title": document.title, - "document_type": document.document_type.value, - } + adapter = UploadDocumentAdapter(session) + await adapter.reindex(document=document, llm=user_llm) - summary_content, summary_embedding = await generate_document_summary( - markdown_content, user_llm, document_metadata - ) - - # 5. Update document - document.content = summary_content - document.embedding = summary_embedding - document.content_needs_reindexing = False - - await session.commit() - - # Log success await task_logger.log_task_success( log_entry, f"Successfully reindexed document: {document.title}", - { - "chunks_created": len(new_chunks), - "document_id": document_id, - }, + {"document_id": document_id}, ) logger.info(f"Successfully reindexed document {document_id}") diff --git a/surfsense_backend/app/tasks/document_processors/file_processors.py b/surfsense_backend/app/tasks/document_processors/file_processors.py index b77777e06..5e97951bd 100644 --- a/surfsense_backend/app/tasks/document_processors/file_processors.py +++ b/surfsense_backend/app/tasks/document_processors/file_processors.py @@ -18,7 +18,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from app.config import config as app_config from app.db import Document, DocumentStatus, DocumentType, Log, Notification -from app.indexing_pipeline.adapters.file_upload_adapter import index_uploaded_file +from app.indexing_pipeline.adapters.file_upload_adapter import UploadDocumentAdapter from app.services.llm_service import get_user_long_context_llm from app.services.notification_service import NotificationService from app.services.task_logging_service import TaskLoggingService @@ -1871,13 +1871,13 @@ async def process_file_in_background_with_document( user_llm = await get_user_long_context_llm(session, user_id, search_space_id) - await index_uploaded_file( + adapter = UploadDocumentAdapter(session) + await adapter.index( markdown_content=markdown_content, filename=filename, etl_service=etl_service, search_space_id=search_space_id, user_id=user_id, - session=session, llm=user_llm, should_summarize=should_summarize, ) diff --git 
a/surfsense_backend/tests/integration/indexing_pipeline/adapters/test_file_upload_adapter.py b/surfsense_backend/tests/integration/indexing_pipeline/adapters/test_file_upload_adapter.py index 193e4bd80..fa0fe5787 100644 --- a/surfsense_backend/tests/integration/indexing_pipeline/adapters/test_file_upload_adapter.py +++ b/surfsense_backend/tests/integration/indexing_pipeline/adapters/test_file_upload_adapter.py @@ -2,7 +2,7 @@ import pytest from sqlalchemy import select from app.db import Chunk, Document, DocumentStatus -from app.indexing_pipeline.adapters.file_upload_adapter import index_uploaded_file +from app.indexing_pipeline.adapters.file_upload_adapter import UploadDocumentAdapter pytestmark = pytest.mark.integration @@ -12,13 +12,13 @@ pytestmark = pytest.mark.integration ) async def test_sets_status_ready(db_session, db_search_space, db_user, mocker): """Document status is READY after successful indexing.""" - await index_uploaded_file( + adapter = UploadDocumentAdapter(db_session) + await adapter.index( markdown_content="## Hello\n\nSome content.", filename="test.pdf", etl_service="UNSTRUCTURED", search_space_id=db_search_space.id, user_id=str(db_user.id), - session=db_session, llm=mocker.Mock(), ) @@ -35,14 +35,15 @@ async def test_sets_status_ready(db_session, db_search_space, db_user, mocker): ) async def test_content_is_summary(db_session, db_search_space, db_user, mocker): """Document content is set to the LLM-generated summary.""" - await index_uploaded_file( + adapter = UploadDocumentAdapter(db_session) + await adapter.index( markdown_content="## Hello\n\nSome content.", filename="test.pdf", etl_service="UNSTRUCTURED", search_space_id=db_search_space.id, user_id=str(db_user.id), - session=db_session, llm=mocker.Mock(), + should_summarize=True, ) result = await db_session.execute( @@ -58,13 +59,13 @@ async def test_content_is_summary(db_session, db_search_space, db_user, mocker): ) async def test_chunks_written_to_db(db_session, db_search_space, db_user, mocker): """Chunks derived from the source markdown are persisted in the DB.""" - await index_uploaded_file( + adapter = UploadDocumentAdapter(db_session) + await adapter.index( markdown_content="## Hello\n\nSome content.", filename="test.pdf", etl_service="UNSTRUCTURED", search_space_id=db_search_space.id, user_id=str(db_user.id), - session=db_session, llm=mocker.Mock(), ) @@ -87,13 +88,239 @@ async def test_chunks_written_to_db(db_session, db_search_space, db_user, mocker ) async def test_raises_on_indexing_failure(db_session, db_search_space, db_user, mocker): """RuntimeError is raised when the indexing step fails so the caller can fire a failure notification.""" - with pytest.raises(RuntimeError): - await index_uploaded_file( + adapter = UploadDocumentAdapter(db_session) + with pytest.raises(RuntimeError, match=r"Embedding failed|Indexing failed"): + await adapter.index( markdown_content="## Hello\n\nSome content.", filename="test.pdf", etl_service="UNSTRUCTURED", search_space_id=db_search_space.id, user_id=str(db_user.id), - session=db_session, llm=mocker.Mock(), + should_summarize=True, ) + + +# --------------------------------------------------------------------------- +# reindex() tests +# --------------------------------------------------------------------------- + + +@pytest.mark.usefixtures( + "patched_summarize", "patched_embed_text", "patched_chunk_text" +) +async def test_reindex_updates_content(db_session, db_search_space, db_user, mocker): + """Document content is updated to the new summary after 
reindexing.""" + adapter = UploadDocumentAdapter(db_session) + await adapter.index( + markdown_content="## Original\n\nOriginal content.", + filename="test.pdf", + etl_service="UNSTRUCTURED", + search_space_id=db_search_space.id, + user_id=str(db_user.id), + llm=mocker.Mock(), + ) + + result = await db_session.execute( + select(Document).filter(Document.search_space_id == db_search_space.id) + ) + document = result.scalars().first() + + document.source_markdown = "## Edited\n\nNew content after user edit." + await db_session.flush() + + await adapter.reindex(document=document, llm=mocker.Mock()) + + await db_session.refresh(document) + assert document.content == "Mocked summary." + + +@pytest.mark.usefixtures( + "patched_summarize", "patched_embed_text", "patched_chunk_text" +) +async def test_reindex_updates_content_hash( + db_session, db_search_space, db_user, mocker +): + """Content hash is recomputed after reindexing with new source markdown.""" + adapter = UploadDocumentAdapter(db_session) + await adapter.index( + markdown_content="## Original\n\nOriginal content.", + filename="test.pdf", + etl_service="UNSTRUCTURED", + search_space_id=db_search_space.id, + user_id=str(db_user.id), + llm=mocker.Mock(), + ) + + result = await db_session.execute( + select(Document).filter(Document.search_space_id == db_search_space.id) + ) + document = result.scalars().first() + original_hash = document.content_hash + + document.source_markdown = "## Edited\n\nNew content after user edit." + await db_session.flush() + + await adapter.reindex(document=document, llm=mocker.Mock()) + + await db_session.refresh(document) + assert document.content_hash != original_hash + + +@pytest.mark.usefixtures( + "patched_summarize", "patched_embed_text", "patched_chunk_text" +) +async def test_reindex_sets_status_ready(db_session, db_search_space, db_user, mocker): + """Document status is READY after successful reindexing.""" + adapter = UploadDocumentAdapter(db_session) + await adapter.index( + markdown_content="## Original\n\nOriginal content.", + filename="test.pdf", + etl_service="UNSTRUCTURED", + search_space_id=db_search_space.id, + user_id=str(db_user.id), + llm=mocker.Mock(), + ) + + result = await db_session.execute( + select(Document).filter(Document.search_space_id == db_search_space.id) + ) + document = result.scalars().first() + + document.source_markdown = "## Edited\n\nNew content after user edit." + await db_session.flush() + + await adapter.reindex(document=document, llm=mocker.Mock()) + + await db_session.refresh(document) + assert DocumentStatus.is_state(document.status, DocumentStatus.READY) + + +@pytest.mark.usefixtures("patched_summarize", "patched_embed_text") +async def test_reindex_replaces_chunks(db_session, db_search_space, db_user, mocker): + """Reindexing replaces old chunks with new content rather than appending.""" + mocker.patch( + "app.indexing_pipeline.indexing_pipeline_service.chunk_text", + side_effect=[["Original chunk."], ["Updated chunk."]], + ) + + adapter = UploadDocumentAdapter(db_session) + await adapter.index( + markdown_content="## Original\n\nOriginal content.", + filename="test.pdf", + etl_service="UNSTRUCTURED", + search_space_id=db_search_space.id, + user_id=str(db_user.id), + llm=mocker.Mock(), + ) + + result = await db_session.execute( + select(Document).filter(Document.search_space_id == db_search_space.id) + ) + document = result.scalars().first() + document_id = document.id + + document.source_markdown = "## Edited\n\nNew content after user edit." 
+ await db_session.flush() + + await adapter.reindex(document=document, llm=mocker.Mock()) + + chunks_result = await db_session.execute( + select(Chunk).filter(Chunk.document_id == document_id) + ) + chunks = chunks_result.scalars().all() + + assert len(chunks) == 1 + assert chunks[0].content == "Updated chunk." + + +@pytest.mark.usefixtures( + "patched_summarize", "patched_embed_text", "patched_chunk_text" +) +async def test_reindex_clears_reindexing_flag( + db_session, db_search_space, db_user, mocker +): + """After successful reindex, content_needs_reindexing is False.""" + adapter = UploadDocumentAdapter(db_session) + await adapter.index( + markdown_content="## Original\n\nOriginal content.", + filename="test.pdf", + etl_service="UNSTRUCTURED", + search_space_id=db_search_space.id, + user_id=str(db_user.id), + llm=mocker.Mock(), + ) + + result = await db_session.execute( + select(Document).filter(Document.search_space_id == db_search_space.id) + ) + document = result.scalars().first() + + document.source_markdown = "## Edited\n\nNew content after user edit." + document.content_needs_reindexing = True + await db_session.flush() + + await adapter.reindex(document=document, llm=mocker.Mock()) + + await db_session.refresh(document) + assert document.content_needs_reindexing is False + + +@pytest.mark.usefixtures("patched_embed_text", "patched_chunk_text") +async def test_reindex_raises_on_failure(db_session, db_search_space, db_user, mocker): + """RuntimeError is raised when reindexing fails so the caller can handle it.""" + mocker.patch( + "app.indexing_pipeline.indexing_pipeline_service.summarize_document", + return_value="Mocked summary.", + ) + + adapter = UploadDocumentAdapter(db_session) + await adapter.index( + markdown_content="## Original\n\nOriginal content.", + filename="test.pdf", + etl_service="UNSTRUCTURED", + search_space_id=db_search_space.id, + user_id=str(db_user.id), + llm=mocker.Mock(), + ) + + result = await db_session.execute( + select(Document).filter(Document.search_space_id == db_search_space.id) + ) + document = result.scalars().first() + + document.source_markdown = "## Edited\n\nNew content after user edit." 
+ await db_session.flush() + + mocker.patch( + "app.indexing_pipeline.indexing_pipeline_service.summarize_document", + side_effect=RuntimeError("LLM unavailable"), + ) + + with pytest.raises(RuntimeError, match=r"Embedding failed|Reindexing failed"): + await adapter.reindex(document=document, llm=mocker.Mock()) + + +async def test_reindex_raises_on_empty_source_markdown( + db_session, db_search_space, db_user, mocker +): + """Reindexing a document with no source_markdown raises immediately.""" + from app.db import DocumentType + + document = Document( + title="empty.pdf", + document_type=DocumentType.FILE, + content="placeholder", + content_hash="abc123", + unique_identifier_hash="def456", + source_markdown="", + search_space_id=db_search_space.id, + created_by_id=str(db_user.id), + ) + db_session.add(document) + await db_session.flush() + + adapter = UploadDocumentAdapter(db_session) + + with pytest.raises(RuntimeError, match="no source_markdown"): + await adapter.reindex(document=document, llm=mocker.Mock()) diff --git a/surfsense_browser_extension/routes/index.tsx b/surfsense_browser_extension/routes/index.tsx index 8df110be1..39aed5854 100644 --- a/surfsense_browser_extension/routes/index.tsx +++ b/surfsense_browser_extension/routes/index.tsx @@ -2,7 +2,7 @@ import { Route, Routes } from "react-router-dom"; import ApiKeyForm from "./pages/ApiKeyForm"; import HomePage from "./pages/HomePage"; -import "../tailwind.css"; +import "~tailwind.css"; export const Routing = () => ( diff --git a/surfsense_browser_extension/routes/pages/ApiKeyForm.tsx b/surfsense_browser_extension/routes/pages/ApiKeyForm.tsx index b6deb1c05..537eba3da 100644 --- a/surfsense_browser_extension/routes/pages/ApiKeyForm.tsx +++ b/surfsense_browser_extension/routes/pages/ApiKeyForm.tsx @@ -4,6 +4,8 @@ import { ReloadIcon } from "@radix-ui/react-icons"; import { useState } from "react"; import { useNavigate } from "react-router-dom"; import { Button } from "~/routes/ui/button"; +import { ConnectionSettingsButton } from "~/routes/ui/connection-settings-button"; +import { buildBackendUrl } from "~utils/backend-url"; const ApiKeyForm = () => { const navigation = useNavigate(); @@ -27,8 +29,7 @@ const ApiKeyForm = () => { setLoading(true); try { - // Verify token is valid by making a request to the API - const response = await fetch(`${process.env.PLASMO_PUBLIC_BACKEND_URL}/verify-token`, { + const response = await fetch(await buildBackendUrl("/verify-token"), { method: "GET", headers: { Authorization: `Bearer ${apiKey}`, @@ -53,6 +54,10 @@ const ApiKeyForm = () => { return (
+
+ +
+
SurfSense diff --git a/surfsense_browser_extension/routes/pages/HomePage.tsx b/surfsense_browser_extension/routes/pages/HomePage.tsx index 362c64056..9d8787d29 100644 --- a/surfsense_browser_extension/routes/pages/HomePage.tsx +++ b/surfsense_browser_extension/routes/pages/HomePage.tsx @@ -16,6 +16,7 @@ import React, { useEffect, useState } from "react"; import { useNavigate } from "react-router-dom"; import { cn } from "~/lib/utils"; import { Button } from "~/routes/ui/button"; +import { ConnectionSettingsButton } from "~/routes/ui/connection-settings-button"; import { Command, CommandEmpty, @@ -27,6 +28,7 @@ import { import { Popover, PopoverContent, PopoverTrigger } from "~/routes/ui/popover"; import { Label } from "~routes/ui/label"; import { useToast } from "~routes/ui/use-toast"; +import { buildBackendUrl } from "~utils/backend-url"; import { getRenderedHtml } from "~utils/commons"; import type { WebHistory } from "~utils/interfaces"; import Loading from "./Loading"; @@ -45,15 +47,19 @@ const HomePage = () => { const checkSearchSpaces = async () => { const storage = new Storage({ area: "local" }); const token = await storage.get("token"); + + if (!token) { + setLoading(false); + navigation("/login"); + return; + } + try { - const response = await fetch( - `${process.env.PLASMO_PUBLIC_BACKEND_URL}/api/v1/searchspaces`, - { - headers: { - Authorization: `Bearer ${token}`, - }, + const response = await fetch(await buildBackendUrl("/api/v1/searchspaces"), { + headers: { + Authorization: `Bearer ${token}`, + } - ); + }); if (!response.ok) { throw new Error("Token verification failed"); @@ -66,11 +72,12 @@ const HomePage = () => { await storage.remove("token"); await storage.remove("showShadowDom"); navigation("/login"); + } finally { + setLoading(false); } }; checkSearchSpaces(); - setLoading(false); }, []); useEffect(() => { @@ -304,6 +311,19 @@ const HomePage = () => { navigation("/login"); } + async function handleConnectionSaved(changed: boolean): Promise<void> { + if (!changed) { + return; + } + + const storage = new Storage({ area: "local" }); + await storage.remove("token"); + await storage.remove("showShadowDom"); + await storage.remove("search_space"); + await storage.remove("search_space_id"); + navigation("/login"); + } + if (loading) { return <Loading />; } else { @@ -344,15 +364,18 @@ 

SurfSense

- +
+ + +
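The extension changes above all follow one pattern: every request resolves its base URL at call time through `buildBackendUrl` (defined in `utils/backend-url.ts` below), so an override saved in extension storage takes effect without rebuilding the extension. A minimal sketch of the resulting call pattern; `fetchSearchSpaces` is a hypothetical name for illustration, not a function added by this PR:

```typescript
import { Storage } from "@plasmohq/storage";
import { buildBackendUrl } from "~utils/backend-url";

// Hypothetical helper mirroring the HomePage change: the backend origin is
// read from extension storage (falling back to PLASMO_PUBLIC_BACKEND_URL)
// on every request rather than being baked in at build time.
async function fetchSearchSpaces(): Promise<unknown> {
	const storage = new Storage({ area: "local" });
	const token = await storage.get("token");

	// buildBackendUrl awaits the stored override (or default) and joins the path.
	const response = await fetch(await buildBackendUrl("/api/v1/searchspaces"), {
		headers: { Authorization: `Bearer ${token}` },
	});

	if (!response.ok) {
		throw new Error("Token verification failed");
	}
	return response.json();
}
```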
diff --git a/surfsense_browser_extension/routes/ui/connection-settings-button.tsx b/surfsense_browser_extension/routes/ui/connection-settings-button.tsx new file mode 100644 index 000000000..f68f252a5 --- /dev/null +++ b/surfsense_browser_extension/routes/ui/connection-settings-button.tsx @@ -0,0 +1,114 @@ +import { GearIcon } from "@radix-ui/react-icons"; +import { useEffect, useState } from "react"; +import { Button } from "~/routes/ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from "~/routes/ui/dialog"; +import { Label } from "~/routes/ui/label"; +import { + DEFAULT_BACKEND_BASE_URL, + getCustomBackendBaseUrl, + normalizeBackendBaseUrl, + setCustomBackendBaseUrl, +} from "~utils/backend-url"; + +type ConnectionSettingsButtonProps = { + onSaved?: (changed: boolean) => void | Promise<void>; +}; + +export function ConnectionSettingsButton({ onSaved }: ConnectionSettingsButtonProps) { + const [open, setOpen] = useState(false); + const [customUrl, setCustomUrl] = useState(""); + const [savedUrl, setSavedUrl] = useState(""); + + useEffect(() => { + if (!open) { + return; + } + + const loadSettings = async () => { + const normalized = await getCustomBackendBaseUrl(); + setCustomUrl(normalized || DEFAULT_BACKEND_BASE_URL); + setSavedUrl(normalized); + }; + + loadSettings(); + }, [open]); + + const handleSave = async () => { + const normalizedUrl = normalizeBackendBaseUrl(customUrl); + const nextUrl = await setCustomBackendBaseUrl( + normalizedUrl === DEFAULT_BACKEND_BASE_URL ? "" : normalizedUrl + ); + const changed = nextUrl !== savedUrl; + setSavedUrl(nextUrl); + setCustomUrl(nextUrl || DEFAULT_BACKEND_BASE_URL); + setOpen(false); + + if (onSaved) { + await onSaved(changed); + } + }; + + return ( + <> + + + + + Connection Settings + + Leave blank to use the default SurfSense backend URL. + + + +
+ + setCustomUrl(event.target.value)} + placeholder={DEFAULT_BACKEND_BASE_URL} + className="w-full rounded-md border border-gray-700 bg-gray-900 px-3 py-2 text-white placeholder:text-gray-500 focus:outline-none focus:ring-2 focus:ring-teal-500" + /> +

Default: {DEFAULT_BACKEND_BASE_URL}

+
+ + + + + +
+
+ + ); +} diff --git a/surfsense_browser_extension/utils/backend-url.ts b/surfsense_browser_extension/utils/backend-url.ts new file mode 100644 index 000000000..b295bf963 --- /dev/null +++ b/surfsense_browser_extension/utils/backend-url.ts @@ -0,0 +1,41 @@ +import { Storage } from "@plasmohq/storage"; + +export const BACKEND_URL_STORAGE_KEY = "backend_base_url"; +export const FALLBACK_BACKEND_BASE_URL = "https://www.surfsense.com"; + +const storage = new Storage({ area: "local" }); + +export function normalizeBackendBaseUrl(url: string) { + return url.trim().replace(/\/+$/, ""); +} + +export const DEFAULT_BACKEND_BASE_URL = normalizeBackendBaseUrl( + process.env.PLASMO_PUBLIC_BACKEND_URL || FALLBACK_BACKEND_BASE_URL +); + +export async function getCustomBackendBaseUrl() { + const value = await storage.get(BACKEND_URL_STORAGE_KEY); + return typeof value === "string" ? normalizeBackendBaseUrl(value) : ""; +} + +export async function setCustomBackendBaseUrl(url: string) { + const normalized = normalizeBackendBaseUrl(url); + + if (normalized) { + await storage.set(BACKEND_URL_STORAGE_KEY, normalized); + return normalized; + } + + await storage.remove(BACKEND_URL_STORAGE_KEY); + return ""; +} + +export async function getBackendBaseUrl() { + return (await getCustomBackendBaseUrl()) || DEFAULT_BACKEND_BASE_URL; +} + +export async function buildBackendUrl(path: string) { + const baseUrl = await getBackendBaseUrl(); + const normalizedPath = path.startsWith("/") ? path : `/${path}`; + return `${baseUrl}${normalizedPath}`; +} diff --git a/surfsense_web/app/(home)/announcements/page.tsx b/surfsense_web/app/(home)/announcements/page.tsx index bfc883dfc..966c09f77 100644 --- a/surfsense_web/app/(home)/announcements/page.tsx +++ b/surfsense_web/app/(home)/announcements/page.tsx @@ -1,147 +1,9 @@ "use client"; -import { - Bell, - BellOff, - ExternalLink, - Info, - type Megaphone, - Rocket, - Wrench, - Zap, -} from "lucide-react"; -import Link from "next/link"; import { useEffect } from "react"; -import { Badge } from "@/components/ui/badge"; -import { Button } from "@/components/ui/button"; -import { - Card, - CardContent, - CardDescription, - CardFooter, - CardHeader, - CardTitle, -} from "@/components/ui/card"; -import type { AnnouncementCategory } from "@/contracts/types/announcement.types"; -import { type AnnouncementWithState, useAnnouncements } from "@/hooks/use-announcements"; -import { formatRelativeDate } from "@/lib/format-date"; - -// --------------------------------------------------------------------------- -// Category configuration -// --------------------------------------------------------------------------- - -const categoryConfig: Record< - AnnouncementCategory, - { - label: string; - icon: typeof Megaphone; - color: string; - badgeVariant: "default" | "secondary" | "destructive" | "outline"; - } -> = { - feature: { - label: "Feature", - icon: Rocket, - color: "text-emerald-500", - badgeVariant: "default", - }, - update: { - label: "Update", - icon: Zap, - color: "text-blue-500", - badgeVariant: "secondary", - }, - maintenance: { - label: "Maintenance", - icon: Wrench, - color: "text-amber-500", - badgeVariant: "outline", - }, - info: { - label: "Info", - icon: Info, - color: "text-muted-foreground", - badgeVariant: "secondary", - }, -}; - -// --------------------------------------------------------------------------- -// Announcement card -// --------------------------------------------------------------------------- - -function AnnouncementCard({ announcement }: { announcement: 
AnnouncementWithState }) { - const config = categoryConfig[announcement.category] ?? categoryConfig.info; - const Icon = config.icon; - - return ( - - -
-
-
- -
-
-
- {announcement.title} - - {config.label} - - {announcement.isImportant && ( - - - Important - - )} -
- - {formatRelativeDate(announcement.date)} - -
-
-
-
- - -

{announcement.description}

-
- - {announcement.link && ( - - - - )} -
- ); -} - -// --------------------------------------------------------------------------- -// Empty state -// --------------------------------------------------------------------------- - -function EmptyState() { - return ( -
-
- -
-

No announcements

-

- You're all caught up! New announcements will appear here. -

-
- ); -} +import { AnnouncementCard } from "@/components/announcements/AnnouncementCard"; +import { AnnouncementsEmptyState } from "@/components/announcements/AnnouncementsEmptyState"; +import { useAnnouncements } from "@/hooks/use-announcements"; // --------------------------------------------------------------------------- // Page @@ -171,7 +33,7 @@ export default function AnnouncementsPage() { {/* Content */}
{announcements.length === 0 ? ( - + ) : (
{announcements.map((announcement) => ( diff --git a/surfsense_web/app/verify-token/route.ts b/surfsense_web/app/verify-token/route.ts new file mode 100644 index 000000000..1c11d6ce0 --- /dev/null +++ b/surfsense_web/app/verify-token/route.ts @@ -0,0 +1,25 @@ +import { NextRequest, NextResponse } from "next/server"; + +const backendBaseUrl = (process.env.INTERNAL_FASTAPI_BACKEND_URL || "http://backend:8000").replace( + /\/+$/, + "" +); + +export async function GET(request: NextRequest) { + const response = await fetch(`${backendBaseUrl}/verify-token`, { + method: "GET", + headers: { + Authorization: request.headers.get("authorization") || "", + "X-API-Key": request.headers.get("x-api-key") || "", + }, + cache: "no-store", + }); + + return new NextResponse(response.body, { + status: response.status, + headers: { + "content-type": response.headers.get("content-type") || "application/json", + "cache-control": "no-store", + }, + }); +} diff --git a/surfsense_web/components/announcements/AnnouncementCard.tsx b/surfsense_web/components/announcements/AnnouncementCard.tsx new file mode 100644 index 000000000..daaecee07 --- /dev/null +++ b/surfsense_web/components/announcements/AnnouncementCard.tsx @@ -0,0 +1,117 @@ +"use client"; + +import { + Bell, + ExternalLink, + Info, + type LucideIcon, + Rocket, + Wrench, + Zap, +} from "lucide-react"; +import Link from "next/link"; +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { + Card, + CardContent, + CardDescription, + CardFooter, + CardHeader, + CardTitle, +} from "@/components/ui/card"; +import type { AnnouncementCategory } from "@/contracts/types/announcement.types"; +import type { AnnouncementWithState } from "@/hooks/use-announcements"; +import { formatRelativeDate } from "@/lib/format-date"; + +const categoryConfig: Record< + AnnouncementCategory, + { + label: string; + icon: LucideIcon; + color: string; + badgeVariant: "default" | "secondary" | "destructive" | "outline"; + } +> = { + feature: { + label: "Feature", + icon: Rocket, + color: "text-emerald-500", + badgeVariant: "default", + }, + update: { + label: "Update", + icon: Zap, + color: "text-blue-500", + badgeVariant: "secondary", + }, + maintenance: { + label: "Maintenance", + icon: Wrench, + color: "text-amber-500", + badgeVariant: "outline", + }, + info: { + label: "Info", + icon: Info, + color: "text-muted-foreground", + badgeVariant: "secondary", + }, +}; + +export function AnnouncementCard({ announcement }: { announcement: AnnouncementWithState }) { + const config = categoryConfig[announcement.category] ?? categoryConfig.info; + const Icon = config.icon; + + return ( + + +
+
+
+ +
+
+
+ {announcement.title} + + {config.label} + + {announcement.isImportant && ( + + + Important + + )} +
+ + {formatRelativeDate(announcement.date)} + +
+
+
+
+ + +

{announcement.description}

+
+ + {announcement.link && ( + + + + )} +
+ ); +} + diff --git a/surfsense_web/components/announcements/AnnouncementsEmptyState.tsx b/surfsense_web/components/announcements/AnnouncementsEmptyState.tsx new file mode 100644 index 000000000..2ae926b1f --- /dev/null +++ b/surfsense_web/components/announcements/AnnouncementsEmptyState.tsx @@ -0,0 +1,18 @@ +"use client"; + +import { BellOff } from "lucide-react"; + +export function AnnouncementsEmptyState() { + return ( +
+
+ +
+

No announcements

+

+ You're all caught up! New announcements will appear here. +

+
+ ); +} + diff --git a/surfsense_web/components/layout/providers/LayoutDataProvider.tsx b/surfsense_web/components/layout/providers/LayoutDataProvider.tsx index ee585a15e..3b5e127a8 100644 --- a/surfsense_web/components/layout/providers/LayoutDataProvider.tsx +++ b/surfsense_web/components/layout/providers/LayoutDataProvider.tsx @@ -124,6 +124,9 @@ export function LayoutDataProvider({ // Documents sidebar state (shared atom so Composer can toggle it) const [isDocumentsSidebarOpen, setIsDocumentsSidebarOpen] = useAtom(documentsSidebarOpenAtom); + // Announcements sidebar state + const [isAnnouncementsSidebarOpen, setIsAnnouncementsSidebarOpen] = useState(false); + // Search space dialog state const [isCreateSearchSpaceDialogOpen, setIsCreateSearchSpaceDialogOpen] = useState(false); @@ -267,7 +270,7 @@ export function LayoutDataProvider({ () => [ { title: "Inbox", - url: "#inbox", // Special URL to indicate this is handled differently + url: "#inbox", icon: Inbox, isActive: isInboxSidebarOpen, badge: totalUnreadCount > 0 ? formatInboxCount(totalUnreadCount) : undefined, @@ -281,17 +284,17 @@ export function LayoutDataProvider({ }, { title: "Announcements", - url: "/announcements", + url: "#announcements", icon: Megaphone, - isActive: pathname?.includes("/announcements"), + isActive: isAnnouncementsSidebarOpen, badge: announcementUnreadCount > 0 ? formatInboxCount(announcementUnreadCount) : undefined, }, ], [ - pathname, isInboxSidebarOpen, isDocumentsSidebarOpen, totalUnreadCount, + isAnnouncementsSidebarOpen, announcementUnreadCount, isDocumentsProcessing, ] @@ -386,25 +389,37 @@ export function LayoutDataProvider({ const handleNavItemClick = useCallback( (item: NavItem) => { - // Handle inbox specially - toggle sidebar instead of navigating if (item.url === "#inbox") { setIsInboxSidebarOpen((prev) => { if (!prev) { setIsAllSharedChatsSidebarOpen(false); setIsAllPrivateChatsSidebarOpen(false); setIsDocumentsSidebarOpen(false); + setIsAnnouncementsSidebarOpen(false); } return !prev; }); return; } - // Handle documents specially - toggle sidebar instead of navigating if (item.url === "#documents") { setIsDocumentsSidebarOpen((prev) => { if (!prev) { setIsInboxSidebarOpen(false); setIsAllSharedChatsSidebarOpen(false); setIsAllPrivateChatsSidebarOpen(false); + setIsAnnouncementsSidebarOpen(false); + } + return !prev; + }); + return; + } + if (item.url === "#announcements") { + setIsAnnouncementsSidebarOpen((prev) => { + if (!prev) { + setIsInboxSidebarOpen(false); + setIsAllSharedChatsSidebarOpen(false); + setIsAllPrivateChatsSidebarOpen(false); + setIsDocumentsSidebarOpen(false); } return !prev; }); @@ -510,6 +525,7 @@ export function LayoutDataProvider({ setIsAllPrivateChatsSidebarOpen(false); setIsInboxSidebarOpen(false); setIsDocumentsSidebarOpen(false); + setIsAnnouncementsSidebarOpen(false); }, [setIsDocumentsSidebarOpen]); const handleViewAllPrivateChats = useCallback(() => { @@ -517,6 +533,7 @@ export function LayoutDataProvider({ setIsAllSharedChatsSidebarOpen(false); setIsInboxSidebarOpen(false); setIsDocumentsSidebarOpen(false); + setIsAnnouncementsSidebarOpen(false); }, [setIsDocumentsSidebarOpen]); // Delete handlers @@ -633,6 +650,10 @@ export function LayoutDataProvider({ isDocked: isInboxDocked, onDockedChange: setIsInboxDocked, }} + announcementsPanel={{ + open: isAnnouncementsSidebarOpen, + onOpenChange: setIsAnnouncementsSidebarOpen, + }} allSharedChatsPanel={{ open: isAllSharedChatsSidebarOpen, onOpenChange: setIsAllSharedChatsSidebarOpen, diff --git 
a/surfsense_web/components/layout/ui/shell/LayoutShell.tsx b/surfsense_web/components/layout/ui/shell/LayoutShell.tsx index f1205bd7c..4a33e5e27 100644 --- a/surfsense_web/components/layout/ui/shell/LayoutShell.tsx +++ b/surfsense_web/components/layout/ui/shell/LayoutShell.tsx @@ -13,6 +13,7 @@ import { IconRail } from "../icon-rail"; import { AllPrivateChatsSidebar, AllSharedChatsSidebar, + AnnouncementsSidebar, DocumentsSidebar, InboxSidebar, MobileSidebar, @@ -77,6 +78,10 @@ interface LayoutShellProps { className?: string; // Inbox props inbox?: InboxProps; + announcementsPanel?: { + open: boolean; + onOpenChange: (open: boolean) => void; + }; isLoadingChats?: boolean; // All chats panel props allSharedChatsPanel?: { @@ -128,6 +133,7 @@ export function LayoutShell({ children, className, inbox, + announcementsPanel, isLoadingChats = false, allSharedChatsPanel, allPrivateChatsPanel, @@ -215,6 +221,15 @@ export function LayoutShell({ /> )} + {/* Mobile Announcements Sidebar */} + {announcementsPanel?.open && ( + setMobileMenuOpen(false)} + /> + )} + {/* Mobile All Shared Chats - slide-out panel */} {allSharedChatsPanel && ( )} + {/* Announcements Sidebar */} + {announcementsPanel && ( + + )} + {/* All Shared Chats - slide-out panel */} {allSharedChatsPanel && ( void; + onCloseMobileSidebar?: () => void; +} + +export function AnnouncementsSidebar({ + open, + onOpenChange, + onCloseMobileSidebar, +}: AnnouncementsSidebarProps) { + const isMobile = !useMediaQuery("(min-width: 640px)"); + const { announcements, markAllRead } = useAnnouncements(); + + useEffect(() => { + if (!open) return; + markAllRead(); + }, [open, markAllRead]); + + const body = ( +
+
+
+
+ {isMobile && ( + + )} +

Announcements

+
+
+
+ +
+ {announcements.length === 0 ? ( + + ) : ( +
+ {announcements.map((announcement) => ( + + ))} +
+ )} +
+
+ ); + + return ( + + {body} + + ); +} + diff --git a/surfsense_web/components/layout/ui/sidebar/index.ts b/surfsense_web/components/layout/ui/sidebar/index.ts index 8d295c776..4da08ef50 100644 --- a/surfsense_web/components/layout/ui/sidebar/index.ts +++ b/surfsense_web/components/layout/ui/sidebar/index.ts @@ -1,5 +1,6 @@ export { AllPrivateChatsSidebar } from "./AllPrivateChatsSidebar"; export { AllSharedChatsSidebar } from "./AllSharedChatsSidebar"; +export { AnnouncementsSidebar } from "./AnnouncementsSidebar"; export { ChatListItem } from "./ChatListItem"; export { DocumentsSidebar } from "./DocumentsSidebar"; export { InboxSidebar } from "./InboxSidebar"; diff --git a/surfsense_web/hooks/use-comments.ts b/surfsense_web/hooks/use-comments.ts index c02f9fe16..2f7128149 100644 --- a/surfsense_web/hooks/use-comments.ts +++ b/surfsense_web/hooks/use-comments.ts @@ -1,5 +1,6 @@ import { useQuery, useQueryClient } from "@tanstack/react-query"; import { useEffect, useRef } from "react"; +import type { GetCommentsResponse } from "@/contracts/types/chat-comments.types"; import { chatCommentsApiService } from "@/lib/apis/chat-comments-api.service"; import { cacheKeys } from "@/lib/query-client/cache-keys"; @@ -22,20 +23,20 @@ let _batchTargetIds = new Set(); let _batchReady: Promise<void> | null = null; let _resolveBatchReady: (() => void) | null = null; -function resetBatchGate() { +function resetBatchGate(resolveImmediately = false) { _batchReady = new Promise<void>((r) => { _resolveBatchReady = r; + if (resolveImmediately) r(); }); } // Open the initial gate immediately (no batch pending yet) -resetBatchGate(); -_resolveBatchReady?.(); +resetBatchGate(true); export function useComments({ messageId, enabled = true }: UseCommentsOptions) { const queryClient = useQueryClient(); - return useQuery({ + return useQuery<GetCommentsResponse>({ queryKey: cacheKeys.comments.byMessage(messageId), queryFn: async () => { // Wait for the batch gate so the useEffect in useBatchCommentsPreload @@ -46,7 +47,7 @@ export function useComments({ messageId, enabled = true }: UseCommentsOptions) { if (_batchInflight && _batchTargetIds.has(messageId)) { await _batchInflight; - const cached = queryClient.getQueryData(cacheKeys.comments.byMessage(messageId)); + const cached = queryClient.getQueryData<GetCommentsResponse>(cacheKeys.comments.byMessage(messageId)); if (cached) return cached; }
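The `use-comments.ts` change above folds the separate `_resolveBatchReady?.()` call into `resetBatchGate` itself. A standalone sketch of the promise-gate pattern that hunk relies on, assuming nothing beyond what the diff shows; the names here are illustrative, not the hook's actual exports:

```typescript
// A "gate" is a promise that consumers await before running. The producer
// closes the gate by creating a fresh pending promise, and opens it by
// invoking the captured resolver. resolveImmediately = true creates the
// gate already open, which models the initial "no batch pending" state.
let resolveGate: (() => void) | null = null;
let gate: Promise<void> = Promise.resolve();

function resetGate(resolveImmediately = false): void {
	gate = new Promise<void>((resolve) => {
		resolveGate = resolve;
		if (resolveImmediately) resolve();
	});
}

function releaseGate(): void {
	resolveGate?.();
}

// Initial state: nothing is batching, so waiters proceed at once.
resetGate(true);

async function runWhenGateOpen(task: () => Promise<void>): Promise<void> {
	await gate; // blocks only while a batch is being assembled
	await task();
}

// A batch preloader would call resetGate() to pause per-item queries,
// collect the ids it intends to fetch, then call releaseGate() to let
// the waiting queries through once the batch claim is registered.
```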