({
queryKey: isDocsChunk
? cacheKeys.documents.byChunk(`doc-${chunkId}`)
: cacheKeys.documents.byChunk(chunkId.toString()),
- queryFn: () =>
- isDocsChunk
- ? documentsApiService.getSurfsenseDocByChunk(chunkId)
- : documentsApiService.getDocumentByChunk({ chunk_id: chunkId }),
+ queryFn: async () => {
+ if (isDocsChunk) {
+ return documentsApiService.getSurfsenseDocByChunk(chunkId);
+ }
+ return documentsApiService.getDocumentByChunk({ chunk_id: chunkId });
+ },
enabled: !!chunkId && open,
staleTime: 5 * 60 * 1000,
});
@@ -332,7 +340,7 @@ export function SourceDetailPanel({
{documentData?.title || title || "Source Document"}
- {documentData
+ {documentData && "document_type" in documentData
? formatDocumentType(documentData.document_type)
: sourceType && formatDocumentType(sourceType)}
{documentData?.chunks && (
@@ -498,7 +506,8 @@ export function SourceDetailPanel({
{/* Document Metadata */}
- {documentData.document_metadata &&
+ {"document_metadata" in documentData &&
+ documentData.document_metadata &&
Object.keys(documentData.document_metadata).length > 0 && (
;
export type DeleteDocumentRequest = z.infer<typeof deleteDocumentRequest>;
export type DeleteDocumentResponse = z.infer<typeof deleteDocumentResponse>;
export type DocumentTypeEnum = z.infer<typeof documentTypeEnum>;
+export type SurfsenseDocsChunk = z.infer<typeof surfsenseDocsChunk>;
+export type SurfsenseDocsDocument = z.infer<typeof surfsenseDocsDocument>;
+export type SurfsenseDocsDocumentWithChunks = z.infer<typeof surfsenseDocsDocumentWithChunks>;
+export type GetSurfsenseDocsByChunkRequest = z.infer<typeof getSurfsenseDocsByChunkRequest>;
+export type GetSurfsenseDocsByChunkResponse = z.infer<typeof getSurfsenseDocsByChunkResponse>;
diff --git a/surfsense_web/lib/apis/documents-api.service.ts b/surfsense_web/lib/apis/documents-api.service.ts
index 372baee4d..2e7d18e44 100644
--- a/surfsense_web/lib/apis/documents-api.service.ts
+++ b/surfsense_web/lib/apis/documents-api.service.ts
@@ -17,6 +17,7 @@ import {
getDocumentsResponse,
getDocumentTypeCountsRequest,
getDocumentTypeCountsResponse,
+ getSurfsenseDocsByChunkResponse,
type SearchDocumentsRequest,
searchDocumentsRequest,
searchDocumentsResponse,
@@ -214,10 +215,9 @@ class DocumentsApiService {
* Used for resolving [citation:doc-XXX] citations
*/
getSurfsenseDocByChunk = async (chunkId: number) => {
- // Response shape matches getDocumentByChunkResponse structure
return baseApiService.get(
`/api/v1/surfsense-docs/by-chunk/${chunkId}`,
- getDocumentByChunkResponse
+ getSurfsenseDocsByChunkResponse
);
};
From f6621f9a9a2235972f11b9f4f671459e4f650e5e Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Mon, 12 Jan 2026 18:28:38 +0200
Subject: [PATCH 38/49] fix: copy docs content to Docker image for Surfsense
docs indexer
---
Dockerfile.allinone | 3 +++
1 file changed, 3 insertions(+)
diff --git a/Dockerfile.allinone b/Dockerfile.allinone
index 1c04ffb99..33ae32023 100644
--- a/Dockerfile.allinone
+++ b/Dockerfile.allinone
@@ -165,6 +165,9 @@ COPY --from=frontend-builder /app/.next/standalone ./
COPY --from=frontend-builder /app/.next/static ./.next/static
COPY --from=frontend-builder /app/public ./public
+# Copy docs content for Surfsense docs indexer (used at runtime for seeding)
+COPY surfsense_web/content/docs /app/surfsense_web/content/docs
+
# ====================
# Setup Backend
# ====================
From 96545056cdf2a88d608e983cfe5e1c63e195e6f5 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Mon, 12 Jan 2026 20:15:11 +0200
Subject: [PATCH 39/49] refactor: async docs seeding in FastAPI lifespan
---
scripts/docker/entrypoint-allinone.sh | 25 +----------
surfsense_backend/app/app.py | 3 ++
.../app/tasks/surfsense_docs_indexer.py | 45 +++++++++++++++----
.../scripts/seed_surfsense_docs.py | 41 +++++++----------
4 files changed, 58 insertions(+), 56 deletions(-)
diff --git a/scripts/docker/entrypoint-allinone.sh b/scripts/docker/entrypoint-allinone.sh
index 0888facf1..ef0ef28ed 100644
--- a/scripts/docker/entrypoint-allinone.sh
+++ b/scripts/docker/entrypoint-allinone.sh
@@ -145,36 +145,13 @@ run_migrations() {
echo "✅ Database migrations complete"
}
-# ================================================
-# Seed Surfsense documentation
-# ================================================
-seed_surfsense_docs() {
- echo "📚 Seeding Surfsense documentation..."
-
- # Start PostgreSQL temporarily for seeding
- su - postgres -c "/usr/lib/postgresql/14/bin/pg_ctl -D /data/postgres -l /tmp/postgres_seed.log start"
- sleep 5
-
- cd /app/backend
- python scripts/seed_surfsense_docs.py || echo "⚠️ Docs seeding may have already been done"
-
- # Stop PostgreSQL
- su - postgres -c "/usr/lib/postgresql/14/bin/pg_ctl -D /data/postgres stop"
-
- echo "✅ Surfsense documentation seeded"
-}
-
# Run migrations on first start or when explicitly requested
if [ ! -f /data/.migrations_run ] || [ "${FORCE_MIGRATIONS:-false}" = "true" ]; then
run_migrations
touch /data/.migrations_run
fi
-# Seed docs on first start or when explicitly requested
-if [ ! -f /data/.docs_seeded ] || [ "${FORCE_SEED_DOCS:-false}" = "true" ]; then
- seed_surfsense_docs
- touch /data/.docs_seeded
-fi
+# Note: Surfsense docs seeding is now handled by FastAPI startup (app.py lifespan)
# ================================================
# Environment Variables Info
diff --git a/surfsense_backend/app/app.py b/surfsense_backend/app/app.py
index 993961148..3ad9d89bc 100644
--- a/surfsense_backend/app/app.py
+++ b/surfsense_backend/app/app.py
@@ -13,6 +13,7 @@ from app.config import config
from app.db import User, create_db_and_tables, get_async_session
from app.routes import router as crud_router
from app.schemas import UserCreate, UserRead, UserUpdate
+from app.tasks.surfsense_docs_indexer import seed_surfsense_docs
from app.users import SECRET, auth_backend, current_active_user, fastapi_users
@@ -22,6 +23,8 @@ async def lifespan(app: FastAPI):
await create_db_and_tables()
# Setup LangGraph checkpointer tables for conversation persistence
await setup_checkpointer_tables()
+ # Seed Surfsense documentation
+ await seed_surfsense_docs()
yield
# Cleanup: close checkpointer connection on shutdown
await close_checkpointer()
diff --git a/surfsense_backend/app/tasks/surfsense_docs_indexer.py b/surfsense_backend/app/tasks/surfsense_docs_indexer.py
index 51a1c0938..f2c1e69ba 100644
--- a/surfsense_backend/app/tasks/surfsense_docs_indexer.py
+++ b/surfsense_backend/app/tasks/surfsense_docs_indexer.py
@@ -1,6 +1,6 @@
"""
Surfsense documentation indexer.
-Indexes MDX documentation files at migration time.
+Indexes MDX documentation files at startup.
"""
import hashlib
@@ -10,10 +10,11 @@ from datetime import UTC, datetime
from pathlib import Path
from sqlalchemy import select
-from sqlalchemy.orm import Session, selectinload
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import selectinload
from app.config import config
-from app.db import SurfsenseDocsChunk, SurfsenseDocsDocument
+from app.db import SurfsenseDocsChunk, SurfsenseDocsDocument, async_session_maker
logger = logging.getLogger(__name__)
@@ -89,12 +90,12 @@ def create_surfsense_docs_chunks(content: str) -> list[SurfsenseDocsChunk]:
]
-def index_surfsense_docs(session: Session) -> tuple[int, int, int, int]:
+async def index_surfsense_docs(session: AsyncSession) -> tuple[int, int, int, int]:
"""
Index all Surfsense documentation files.
Args:
- session: SQLAlchemy sync session
+ session: SQLAlchemy async session
Returns:
Tuple of (created, updated, skipped, deleted) counts
@@ -105,7 +106,7 @@ def index_surfsense_docs(session: Session) -> tuple[int, int, int, int]:
deleted = 0
# Get all existing docs from database
- existing_docs_result = session.execute(
+ existing_docs_result = await session.execute(
select(SurfsenseDocsDocument).options(selectinload(SurfsenseDocsDocument.chunks))
)
existing_docs = {doc.source: doc for doc in existing_docs_result.scalars().all()}
@@ -178,11 +179,11 @@ def index_surfsense_docs(session: Session) -> tuple[int, int, int, int]:
for source, doc in existing_docs.items():
if source not in processed_sources:
logger.info(f"Deleting removed document: {source}")
- session.delete(doc)
+ await session.delete(doc)
deleted += 1
# Commit all changes
- session.commit()
+ await session.commit()
logger.info(
f"Indexing complete: {created} created, {updated} updated, "
@@ -191,3 +192,31 @@ def index_surfsense_docs(session: Session) -> tuple[int, int, int, int]:
return created, updated, skipped, deleted
+
+async def seed_surfsense_docs() -> tuple[int, int, int, int]:
+ """
+ Seed Surfsense documentation into the database.
+
+ This function indexes all MDX files from the docs directory.
+ It handles creating, updating, and deleting docs based on content changes.
+
+ Returns:
+ Tuple of (created, updated, skipped, deleted) counts
+ Returns (0, 0, 0, 0) if an error occurs
+ """
+ logger.info("Starting Surfsense docs indexing...")
+
+ try:
+ async with async_session_maker() as session:
+ created, updated, skipped, deleted = await index_surfsense_docs(session)
+
+ logger.info(
+ f"Surfsense docs indexing complete: "
+ f"created={created}, updated={updated}, skipped={skipped}, deleted={deleted}"
+ )
+
+ return created, updated, skipped, deleted
+
+ except Exception as e:
+ logger.error(f"Failed to seed Surfsense docs: {e}", exc_info=True)
+ return 0, 0, 0, 0
diff --git a/surfsense_backend/scripts/seed_surfsense_docs.py b/surfsense_backend/scripts/seed_surfsense_docs.py
index 2e9eee649..d9536bf91 100644
--- a/surfsense_backend/scripts/seed_surfsense_docs.py
+++ b/surfsense_backend/scripts/seed_surfsense_docs.py
@@ -1,47 +1,40 @@
#!/usr/bin/env python
"""
Seed Surfsense documentation into the database.
-Run this script after migrations to index MDX documentation files.
+
+CLI wrapper for the seed_surfsense_docs function.
+Can be run manually for debugging or re-indexing.
Usage:
python scripts/seed_surfsense_docs.py
"""
+import asyncio
import sys
from pathlib import Path
# Add the parent directory to the path so we can import app modules
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
-from sqlalchemy import create_engine
-from sqlalchemy.orm import Session
-
-from app.config import config
-from app.tasks.surfsense_docs_indexer import index_surfsense_docs
+from app.tasks.surfsense_docs_indexer import seed_surfsense_docs
def main():
- """Main entry point for seeding Surfsense docs."""
- print("Starting Surfsense docs seeding...")
+ """CLI entry point for seeding Surfsense docs."""
+ print("=" * 50)
+ print(" Surfsense Documentation Seeding")
+ print("=" * 50)
- # Create sync engine from database URL
- # Convert async URL to sync if needed
- database_url = config.DATABASE_URL
- if database_url.startswith("postgresql+asyncpg://"):
- database_url = database_url.replace("postgresql+asyncpg://", "postgresql://")
+ created, updated, skipped, deleted = asyncio.run(seed_surfsense_docs())
- engine = create_engine(database_url)
-
- with Session(engine) as session:
- created, updated, skipped, deleted = index_surfsense_docs(session)
-
- print(f"\nSurfsense docs seeding complete:")
- print(f" Created: {created}")
- print(f" Updated: {updated}")
- print(f" Skipped: {skipped}")
- print(f" Deleted: {deleted}")
+ print()
+ print("Results:")
+ print(f" Created: {created}")
+ print(f" Updated: {updated}")
+ print(f" Skipped: {skipped}")
+ print(f" Deleted: {deleted}")
+ print("=" * 50)
if __name__ == "__main__":
main()
-
From 19ef32539d33a2c30f757c062576b267abd426a6 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Mon, 12 Jan 2026 20:20:48 +0200
Subject: [PATCH 40/49] cleanup: remove obsolete comments
---
Dockerfile.allinone | 1 -
scripts/docker/entrypoint-allinone.sh | 2 --
2 files changed, 3 deletions(-)
diff --git a/Dockerfile.allinone b/Dockerfile.allinone
index 33ae32023..95893c0b5 100644
--- a/Dockerfile.allinone
+++ b/Dockerfile.allinone
@@ -165,7 +165,6 @@ COPY --from=frontend-builder /app/.next/standalone ./
COPY --from=frontend-builder /app/.next/static ./.next/static
COPY --from=frontend-builder /app/public ./public
-# Copy docs content for Surfsense docs indexer (used at runtime for seeding)
COPY surfsense_web/content/docs /app/surfsense_web/content/docs
# ====================
diff --git a/scripts/docker/entrypoint-allinone.sh b/scripts/docker/entrypoint-allinone.sh
index ef0ef28ed..8248968ab 100644
--- a/scripts/docker/entrypoint-allinone.sh
+++ b/scripts/docker/entrypoint-allinone.sh
@@ -151,8 +151,6 @@ if [ ! -f /data/.migrations_run ] || [ "${FORCE_MIGRATIONS:-false}" = "true" ];
touch /data/.migrations_run
fi
-# Note: Surfsense docs seeding is now handled by FastAPI startup (app.py lifespan)
-
# ================================================
# Environment Variables Info
# ================================================
From 842004e6170410c50021ad3d3286c669b8926781 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Mon, 12 Jan 2026 20:54:00 +0200
Subject: [PATCH 41/49] docs: add tool examples to system prompt
---
.../app/agents/new_chat/system_prompt.py | 19 +++++++++++++++++++
1 file changed, 19 insertions(+)
diff --git a/surfsense_backend/app/agents/new_chat/system_prompt.py b/surfsense_backend/app/agents/new_chat/system_prompt.py
index f9dfdb025..169363fe9 100644
--- a/surfsense_backend/app/agents/new_chat/system_prompt.py
+++ b/surfsense_backend/app/agents/new_chat/system_prompt.py
@@ -26,6 +26,13 @@ SURFSENSE_TOOLS_INSTRUCTIONS = """
You have access to the following tools:
+0. search_surfsense_docs: Search the official SurfSense documentation.
+ - Use this tool when the user asks anything about SurfSense itself (the application they are using).
+ - Args:
+ - query: The search query about SurfSense
+ - top_k: Number of documentation chunks to retrieve (default: 10)
+ - Returns: Documentation content with chunk IDs for citations (prefixed with 'doc-', e.g., [citation:doc-123])
+
1. search_knowledge_base: Search the user's personal knowledge base for relevant information.
- Args:
- query: The search query - be specific and include key terms
@@ -152,6 +159,18 @@ You have access to the following tools:
- Airtable/Notion: Check field values, apply mapping above
+- User: "How do I install SurfSense?"
+ - Call: `search_surfsense_docs(query="installation setup")`
+
+- User: "What connectors does SurfSense support?"
+ - Call: `search_surfsense_docs(query="available connectors integrations")`
+
+- User: "How do I set up the Notion connector?"
+ - Call: `search_surfsense_docs(query="Notion connector setup configuration")`
+
+- User: "How do I use Docker to run SurfSense?"
+ - Call: `search_surfsense_docs(query="Docker installation setup")`
+
- User: "Fetch all my notes and what's in them?"
- Call: `search_knowledge_base(query="*", top_k=50, connectors_to_search=["NOTE"])`
From c0b97fcc68cae577d6273a8570ff7717d364d814 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Mon, 12 Jan 2026 21:00:06 +0200
Subject: [PATCH 42/49] fix: add doc- prefix examples to citation instructions
---
surfsense_backend/app/agents/new_chat/system_prompt.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/surfsense_backend/app/agents/new_chat/system_prompt.py b/surfsense_backend/app/agents/new_chat/system_prompt.py
index 169363fe9..15fc17022 100644
--- a/surfsense_backend/app/agents/new_chat/system_prompt.py
+++ b/surfsense_backend/app/agents/new_chat/system_prompt.py
@@ -327,7 +327,7 @@ The documents you receive are structured like this:
-IMPORTANT: You MUST cite using the chunk ids (e.g. 123, 124). Do NOT cite document_id.
+IMPORTANT: You MUST cite using the chunk ids (e.g. 123, 124, doc-45). Do NOT cite document_id.
@@ -338,11 +338,13 @@ IMPORTANT: You MUST cite using the chunk ids (e.g. 123, 124). Do NOT cite docume
- NEVER create your own citation format - use the exact chunk_id values from the documents in the [citation:chunk_id] format
- NEVER format citations as clickable links or as markdown links like "([citation:5](https://example.com))". Always use plain square brackets only
- NEVER make up chunk IDs if you are unsure about the chunk_id. It is better to omit the citation than to guess
+- Copy the EXACT chunk id from the XML - if it says `<chunk id="doc-123">`, use [citation:doc-123]
CORRECT citation formats:
- [citation:5]
+- [citation:doc-123] (for Surfsense documentation chunks)
- [citation:chunk_id1], [citation:chunk_id2], [citation:chunk_id3]
INCORRECT citation formats (DO NOT use):
From abd4b1bd05dd3e25f479ec362075fe531abee530 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Mon, 12 Jan 2026 21:34:21 +0200
Subject: [PATCH 43/49] Remove unused WorkspaceAvatar.tsx
---
.../layout/ui/icon-rail/WorkspaceAvatar.tsx | 72 -------------------
1 file changed, 72 deletions(-)
delete mode 100644 surfsense_web/components/layout/ui/icon-rail/WorkspaceAvatar.tsx
diff --git a/surfsense_web/components/layout/ui/icon-rail/WorkspaceAvatar.tsx b/surfsense_web/components/layout/ui/icon-rail/WorkspaceAvatar.tsx
deleted file mode 100644
index 397076cb6..000000000
--- a/surfsense_web/components/layout/ui/icon-rail/WorkspaceAvatar.tsx
+++ /dev/null
@@ -1,72 +0,0 @@
-"use client";
-
-import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
-import { cn } from "@/lib/utils";
-
-interface SearchSpaceAvatarProps {
- name: string;
- isActive?: boolean;
- onClick?: () => void;
- size?: "sm" | "md";
-}
-
-/**
- * Generates a consistent color based on search space name
- */
-function stringToColor(str: string): string {
- let hash = 0;
- for (let i = 0; i < str.length; i++) {
- hash = str.charCodeAt(i) + ((hash << 5) - hash);
- }
- const colors = [
- "#6366f1", // indigo
- "#22c55e", // green
- "#f59e0b", // amber
- "#ef4444", // red
- "#8b5cf6", // violet
- "#06b6d4", // cyan
- "#ec4899", // pink
- "#14b8a6", // teal
- ];
- return colors[Math.abs(hash) % colors.length];
-}
-
-/**
- * Gets initials from search space name (max 2 chars)
- */
-function getInitials(name: string): string {
- const words = name.trim().split(/\s+/);
- if (words.length >= 2) {
- return (words[0][0] + words[1][0]).toUpperCase();
- }
- return name.slice(0, 2).toUpperCase();
-}
-
-export function SearchSpaceAvatar({ name, isActive, onClick, size = "md" }: SearchSpaceAvatarProps) {
- const bgColor = stringToColor(name);
- const initials = getInitials(name);
- const sizeClasses = size === "sm" ? "h-8 w-8 text-xs" : "h-10 w-10 text-sm";
-
- return (
-
-
-
-
-
- {name}
-
-
- );
-}
From 4c72b882c8ce0fadee4bfcc7ff9e363a699f687a Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Mon, 12 Jan 2026 21:45:22 +0200
Subject: [PATCH 44/49] Remove outdated clickup-connect-form.tsx
---
.../components/clickup-connect-form.tsx | 385 ------------------
1 file changed, 385 deletions(-)
delete mode 100644 surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/clickup-connect-form.tsx
diff --git a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/clickup-connect-form.tsx b/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/clickup-connect-form.tsx
deleted file mode 100644
index 9f33c6ed9..000000000
--- a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/clickup-connect-form.tsx
+++ /dev/null
@@ -1,385 +0,0 @@
-"use client";
-
-import { zodResolver } from "@hookform/resolvers/zod";
-import { Info } from "lucide-react";
-import type { FC } from "react";
-import { useRef, useState } from "react";
-import { useForm } from "react-hook-form";
-import * as z from "zod";
-import {
- Accordion,
- AccordionContent,
- AccordionItem,
- AccordionTrigger,
-} from "@/components/ui/accordion";
-import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
-import {
- Form,
- FormControl,
- FormDescription,
- FormField,
- FormItem,
- FormLabel,
- FormMessage,
-} from "@/components/ui/form";
-import { Input } from "@/components/ui/input";
-import { Label } from "@/components/ui/label";
-import {
- Select,
- SelectContent,
- SelectItem,
- SelectTrigger,
- SelectValue,
-} from "@/components/ui/select";
-import { Switch } from "@/components/ui/switch";
-import { EnumConnectorName } from "@/contracts/enums/connector";
-import { DateRangeSelector } from "../../components/date-range-selector";
-import { getConnectorBenefits } from "../connector-benefits";
-import type { ConnectFormProps } from "../index";
-
-const clickupConnectorFormSchema = z.object({
- name: z.string().min(3, {
- message: "Connector name must be at least 3 characters.",
- }),
- api_token: z.string().min(10, {
- message: "ClickUp API Token is required and must be valid.",
- }),
-});
-
-type ClickUpConnectorFormValues = z.infer<typeof clickupConnectorFormSchema>;
-
-export const ClickUpConnectForm: FC = ({ onSubmit, isSubmitting }) => {
- const isSubmittingRef = useRef(false);
- const [startDate, setStartDate] = useState(undefined);
- const [endDate, setEndDate] = useState(undefined);
- const [periodicEnabled, setPeriodicEnabled] = useState(false);
- const [frequencyMinutes, setFrequencyMinutes] = useState("1440");
- const form = useForm({
- resolver: zodResolver(clickupConnectorFormSchema),
- defaultValues: {
- name: "ClickUp Connector",
- api_token: "",
- },
- });
-
- const handleSubmit = async (values: ClickUpConnectorFormValues) => {
- // Prevent multiple submissions
- if (isSubmittingRef.current || isSubmitting) {
- return;
- }
-
- isSubmittingRef.current = true;
- try {
- await onSubmit({
- name: values.name,
- connector_type: EnumConnectorName.CLICKUP_CONNECTOR,
- config: {
- CLICKUP_API_TOKEN: values.api_token,
- },
- is_indexable: true,
- last_indexed_at: null,
- periodic_indexing_enabled: periodicEnabled,
- indexing_frequency_minutes: periodicEnabled ? parseInt(frequencyMinutes, 10) : null,
- next_scheduled_at: null,
- startDate,
- endDate,
- periodicEnabled,
- frequencyMinutes,
- });
- } finally {
- isSubmittingRef.current = false;
- }
- };
-
- return (
-
-
-
-
-
API Token Required
-
- You'll need a ClickUp API Token to use this connector. You can create one from{" "}
-
- ClickUp Settings
-
-
-
-
-
-
-
- {/* What you get section */}
- {getConnectorBenefits(EnumConnectorName.CLICKUP_CONNECTOR) && (
-
-
What you get with ClickUp integration:
-
- {getConnectorBenefits(EnumConnectorName.CLICKUP_CONNECTOR)?.map((benefit) => (
- - {benefit}
- ))}
-
-
- )}
-
- {/* Documentation Section */}
-
-
-
- Documentation
-
-
-
-
How it works
-
- The ClickUp connector uses the ClickUp API to fetch all tasks and projects that your
- API token has access to within your workspace.
-
-
- -
- For follow up indexing runs, the connector retrieves tasks that have been updated
- since the last indexing attempt.
-
- -
- Indexing is configured to run periodically, so updates should appear in your
- search results within minutes.
-
-
-
-
-
-
-
Authorization
-
-
- API Token Required
-
- You need a ClickUp personal API token to use this connector. The token will be
- used to read your ClickUp data.
-
-
-
-
-
-
- Step 1: Get Your API Token
-
-
- - Log in to your ClickUp account
- - Click your avatar in the upper-right corner and select "Settings"
- - In the sidebar, click "Apps"
- -
- Under "API Token", click Generate or{" "}
- Regenerate
-
- - Copy the generated token (it typically starts with "pk_")
- -
- Paste it in the form above. You can also visit{" "}
-
- ClickUp API Settings
- {" "}
- directly.
-
-
-
-
-
-
- Step 2: Grant necessary access
-
-
- The API Token will have access to all tasks and projects that your user
- account can see. Make sure your account has appropriate permissions for the
- workspaces you want to index.
-
-
-
- Data Privacy
-
- Only tasks, comments, and basic metadata will be indexed. ClickUp
- attachments and linked files are not indexed by this connector.
-
-
-
-
-
-
-
-
-
-
Indexing
-
- -
- Navigate to the Connector Dashboard and select the ClickUp{" "}
- Connector.
-
- -
- Place your API Token in the form field.
-
- -
- Click Connect to establish the connection.
-
- - Once connected, your ClickUp tasks will be indexed automatically.
-
-
-
-
- What Gets Indexed
-
- The ClickUp connector indexes the following data:
-
- - Task names and descriptions
- - Task comments and discussion threads
- - Task status, priority, and assignee information
- - Project and workspace information
-
-
-
-
-
-
-
-
-
- );
-};
From 7ac7cd5f99bcd45aeef0872ee909c983e4eccb10 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Mon, 12 Jan 2026 12:47:08 -0800
Subject: [PATCH 45/49] fix: improve error handling for unauthorized responses
and response parsing in BaseApiService
---
surfsense_web/lib/apis/base-api.service.ts | 98 +++++++++++-----------
1 file changed, 51 insertions(+), 47 deletions(-)
diff --git a/surfsense_web/lib/apis/base-api.service.ts b/surfsense_web/lib/apis/base-api.service.ts
index ff71fe14c..d7c281ac6 100644
--- a/surfsense_web/lib/apis/base-api.service.ts
+++ b/surfsense_web/lib/apis/base-api.service.ts
@@ -129,20 +129,24 @@ class BaseApiService {
throw new AppError("Failed to parse response", response.status, response.statusText);
}
+ // Handle 401 first before other error handling - ensures token is cleared and user redirected
+ if (response.status === 401) {
+ handleUnauthorized();
+ throw new AuthenticationError(
+ typeof data === "object" && "detail" in data
+ ? data.detail
+ : "You are not authenticated. Please login again.",
+ response.status,
+ response.statusText
+ );
+ }
+
// For fastapi errors response
if (typeof data === "object" && "detail" in data) {
throw new AppError(data.detail, response.status, response.statusText);
}
switch (response.status) {
- case 401:
- // Use centralized auth handler for 401 responses
- handleUnauthorized();
- throw new AuthenticationError(
- "You are not authenticated. Please login again.",
- response.status,
- response.statusText
- );
case 403:
throw new AuthorizationError(
"You don't have permission to access this resource.",
@@ -157,52 +161,52 @@ class BaseApiService {
}
}
- // biome-ignore lint/suspicious: Unknown
- let data;
- const responseType = mergedOptions.responseType;
+ // biome-ignore lint/suspicious: Unknown
+ let data;
+ const responseType = mergedOptions.responseType;
- try {
- switch (responseType) {
- case ResponseType.JSON:
- data = await response.json();
- break;
- case ResponseType.TEXT:
- data = await response.text();
- break;
- case ResponseType.BLOB:
- data = await response.blob();
- break;
- case ResponseType.ARRAY_BUFFER:
- data = await response.arrayBuffer();
- break;
- // Add more cases as needed
- default:
- data = await response.json();
- }
- } catch (error) {
- console.error("Failed to parse response as JSON:", error);
- throw new AppError("Failed to parse response", response.status, response.statusText);
+ try {
+ switch (responseType) {
+ case ResponseType.JSON:
+ data = await response.json();
+ break;
+ case ResponseType.TEXT:
+ data = await response.text();
+ break;
+ case ResponseType.BLOB:
+ data = await response.blob();
+ break;
+ case ResponseType.ARRAY_BUFFER:
+ data = await response.arrayBuffer();
+ break;
+ // Add more cases as needed
+ default:
+ data = await response.json();
}
+ } catch (error) {
+ console.error("Failed to parse response as JSON:", error);
+ throw new AppError("Failed to parse response", response.status, response.statusText);
+ }
- // Validate response
- if (responseType === ResponseType.JSON) {
- if (!responseSchema) {
- return data;
- }
- const parsedData = responseSchema.safeParse(data);
-
- if (!parsedData.success) {
- /** The request was successful, but the response data does not match the expected schema.
- * This is a client side error, and should be fixed by updating the responseSchema to keep things typed.
- * This error should not be shown to the user , it is for dev only.
- */
- console.error(`Invalid API response schema - ${url} :`, JSON.stringify(parsedData.error));
- }
-
+ // Validate response
+ if (responseType === ResponseType.JSON) {
+ if (!responseSchema) {
return data;
}
+ const parsedData = responseSchema.safeParse(data);
+
+ if (!parsedData.success) {
+ /** The request was successful, but the response data does not match the expected schema.
+ * This is a client side error, and should be fixed by updating the responseSchema to keep things typed.
+ * This error should not be shown to the user , it is for dev only.
+ */
+ console.error(`Invalid API response schema - ${url} :`, JSON.stringify(parsedData.error));
+ }
return data;
+ }
+
+ return data;
} catch (error) {
console.error("Request failed:", JSON.stringify(error));
throw error;
From f62d4cb99ce58009399de2ddf0d477b5befa9f8f Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Mon, 12 Jan 2026 13:59:55 -0800
Subject: [PATCH 46/49] chore: update .gitignore to include connector status
config and enhance user query atom to fetch data only when a bearer token is
present
---
surfsense_web/.gitignore | 2 ++
surfsense_web/atoms/user/user-query.atoms.ts | 3 +++
2 files changed, 5 insertions(+)
diff --git a/surfsense_web/.gitignore b/surfsense_web/.gitignore
index 6ae7fe0c4..12ad12c84 100644
--- a/surfsense_web/.gitignore
+++ b/surfsense_web/.gitignore
@@ -50,3 +50,5 @@ next-env.d.ts
.pnpm-store/
+# Connector status config (use .example.json as template)
+components/assistant-ui/connector-popup/config/connector-status-config.json
diff --git a/surfsense_web/atoms/user/user-query.atoms.ts b/surfsense_web/atoms/user/user-query.atoms.ts
index ea3e7ec49..cd9ec6c87 100644
--- a/surfsense_web/atoms/user/user-query.atoms.ts
+++ b/surfsense_web/atoms/user/user-query.atoms.ts
@@ -1,11 +1,14 @@
import { atomWithQuery } from "jotai-tanstack-query";
import { userApiService } from "@/lib/apis/user-api.service";
+import { getBearerToken } from "@/lib/auth-utils";
import { cacheKeys } from "@/lib/query-client/cache-keys";
export const currentUserAtom = atomWithQuery(() => {
return {
queryKey: cacheKeys.user.current(),
staleTime: 5 * 60 * 1000, // 5 minutes
+ // Only fetch user data when a bearer token is present
+ enabled: !!getBearerToken(),
queryFn: async () => {
return userApiService.getMe();
},
From 54b1107194d12a5cb019af26b6ddedb7a1b994d8 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Mon, 12 Jan 2026 14:09:45 -0800
Subject: [PATCH 47/49] chore: remove connector status config from .gitignore
---
surfsense_web/.gitignore | 2 --
1 file changed, 2 deletions(-)
diff --git a/surfsense_web/.gitignore b/surfsense_web/.gitignore
index 12ad12c84..6ae7fe0c4 100644
--- a/surfsense_web/.gitignore
+++ b/surfsense_web/.gitignore
@@ -50,5 +50,3 @@ next-env.d.ts
.pnpm-store/
-# Connector status config (use .example.json as template)
-components/assistant-ui/connector-popup/config/connector-status-config.json
From 11915df97b565f4ca84b3e577daec0f97ff65314 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Mon, 12 Jan 2026 14:14:59 -0800
Subject: [PATCH 48/49] feat: add connector statuses with warnings and
maintenance messages for various connectors
---
.../config/connector-status-config.json | 33 ++++++++++++++++++-
1 file changed, 32 insertions(+), 1 deletion(-)
diff --git a/surfsense_web/components/assistant-ui/connector-popup/config/connector-status-config.json b/surfsense_web/components/assistant-ui/connector-popup/config/connector-status-config.json
index 470ff22e9..6ed792b8e 100644
--- a/surfsense_web/components/assistant-ui/connector-popup/config/connector-status-config.json
+++ b/surfsense_web/components/assistant-ui/connector-popup/config/connector-status-config.json
@@ -1,5 +1,36 @@
{
- "connectorStatuses": {},
+ "connectorStatuses": {
+ "GOOGLE_DRIVE_CONNECTOR": {
+ "enabled": true,
+ "status": "warning",
+ "statusMessage": "Our Google OAuth app is not verified. You may see a 'non-verified app' warning during sign-in."
+ },
+ "GOOGLE_GMAIL_CONNECTOR": {
+ "enabled": true,
+ "status": "warning",
+ "statusMessage": "Our Google OAuth app is not verified. You may see a 'non-verified app' warning during sign-in."
+ },
+ "GOOGLE_CALENDAR_CONNECTOR": {
+ "enabled": true,
+ "status": "warning",
+ "statusMessage": "Our Google OAuth app is not verified. You may see a 'non-verified app' warning during sign-in."
+ },
+ "YOUTUBE_CONNECTOR": {
+ "enabled": true,
+ "status": "warning",
+ "statusMessage": "Doesn't work on cloud version due to YouTube blocks. Will be fixed soon."
+ },
+ "WEBCRAWLER_CONNECTOR": {
+ "enabled": true,
+ "status": "warning",
+ "statusMessage": "Some requests may be blocked if not using Firecrawl."
+ },
+ "GITHUB_CONNECTOR": {
+ "enabled": false,
+ "status": "maintenance",
+ "statusMessage": "Rework in progress."
+ }
+ },
"globalSettings": {
"showWarnings": true,
"allowManualOverride": false
From 73a57589acdad272325e11f202f3dfd4eb4d7ea1 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Mon, 12 Jan 2026 14:17:15 -0800
Subject: [PATCH 49/49] chore: linting
---
.../versions/60_add_surfsense_docs_tables.py | 24 +++--
.../new_chat/tools/search_surfsense_docs.py | 15 ++--
surfsense_backend/app/db.py | 4 +-
.../routes/search_source_connectors_routes.py | 5 +-
.../app/schemas/surfsense_docs.py | 1 -
.../app/tasks/surfsense_docs_indexer.py | 67 +++++++-------
.../scripts/seed_surfsense_docs.py | 4 +-
surfsense_web/app/dashboard/page.tsx | 10 +--
.../app/dashboard/user/settings/page.tsx | 17 +---
.../components/connector-status-badge.tsx | 2 +-
.../views/connector-accounts-list-view.tsx | 2 +-
.../assistant-ui/inline-citation.tsx | 6 +-
surfsense_web/components/layout/index.ts | 4 +-
.../layout/providers/LayoutDataProvider.tsx | 2 +-
.../ui/dialogs/CreateSearchSpaceDialog.tsx | 7 +-
.../components/layout/ui/dialogs/index.ts | 1 -
.../layout/ui/icon-rail/SearchSpaceAvatar.tsx | 7 +-
surfsense_web/components/layout/ui/index.ts | 2 +-
.../layout/ui/sheets/AllSearchSpacesSheet.tsx | 89 ++++++++++---------
.../components/layout/ui/sheets/index.ts | 1 -
.../layout/ui/shell/LayoutShell.tsx | 2 +-
.../layout/ui/sidebar/MobileSidebar.tsx | 2 +-
.../components/layout/ui/sidebar/Sidebar.tsx | 9 +-
.../layout/ui/sidebar/SidebarHeader.tsx | 4 +-
surfsense_web/lib/apis/base-api.service.ts | 78 ++++++++--------
25 files changed, 184 insertions(+), 181 deletions(-)
diff --git a/surfsense_backend/alembic/versions/60_add_surfsense_docs_tables.py b/surfsense_backend/alembic/versions/60_add_surfsense_docs_tables.py
index 7e5aa9437..ed03a4077 100644
--- a/surfsense_backend/alembic/versions/60_add_surfsense_docs_tables.py
+++ b/surfsense_backend/alembic/versions/60_add_surfsense_docs_tables.py
@@ -7,7 +7,6 @@ Revises: 59
from collections.abc import Sequence
from alembic import op
-
from app.config import config
# revision identifiers, used by Alembic.
@@ -22,7 +21,7 @@ EMBEDDING_DIM = config.embedding_model_instance.dimension
def upgrade() -> None:
"""Create surfsense_docs_documents and surfsense_docs_chunks tables."""
-
+
# Create surfsense_docs_documents table
op.execute(
f"""
@@ -46,7 +45,7 @@ def upgrade() -> None:
END$$;
"""
)
-
+
# Create indexes for surfsense_docs_documents
op.execute(
"""
@@ -75,7 +74,7 @@ def upgrade() -> None:
END$$;
"""
)
-
+
# Create surfsense_docs_chunks table
op.execute(
f"""
@@ -96,7 +95,7 @@ def upgrade() -> None:
END$$;
"""
)
-
+
# Create indexes for surfsense_docs_chunks
op.execute(
"""
@@ -111,7 +110,7 @@ def upgrade() -> None:
END$$;
"""
)
-
+
# Create vector indexes for similarity search
op.execute(
"""
@@ -119,14 +118,14 @@ def upgrade() -> None:
ON surfsense_docs_documents USING hnsw (embedding public.vector_cosine_ops);
"""
)
-
+
op.execute(
"""
CREATE INDEX IF NOT EXISTS surfsense_docs_chunks_vector_index
ON surfsense_docs_chunks USING hnsw (embedding public.vector_cosine_ops);
"""
)
-
+
# Create full-text search indexes (same pattern as documents/chunks tables)
op.execute(
"""
@@ -134,7 +133,7 @@ def upgrade() -> None:
ON surfsense_docs_documents USING gin (to_tsvector('english', content));
"""
)
-
+
op.execute(
"""
CREATE INDEX IF NOT EXISTS surfsense_docs_chunks_search_index
@@ -148,18 +147,17 @@ def downgrade() -> None:
# Drop full-text search indexes
op.execute("DROP INDEX IF EXISTS surfsense_docs_chunks_search_index")
op.execute("DROP INDEX IF EXISTS surfsense_docs_documents_search_index")
-
+
# Drop vector indexes
op.execute("DROP INDEX IF EXISTS surfsense_docs_chunks_vector_index")
op.execute("DROP INDEX IF EXISTS surfsense_docs_documents_vector_index")
-
+
# Drop regular indexes
op.execute("DROP INDEX IF EXISTS ix_surfsense_docs_chunks_document_id")
op.execute("DROP INDEX IF EXISTS ix_surfsense_docs_documents_updated_at")
op.execute("DROP INDEX IF EXISTS ix_surfsense_docs_documents_content_hash")
op.execute("DROP INDEX IF EXISTS ix_surfsense_docs_documents_source")
-
+
# Drop tables (chunks first due to FK)
op.execute("DROP TABLE IF EXISTS surfsense_docs_chunks")
op.execute("DROP TABLE IF EXISTS surfsense_docs_documents")
-
diff --git a/surfsense_backend/app/agents/new_chat/tools/search_surfsense_docs.py b/surfsense_backend/app/agents/new_chat/tools/search_surfsense_docs.py
index a34e16ff2..b9b370c23 100644
--- a/surfsense_backend/app/agents/new_chat/tools/search_surfsense_docs.py
+++ b/surfsense_backend/app/agents/new_chat/tools/search_surfsense_docs.py
@@ -48,10 +48,12 @@ def format_surfsense_docs_results(results: list[tuple]) -> str:
"metadata": {"source": doc.source},
"chunks": [],
}
- grouped[doc.id]["chunks"].append({
- "chunk_id": f"doc-{chunk.id}",
- "content": chunk.content,
- })
+ grouped[doc.id]["chunks"].append(
+ {
+ "chunk_id": f"doc-{chunk.id}",
+ "content": chunk.content,
+ }
+ )
# Render XML matching format_documents_for_context structure
parts: list[str] = []
@@ -70,7 +72,9 @@ def format_surfsense_docs_results(results: list[tuple]) -> str:
parts.append("")
for ch in g["chunks"]:
- parts.append(f" ")
+ parts.append(
+ f" "
+ )
parts.append("")
parts.append("")
@@ -157,4 +161,3 @@ def create_search_surfsense_docs_tool(db_session: AsyncSession):
)
return search_surfsense_docs
-
diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py
index 006d73358..a0b174bf6 100644
--- a/surfsense_backend/app/db.py
+++ b/surfsense_backend/app/db.py
@@ -436,7 +436,9 @@ class SurfsenseDocsDocument(BaseModel, TimestampMixin):
__tablename__ = "surfsense_docs_documents"
- source = Column(String, nullable=False, unique=True, index=True) # File path: "connectors/slack.mdx"
+ source = Column(
+ String, nullable=False, unique=True, index=True
+ ) # File path: "connectors/slack.mdx"
title = Column(String, nullable=False)
content = Column(Text, nullable=False)
content_hash = Column(String, nullable=False, index=True) # For detecting changes
diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py
index 06d75c7c9..8e8ebb72d 100644
--- a/surfsense_backend/app/routes/search_source_connectors_routes.py
+++ b/surfsense_backend/app/routes/search_source_connectors_routes.py
@@ -623,10 +623,7 @@ async def index_connector_content(
SearchSourceConnectorType.LUMA_CONNECTOR,
]:
# Default to today if no end_date provided (users can manually select future dates)
- if end_date is None:
- indexing_to = today_str
- else:
- indexing_to = end_date
+ indexing_to = today_str if end_date is None else end_date
else:
# For non-calendar connectors, cap at today
indexing_to = end_date if end_date else today_str
diff --git a/surfsense_backend/app/schemas/surfsense_docs.py b/surfsense_backend/app/schemas/surfsense_docs.py
index 7464df342..c6029320f 100644
--- a/surfsense_backend/app/schemas/surfsense_docs.py
+++ b/surfsense_backend/app/schemas/surfsense_docs.py
@@ -24,4 +24,3 @@ class SurfsenseDocsDocumentWithChunksRead(BaseModel):
chunks: list[SurfsenseDocsChunkRead]
model_config = ConfigDict(from_attributes=True)
-
diff --git a/surfsense_backend/app/tasks/surfsense_docs_indexer.py b/surfsense_backend/app/tasks/surfsense_docs_indexer.py
index f2c1e69ba..ef287bc65 100644
--- a/surfsense_backend/app/tasks/surfsense_docs_indexer.py
+++ b/surfsense_backend/app/tasks/surfsense_docs_indexer.py
@@ -19,7 +19,12 @@ from app.db import SurfsenseDocsChunk, SurfsenseDocsDocument, async_session_make
logger = logging.getLogger(__name__)
# Path to docs relative to project root
-DOCS_DIR = Path(__file__).resolve().parent.parent.parent.parent / "surfsense_web" / "content" / "docs"
+DOCS_DIR = (
+ Path(__file__).resolve().parent.parent.parent.parent
+ / "surfsense_web"
+ / "content"
+ / "docs"
+)
def parse_mdx_frontmatter(content: str) -> tuple[str, str]:
@@ -38,7 +43,7 @@ def parse_mdx_frontmatter(content: str) -> tuple[str, str]:
if match:
frontmatter = match.group(1)
- content_without_frontmatter = content[match.end():]
+ content_without_frontmatter = content[match.end() :]
# Extract title from frontmatter
title_match = re.search(r"^title:\s*(.+)$", frontmatter, re.MULTILINE)
@@ -93,10 +98,10 @@ def create_surfsense_docs_chunks(content: str) -> list[SurfsenseDocsChunk]:
async def index_surfsense_docs(session: AsyncSession) -> tuple[int, int, int, int]:
"""
Index all Surfsense documentation files.
-
+
Args:
session: SQLAlchemy async session
-
+
Returns:
Tuple of (created, updated, skipped, deleted) counts
"""
@@ -104,45 +109,47 @@ async def index_surfsense_docs(session: AsyncSession) -> tuple[int, int, int, in
updated = 0
skipped = 0
deleted = 0
-
+
# Get all existing docs from database
existing_docs_result = await session.execute(
- select(SurfsenseDocsDocument).options(selectinload(SurfsenseDocsDocument.chunks))
+ select(SurfsenseDocsDocument).options(
+ selectinload(SurfsenseDocsDocument.chunks)
+ )
)
existing_docs = {doc.source: doc for doc in existing_docs_result.scalars().all()}
-
+
# Track which sources we've processed
processed_sources = set()
-
+
# Get all MDX files
mdx_files = get_all_mdx_files()
logger.info(f"Found {len(mdx_files)} MDX files to index")
-
+
for mdx_file in mdx_files:
try:
source = str(mdx_file.relative_to(DOCS_DIR))
processed_sources.add(source)
-
+
# Read file content
raw_content = mdx_file.read_text(encoding="utf-8")
title, content = parse_mdx_frontmatter(raw_content)
content_hash = generate_surfsense_docs_content_hash(raw_content)
-
+
if source in existing_docs:
existing_doc = existing_docs[source]
-
+
# Check if content changed
if existing_doc.content_hash == content_hash:
logger.debug(f"Skipping unchanged: {source}")
skipped += 1
continue
-
+
# Content changed - update document
logger.info(f"Updating changed document: {source}")
-
+
# Create new chunks
chunks = create_surfsense_docs_chunks(content)
-
+
# Update document fields
existing_doc.title = title
existing_doc.content = content
@@ -150,14 +157,14 @@ async def index_surfsense_docs(session: AsyncSession) -> tuple[int, int, int, in
existing_doc.embedding = config.embedding_model_instance.embed(content)
existing_doc.chunks = chunks
existing_doc.updated_at = datetime.now(UTC)
-
+
updated += 1
else:
# New document - create it
logger.info(f"Creating new document: {source}")
-
+
chunks = create_surfsense_docs_chunks(content)
-
+
document = SurfsenseDocsDocument(
source=source,
title=title,
@@ -167,56 +174,56 @@ async def index_surfsense_docs(session: AsyncSession) -> tuple[int, int, int, in
chunks=chunks,
updated_at=datetime.now(UTC),
)
-
+
session.add(document)
created += 1
-
+
except Exception as e:
logger.error(f"Error processing {mdx_file}: {e}", exc_info=True)
continue
-
+
# Delete documents for removed files
for source, doc in existing_docs.items():
if source not in processed_sources:
logger.info(f"Deleting removed document: {source}")
await session.delete(doc)
deleted += 1
-
+
# Commit all changes
await session.commit()
-
+
logger.info(
f"Indexing complete: {created} created, {updated} updated, "
f"{skipped} skipped, {deleted} deleted"
)
-
+
return created, updated, skipped, deleted
async def seed_surfsense_docs() -> tuple[int, int, int, int]:
"""
Seed Surfsense documentation into the database.
-
+
This function indexes all MDX files from the docs directory.
It handles creating, updating, and deleting docs based on content changes.
-
+
Returns:
Tuple of (created, updated, skipped, deleted) counts
Returns (0, 0, 0, 0) if an error occurs
"""
logger.info("Starting Surfsense docs indexing...")
-
+
try:
async with async_session_maker() as session:
created, updated, skipped, deleted = await index_surfsense_docs(session)
-
+
logger.info(
f"Surfsense docs indexing complete: "
f"created={created}, updated={updated}, skipped={skipped}, deleted={deleted}"
)
-
+
return created, updated, skipped, deleted
-
+
except Exception as e:
logger.error(f"Failed to seed Surfsense docs: {e}", exc_info=True)
return 0, 0, 0, 0
diff --git a/surfsense_backend/scripts/seed_surfsense_docs.py b/surfsense_backend/scripts/seed_surfsense_docs.py
index d9536bf91..68899c2aa 100644
--- a/surfsense_backend/scripts/seed_surfsense_docs.py
+++ b/surfsense_backend/scripts/seed_surfsense_docs.py
@@ -24,9 +24,9 @@ def main():
print("=" * 50)
print(" Surfsense Documentation Seeding")
print("=" * 50)
-
+
created, updated, skipped, deleted = asyncio.run(seed_surfsense_docs())
-
+
print()
print("Results:")
print(f" Created: {created}")
diff --git a/surfsense_web/app/dashboard/page.tsx b/surfsense_web/app/dashboard/page.tsx
index 3e6d71829..767ce5201 100644
--- a/surfsense_web/app/dashboard/page.tsx
+++ b/surfsense_web/app/dashboard/page.tsx
@@ -105,9 +105,7 @@ function EmptyState({ onCreateClick }: { onCreateClick: () => void }) {
{t("welcome_title")}
-
- {t("welcome_description")}
-
+
{t("welcome_description")}
-
- {copied ? t("copied") : t("copy")}
-
+ {copied ? t("copied") : t("copy")}
diff --git a/surfsense_web/components/assistant-ui/connector-popup/components/connector-status-badge.tsx b/surfsense_web/components/assistant-ui/connector-popup/components/connector-status-badge.tsx
index ecc3a11cd..7412a4148 100644
--- a/surfsense_web/components/assistant-ui/connector-popup/components/connector-status-badge.tsx
+++ b/surfsense_web/components/assistant-ui/connector-popup/components/connector-status-badge.tsx
@@ -3,8 +3,8 @@
import { AlertTriangle, Ban, Wrench } from "lucide-react";
import type { FC } from "react";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
-import type { ConnectorStatus } from "../config/connector-status-config";
import { cn } from "@/lib/utils";
+import type { ConnectorStatus } from "../config/connector-status-config";
interface ConnectorStatusBadgeProps {
status: ConnectorStatus;
diff --git a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx
index 74dd51929..bec4bfcb8 100644
--- a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx
+++ b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx
@@ -8,8 +8,8 @@ import { getConnectorIcon } from "@/contracts/enums/connectorIcons";
import type { SearchSourceConnector } from "@/contracts/types/connector.types";
import type { LogActiveTask, LogSummary } from "@/contracts/types/log.types";
import { cn } from "@/lib/utils";
-import { getConnectorDisplayName } from "../tabs/all-connectors-tab";
import { useConnectorStatus } from "../hooks/use-connector-status";
+import { getConnectorDisplayName } from "../tabs/all-connectors-tab";
interface ConnectorAccountsListViewProps {
connectorType: string;
diff --git a/surfsense_web/components/assistant-ui/inline-citation.tsx b/surfsense_web/components/assistant-ui/inline-citation.tsx
index 9eab9a3c3..6b5a4b091 100644
--- a/surfsense_web/components/assistant-ui/inline-citation.tsx
+++ b/surfsense_web/components/assistant-ui/inline-citation.tsx
@@ -15,7 +15,11 @@ interface InlineCitationProps {
* Renders a clickable numbered badge that opens the SourceDetailPanel with document chunk details.
* Supports both regular knowledge base chunks and Surfsense documentation chunks.
*/
-export const InlineCitation: FC = ({ chunkId, citationNumber, isDocsChunk = false }) => {
+export const InlineCitation: FC = ({
+ chunkId,
+ citationNumber,
+ isDocsChunk = false,
+}) => {
const [isOpen, setIsOpen] = useState(false);
return (
diff --git a/surfsense_web/components/layout/index.ts b/surfsense_web/components/layout/index.ts
index b9c271915..18f8cc9d3 100644
--- a/surfsense_web/components/layout/index.ts
+++ b/surfsense_web/components/layout/index.ts
@@ -6,9 +6,9 @@ export type {
NavItem,
NoteItem,
PageUsage,
+ SearchSpace,
SidebarSectionProps,
User,
- SearchSpace,
} from "./types/layout.types";
export {
AllSearchSpacesSheet,
@@ -23,10 +23,10 @@ export {
NavSection,
NoteListItem,
PageUsageDisplay,
+ SearchSpaceAvatar,
Sidebar,
SidebarCollapseButton,
SidebarHeader,
SidebarSection,
SidebarUserProfile,
- SearchSpaceAvatar,
} from "./ui";
diff --git a/surfsense_web/components/layout/providers/LayoutDataProvider.tsx b/surfsense_web/components/layout/providers/LayoutDataProvider.tsx
index 8f42e22aa..70bc96f58 100644
--- a/surfsense_web/components/layout/providers/LayoutDataProvider.tsx
+++ b/surfsense_web/components/layout/providers/LayoutDataProvider.tsx
@@ -28,8 +28,8 @@ import { resetUser, trackLogout } from "@/lib/posthog/events";
import { cacheKeys } from "@/lib/query-client/cache-keys";
import type { ChatItem, NavItem, NoteItem, SearchSpace } from "../types/layout.types";
import { CreateSearchSpaceDialog } from "../ui/dialogs";
-import { LayoutShell } from "../ui/shell";
import { AllSearchSpacesSheet } from "../ui/sheets";
+import { LayoutShell } from "../ui/shell";
import { AllChatsSidebar } from "../ui/sidebar/AllChatsSidebar";
import { AllNotesSidebar } from "../ui/sidebar/AllNotesSidebar";
diff --git a/surfsense_web/components/layout/ui/dialogs/CreateSearchSpaceDialog.tsx b/surfsense_web/components/layout/ui/dialogs/CreateSearchSpaceDialog.tsx
index 978d46f6c..7e962536f 100644
--- a/surfsense_web/components/layout/ui/dialogs/CreateSearchSpaceDialog.tsx
+++ b/surfsense_web/components/layout/ui/dialogs/CreateSearchSpaceDialog.tsx
@@ -104,11 +104,7 @@ export function CreateSearchSpaceDialog({ open, onOpenChange }: CreateSearchSpac
{t("name_label")}
-
+
@@ -163,4 +159,3 @@ export function CreateSearchSpaceDialog({ open, onOpenChange }: CreateSearchSpac
);
}
-
diff --git a/surfsense_web/components/layout/ui/dialogs/index.ts b/surfsense_web/components/layout/ui/dialogs/index.ts
index 28f3b387d..807a227de 100644
--- a/surfsense_web/components/layout/ui/dialogs/index.ts
+++ b/surfsense_web/components/layout/ui/dialogs/index.ts
@@ -1,2 +1 @@
export { CreateSearchSpaceDialog } from "./CreateSearchSpaceDialog";
-
diff --git a/surfsense_web/components/layout/ui/icon-rail/SearchSpaceAvatar.tsx b/surfsense_web/components/layout/ui/icon-rail/SearchSpaceAvatar.tsx
index 397076cb6..77f4de899 100644
--- a/surfsense_web/components/layout/ui/icon-rail/SearchSpaceAvatar.tsx
+++ b/surfsense_web/components/layout/ui/icon-rail/SearchSpaceAvatar.tsx
@@ -42,7 +42,12 @@ function getInitials(name: string): string {
return name.slice(0, 2).toUpperCase();
}
-export function SearchSpaceAvatar({ name, isActive, onClick, size = "md" }: SearchSpaceAvatarProps) {
+export function SearchSpaceAvatar({
+ name,
+ isActive,
+ onClick,
+ size = "md",
+}: SearchSpaceAvatarProps) {
const bgColor = stringToColor(name);
const initials = getInitials(name);
const sizeClasses = size === "sm" ? "h-8 w-8 text-xs" : "h-10 w-10 text-sm";
diff --git a/surfsense_web/components/layout/ui/index.ts b/surfsense_web/components/layout/ui/index.ts
index c5aba9250..bd3d54838 100644
--- a/surfsense_web/components/layout/ui/index.ts
+++ b/surfsense_web/components/layout/ui/index.ts
@@ -1,8 +1,8 @@
export { CreateSearchSpaceDialog } from "./dialogs";
export { Header } from "./header";
export { IconRail, NavIcon, SearchSpaceAvatar } from "./icon-rail";
-export { LayoutShell } from "./shell";
export { AllSearchSpacesSheet } from "./sheets";
+export { LayoutShell } from "./shell";
export {
ChatListItem,
MobileSidebar,
diff --git a/surfsense_web/components/layout/ui/sheets/AllSearchSpacesSheet.tsx b/surfsense_web/components/layout/ui/sheets/AllSearchSpacesSheet.tsx
index d144c79b3..401de41c3 100644
--- a/surfsense_web/components/layout/ui/sheets/AllSearchSpacesSheet.tsx
+++ b/surfsense_web/components/layout/ui/sheets/AllSearchSpacesSheet.tsx
@@ -1,6 +1,15 @@
"use client";
-import { Calendar, MoreHorizontal, Search, Settings, Share2, Trash2, UserCheck, Users } from "lucide-react";
+import {
+ Calendar,
+ MoreHorizontal,
+ Search,
+ Settings,
+ Share2,
+ Trash2,
+ UserCheck,
+ Users,
+} from "lucide-react";
import { useTranslations } from "next-intl";
import { useState } from "react";
import {
@@ -112,9 +121,7 @@ export function AllSearchSpacesSheet({