From 1b2ac1bd17d41dcf88d03a10c1cb42c1575cf644 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Thu, 5 Mar 2026 13:42:08 +0530
Subject: [PATCH 01/34] chore: enhance Docker build workflow by adding
conditional checks for job execution and integrating Docker metadata action
for improved tagging and manifest creation
---
.github/workflows/docker_build.yaml | 90 +++++++++++++++++++++--------
1 file changed, 65 insertions(+), 25 deletions(-)
diff --git a/.github/workflows/docker_build.yaml b/.github/workflows/docker_build.yaml
index 15b89198e..d338e9fe9 100644
--- a/.github/workflows/docker_build.yaml
+++ b/.github/workflows/docker_build.yaml
@@ -121,6 +121,12 @@ jobs:
id: image
run: echo "name=${REGISTRY_IMAGE,,}" >> $GITHUB_OUTPUT
+ - name: Docker meta
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ${{ steps.image.outputs.name }}
+
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
@@ -139,14 +145,15 @@ jobs:
sudo rm -rf "$AGENT_TOOLSDIRECTORY" || true
docker system prune -af
- - name: Build and push ${{ matrix.name }} (${{ matrix.suffix }})
+ - name: Build and push by digest ${{ matrix.name }} (${{ matrix.suffix }})
id: build
uses: docker/build-push-action@v6
with:
context: ${{ matrix.context }}
file: ${{ matrix.file }}
- push: true
- tags: ${{ steps.image.outputs.name }}:${{ needs.tag_release.outputs.new_tag }}-${{ matrix.suffix }}
+ labels: ${{ steps.meta.outputs.labels }}
+ tags: ${{ steps.image.outputs.name }}
+ outputs: type=image,push-by-digest=true,name-canonical=true,push=true
platforms: ${{ matrix.platform }}
cache-from: type=gha,scope=${{ matrix.image }}-${{ matrix.suffix }}
cache-to: type=gha,mode=max,scope=${{ matrix.image }}-${{ matrix.suffix }}
@@ -159,6 +166,20 @@ jobs:
${{ matrix.image == 'web' && 'NEXT_PUBLIC_ELECTRIC_AUTH_MODE=__NEXT_PUBLIC_ELECTRIC_AUTH_MODE__' || '' }}
${{ matrix.image == 'web' && 'NEXT_PUBLIC_DEPLOYMENT_MODE=__NEXT_PUBLIC_DEPLOYMENT_MODE__' || '' }}
+ - name: Export digest
+ run: |
+ mkdir -p /tmp/digests
+ digest="${{ steps.build.outputs.digest }}"
+ touch "/tmp/digests/${digest#sha256:}"
+
+ - name: Upload digest
+ uses: actions/upload-artifact@v4
+ with:
+ name: digests-${{ matrix.image }}-${{ matrix.suffix }}
+ path: /tmp/digests/*
+ if-no-files-found: error
+ retention-days: 1
+
create_manifest:
runs-on: ubuntu-latest
needs: [tag_release, build]
@@ -170,7 +191,9 @@ jobs:
matrix:
include:
- name: surfsense-backend
+ image: backend
- name: surfsense-web
+ image: web
env:
REGISTRY_IMAGE: ghcr.io/${{ github.repository_owner }}/${{ matrix.name }}
@@ -179,6 +202,21 @@ jobs:
id: image
run: echo "name=${REGISTRY_IMAGE,,}" >> $GITHUB_OUTPUT
+ - name: Download amd64 digest
+ uses: actions/download-artifact@v4
+ with:
+ name: digests-${{ matrix.image }}-amd64
+ path: /tmp/digests
+
+ - name: Download arm64 digest
+ uses: actions/download-artifact@v4
+ with:
+ name: digests-${{ matrix.image }}-arm64
+ path: /tmp/digests
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
@@ -186,35 +224,37 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- - name: Create and push multi-arch manifest
+ - name: Compute app version
+ id: appver
run: |
VERSION_TAG="${{ needs.tag_release.outputs.new_tag }}"
- IMAGE="${{ steps.image.outputs.name }}"
APP_VERSION=$(echo "$VERSION_TAG" | rev | cut -d. -f2- | rev)
+ echo "app_version=$APP_VERSION" >> $GITHUB_OUTPUT
- docker manifest create ${IMAGE}:${VERSION_TAG} \
- ${IMAGE}:${VERSION_TAG}-amd64 \
- ${IMAGE}:${VERSION_TAG}-arm64
+ - name: Docker meta
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ${{ steps.image.outputs.name }}
+ tags: |
+ type=raw,value=${{ needs.tag_release.outputs.new_tag }}
+ type=raw,value=${{ steps.appver.outputs.app_version }},enable=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || github.event.inputs.branch == github.event.repository.default_branch }}
+ type=ref,event=branch
+ flavor: |
+ latest=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || github.event.inputs.branch == github.event.repository.default_branch }}
- docker manifest push ${IMAGE}:${VERSION_TAG}
+ - name: Create manifest list and push
+ working-directory: /tmp/digests
+ run: |
+ docker buildx imagetools create \
+ $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+ $(printf '${{ steps.image.outputs.name }}@sha256:%s ' *)
- if [[ "${{ github.ref }}" == "refs/heads/${{ github.event.repository.default_branch }}" ]] || [[ "${{ github.event.inputs.branch }}" == "${{ github.event.repository.default_branch }}" ]]; then
- docker manifest create ${IMAGE}:${APP_VERSION} \
- ${IMAGE}:${VERSION_TAG}-amd64 \
- ${IMAGE}:${VERSION_TAG}-arm64
-
- docker manifest push ${IMAGE}:${APP_VERSION}
-
- docker manifest create ${IMAGE}:latest \
- ${IMAGE}:${VERSION_TAG}-amd64 \
- ${IMAGE}:${VERSION_TAG}-arm64
-
- docker manifest push ${IMAGE}:latest
- fi
+ - name: Inspect image
+ run: |
+ docker buildx imagetools inspect ${{ steps.image.outputs.name }}:${{ steps.meta.outputs.version }}
- name: Summary
run: |
echo "Multi-arch manifest created for ${{ matrix.name }}!"
- echo "Versioned: ${{ steps.image.outputs.name }}:${{ needs.tag_release.outputs.new_tag }}"
- echo "App version: ${{ steps.image.outputs.name }}:$(echo '${{ needs.tag_release.outputs.new_tag }}' | rev | cut -d. -f2- | rev)"
- echo "Latest: ${{ steps.image.outputs.name }}:latest"
+ echo "Tags: $(jq -cr '.tags | join(", ")' <<< "$DOCKER_METADATA_OUTPUT_JSON")"
From 110502609bdd7d4c72f5c365cb12b5faddef0d87 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Thu, 5 Mar 2026 13:55:05 +0530
Subject: [PATCH 02/34] chore: add conditional execution logic to Docker build
workflow jobs and improve tagging logic for better handling of version tags
---
.github/workflows/docker_build.yaml | 15 +++++++++++----
1 file changed, 11 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/docker_build.yaml b/.github/workflows/docker_build.yaml
index d338e9fe9..210f0f4bf 100644
--- a/.github/workflows/docker_build.yaml
+++ b/.github/workflows/docker_build.yaml
@@ -26,6 +26,7 @@ permissions:
jobs:
tag_release:
runs-on: ubuntu-latest
+ if: github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || github.event_name == 'workflow_dispatch'
outputs:
new_tag: ${{ steps.tag_version.outputs.next_version }}
steps:
@@ -86,6 +87,7 @@ jobs:
build:
needs: tag_release
+ if: always() && (needs.tag_release.result == 'success' || needs.tag_release.result == 'skipped')
runs-on: ${{ matrix.os }}
permissions:
packages: write
@@ -183,6 +185,7 @@ jobs:
create_manifest:
runs-on: ubuntu-latest
needs: [tag_release, build]
+ if: always() && needs.build.result == 'success'
permissions:
packages: write
contents: read
@@ -228,7 +231,11 @@ jobs:
id: appver
run: |
VERSION_TAG="${{ needs.tag_release.outputs.new_tag }}"
- APP_VERSION=$(echo "$VERSION_TAG" | rev | cut -d. -f2- | rev)
+ if [ -n "$VERSION_TAG" ]; then
+ APP_VERSION=$(echo "$VERSION_TAG" | rev | cut -d. -f2- | rev)
+ else
+ APP_VERSION=""
+ fi
echo "app_version=$APP_VERSION" >> $GITHUB_OUTPUT
- name: Docker meta
@@ -237,9 +244,10 @@ jobs:
with:
images: ${{ steps.image.outputs.name }}
tags: |
- type=raw,value=${{ needs.tag_release.outputs.new_tag }}
- type=raw,value=${{ steps.appver.outputs.app_version }},enable=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || github.event.inputs.branch == github.event.repository.default_branch }}
+ type=raw,value=${{ needs.tag_release.outputs.new_tag }},enable=${{ needs.tag_release.outputs.new_tag != '' }}
+ type=raw,value=${{ steps.appver.outputs.app_version }},enable=${{ needs.tag_release.outputs.new_tag != '' && (github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || github.event.inputs.branch == github.event.repository.default_branch) }}
type=ref,event=branch
+ type=sha,prefix=git-
flavor: |
latest=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || github.event.inputs.branch == github.event.repository.default_branch }}
@@ -249,7 +257,6 @@ jobs:
docker buildx imagetools create \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ steps.image.outputs.name }}@sha256:%s ' *)
-
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ steps.image.outputs.name }}:${{ steps.meta.outputs.version }}
From f567cd9c73299fc84e00bd995b4870f7a2892ad5 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Thu, 5 Mar 2026 18:24:26 +0530
Subject: [PATCH 03/34] chore: update summary step in Docker build workflow to
improve output formatting and clarity
---
.github/workflows/docker_build.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/docker_build.yaml b/.github/workflows/docker_build.yaml
index 210f0f4bf..a53a4b414 100644
--- a/.github/workflows/docker_build.yaml
+++ b/.github/workflows/docker_build.yaml
@@ -262,6 +262,6 @@ jobs:
docker buildx imagetools inspect ${{ steps.image.outputs.name }}:${{ steps.meta.outputs.version }}
- name: Summary
- run: |
+ run: |
echo "Multi-arch manifest created for ${{ matrix.name }}!"
echo "Tags: $(jq -cr '.tags | join(", ")' <<< "$DOCKER_METADATA_OUTPUT_JSON")"
From e909735ba784c1db3ba9e3db00ffa39ee7930ae7 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Thu, 5 Mar 2026 18:39:29 +0530
Subject: [PATCH 04/34] chore: remove .dockerignore file to streamline Docker
build process and eliminate unnecessary exclusions
---
.dockerignore | 97 ---------------------------------------------------
1 file changed, 97 deletions(-)
delete mode 100644 .dockerignore
diff --git a/.dockerignore b/.dockerignore
deleted file mode 100644
index 70d7fb07e..000000000
--- a/.dockerignore
+++ /dev/null
@@ -1,97 +0,0 @@
-# Git
-.git
-.gitignore
-.gitattributes
-
-# Documentation
-*.md
-!README.md
-docs/
-CONTRIBUTING.md
-CODE_OF_CONDUCT.md
-LICENSE
-
-# IDE
-.vscode/
-.idea/
-*.swp
-*.swo
-.cursor/
-
-# Node
-**/node_modules/
-**/.next/
-**/dist/
-**/.turbo/
-**/.cache/
-**/coverage/
-
-# Python
-**/__pycache__/
-**/*.pyc
-**/*.pyo
-**/*.pyd
-**/.Python
-**/build/
-**/develop-eggs/
-**/downloads/
-**/eggs/
-**/.eggs/
-# Python venv lib folders (but not frontend lib folders)
-surfsense_backend/lib/
-surfsense_backend/lib64/
-**/parts/
-**/sdist/
-**/var/
-**/wheels/
-**/*.egg-info/
-**/.installed.cfg
-**/*.egg
-**/pip-log.txt
-**/.tox/
-**/.coverage
-**/htmlcov/
-**/.pytest_cache/
-**/nosetests.xml
-**/coverage.xml
-
-# Environment
-**/.env
-**/.env.*
-!**/.env.example
-**/*.local
-
-# Docker
-**/Dockerfile
-**/docker-compose*.yml
-**/.docker/
-
-# Testing
-**/tests/
-**/test/
-**/__tests__/
-**/*.test.*
-**/*.spec.*
-
-# Logs
-**/*.log
-
-# Temporary files
-**/tmp/
-**/temp/
-**/.tmp/
-**/.temp/
-
-# Build artifacts from backend
-surfsense_backend/podcasts/
-surfsense_backend/temp_audio/
-surfsense_backend/*.bak
-surfsense_backend/*.dat
-surfsense_backend/*.dir
-
-# GitHub
-.github/
-
-# Browser extension (not needed for main deployment)
-surfsense_browser_extension/
-
From ab0f75f8ab8f528a279cac2a29135c26e1adf1e9 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Mon, 9 Mar 2026 23:08:27 +0530
Subject: [PATCH 05/34] chore: update port configurations in Docker setup to
avoid conflicts and improve local development environment
---
docker/.env.example | 6 +++---
docker/docker-compose.yml | 12 ++++++------
docker/scripts/install.ps1 | 6 +++---
docker/scripts/install.sh | 6 +++---
4 files changed, 15 insertions(+), 15 deletions(-)
diff --git a/docker/.env.example b/docker/.env.example
index e67887840..8eded92b6 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -33,9 +33,9 @@ EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2
# Ports (change to avoid conflicts with other services on your machine)
# ------------------------------------------------------------------------------
-# BACKEND_PORT=8000
-# FRONTEND_PORT=3000
-# ELECTRIC_PORT=5133
+# BACKEND_PORT=8929
+# FRONTEND_PORT=3929
+# ELECTRIC_PORT=5929
# FLOWER_PORT=5555
# ==============================================================================
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 9fca4dfb5..ca20e3ed4 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -45,7 +45,7 @@ services:
backend:
image: ghcr.io/modsetter/surfsense-backend:${SURFSENSE_VERSION:-latest}
ports:
- - "${BACKEND_PORT:-8000}:8000"
+ - "${BACKEND_PORT:-8929}:8000"
volumes:
- shared_temp:/shared_tmp
env_file:
@@ -61,7 +61,7 @@ services:
UNSTRUCTURED_HAS_PATCHED_LOOP: "1"
ELECTRIC_DB_USER: ${ELECTRIC_DB_USER:-electric}
ELECTRIC_DB_PASSWORD: ${ELECTRIC_DB_PASSWORD:-electric_password}
- NEXT_FRONTEND_URL: ${NEXT_FRONTEND_URL:-http://localhost:${FRONTEND_PORT:-3000}}
+ NEXT_FRONTEND_URL: ${NEXT_FRONTEND_URL:-http://localhost:${FRONTEND_PORT:-3929}}
# Daytona Sandbox – uncomment and set credentials to enable cloud code execution
# DAYTONA_SANDBOX_ENABLED: "TRUE"
# DAYTONA_API_KEY: ${DAYTONA_API_KEY:-}
@@ -151,7 +151,7 @@ services:
electric:
image: electricsql/electric:1.4.10
ports:
- - "${ELECTRIC_PORT:-5133}:3000"
+ - "${ELECTRIC_PORT:-5929}:3000"
environment:
DATABASE_URL: ${ELECTRIC_DATABASE_URL:-postgresql://${ELECTRIC_DB_USER:-electric}:${ELECTRIC_DB_PASSWORD:-electric_password}@${DB_HOST:-db}:${DB_PORT:-5432}/${DB_NAME:-surfsense}?sslmode=${DB_SSLMODE:-disable}}
ELECTRIC_INSECURE: "true"
@@ -169,10 +169,10 @@ services:
frontend:
image: ghcr.io/modsetter/surfsense-web:${SURFSENSE_VERSION:-latest}
ports:
- - "${FRONTEND_PORT:-3000}:3000"
+ - "${FRONTEND_PORT:-3929}:3000"
environment:
- NEXT_PUBLIC_FASTAPI_BACKEND_URL: ${NEXT_PUBLIC_FASTAPI_BACKEND_URL:-http://localhost:${BACKEND_PORT:-8000}}
- NEXT_PUBLIC_ELECTRIC_URL: ${NEXT_PUBLIC_ELECTRIC_URL:-http://localhost:${ELECTRIC_PORT:-5133}}
+ NEXT_PUBLIC_FASTAPI_BACKEND_URL: ${NEXT_PUBLIC_FASTAPI_BACKEND_URL:-http://localhost:${BACKEND_PORT:-8929}}
+ NEXT_PUBLIC_ELECTRIC_URL: ${NEXT_PUBLIC_ELECTRIC_URL:-http://localhost:${ELECTRIC_PORT:-5929}}
NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE: ${AUTH_TYPE:-LOCAL}
NEXT_PUBLIC_ETL_SERVICE: ${ETL_SERVICE:-DOCLING}
NEXT_PUBLIC_DEPLOYMENT_MODE: ${DEPLOYMENT_MODE:-self-hosted}
diff --git a/docker/scripts/install.ps1 b/docker/scripts/install.ps1
index d9719d4ab..c5633d58c 100644
--- a/docker/scripts/install.ps1
+++ b/docker/scripts/install.ps1
@@ -320,9 +320,9 @@ Write-Host " Your personal AI-powered search engine [$versionDisplay]"
Write-Host ("=" * 62) -ForegroundColor Cyan
Write-Host ""
-Write-Info " Frontend: http://localhost:3000"
-Write-Info " Backend: http://localhost:8000"
-Write-Info " API Docs: http://localhost:8000/docs"
+Write-Info " Frontend: http://localhost:3929"
+Write-Info " Backend: http://localhost:8929"
+Write-Info " API Docs: http://localhost:8929/docs"
Write-Info ""
Write-Info " Config: $InstallDir\.env"
Write-Info " Logs: cd $InstallDir; docker compose logs -f"
diff --git a/docker/scripts/install.sh b/docker/scripts/install.sh
index f7729be00..84363c1c5 100644
--- a/docker/scripts/install.sh
+++ b/docker/scripts/install.sh
@@ -304,9 +304,9 @@ _version_display="${_version_display:-latest}"
printf " Your personal AI-powered search engine ${YELLOW}[%s]${NC}\n" "${_version_display}"
printf "${CYAN}══════════════════════════════════════════════════════════════${NC}\n\n"
-info " Frontend: http://localhost:3000"
-info " Backend: http://localhost:8000"
-info " API Docs: http://localhost:8000/docs"
+info " Frontend: http://localhost:3929"
+info " Backend: http://localhost:8929"
+info " API Docs: http://localhost:8929/docs"
info ""
info " Config: ${INSTALL_DIR}/.env"
info " Logs: cd ${INSTALL_DIR} && ${DC} logs -f"
From 4e0886e06d9d3828c4d9317113842fb416b10b40 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Tue, 10 Mar 2026 02:57:44 +0530
Subject: [PATCH 06/34] chore: rename services and volumes in
docker-compose.dev.yml for clarity and consistency in local development
environment
---
docker/docker-compose.dev.yml | 15 ++++++++-------
1 file changed, 8 insertions(+), 7 deletions(-)
diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml
index b76f26b2d..4d602f584 100644
--- a/docker/docker-compose.dev.yml
+++ b/docker/docker-compose.dev.yml
@@ -8,7 +8,7 @@
# For production with prebuilt images, use docker/docker-compose.yml instead.
# =============================================================================
-name: surfsense
+name: surfsense-dev
services:
db:
@@ -162,8 +162,9 @@ services:
image: electricsql/electric:1.4.10
ports:
- "${ELECTRIC_PORT:-5133}:3000"
- # depends_on:
- # - db
+ depends_on:
+ db:
+ condition: service_healthy
environment:
- DATABASE_URL=${ELECTRIC_DATABASE_URL:-postgresql://${ELECTRIC_DB_USER:-electric}:${ELECTRIC_DB_PASSWORD:-electric_password}@${DB_HOST:-db}:${DB_PORT:-5432}/${DB_NAME:-surfsense}?sslmode=${DB_SSLMODE:-disable}}
- ELECTRIC_INSECURE=true
@@ -197,10 +198,10 @@ services:
volumes:
postgres_data:
- name: surfsense-postgres
+ name: surfsense-dev-postgres
pgadmin_data:
- name: surfsense-pgadmin
+ name: surfsense-dev-pgadmin
redis_data:
- name: surfsense-redis
+ name: surfsense-dev-redis
shared_temp:
- name: surfsense-shared-temp
+ name: surfsense-dev-shared-temp
From 609086ecc88237d936a7e78f3fce666f7a2d6ddd Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Tue, 10 Mar 2026 03:01:26 +0530
Subject: [PATCH 07/34] chore: update Docker installation documentation and add
new installation options for improved user guidance
---
surfsense_web/app/sitemap.ts | 20 +-
.../content/docs/connectors/airtable.mdx | 2 +-
.../content/docs/connectors/clickup.mdx | 2 +-
.../content/docs/connectors/confluence.mdx | 2 +-
.../content/docs/connectors/discord.mdx | 2 +-
.../content/docs/connectors/gmail.mdx | 2 +-
.../docs/connectors/google-calendar.mdx | 2 +-
.../content/docs/connectors/google-drive.mdx | 2 +-
.../content/docs/connectors/jira.mdx | 2 +-
.../content/docs/connectors/linear.mdx | 2 +-
.../docs/connectors/microsoft-teams.mdx | 2 +-
.../content/docs/connectors/notion.mdx | 2 +-
.../content/docs/connectors/slack.mdx | 2 +-
.../content/docs/docker-installation.mdx | 301 ------------------
.../docs/docker-installation/dev-compose.mdx | 30 ++
.../docker-installation/docker-compose.mdx | 188 +++++++++++
.../docker-installation/install-script.mdx | 41 +++
.../docs/docker-installation/meta.json | 6 +
.../migrate-from-allinone.mdx | 0
.../docs/docker-installation/updating.mdx | 50 +++
.../content/docs/how-to/electric-sql.mdx | 80 ++---
surfsense_web/content/docs/how-to/meta.json | 2 +-
surfsense_web/content/docs/installation.mdx | 2 +-
23 files changed, 389 insertions(+), 355 deletions(-)
delete mode 100644 surfsense_web/content/docs/docker-installation.mdx
create mode 100644 surfsense_web/content/docs/docker-installation/dev-compose.mdx
create mode 100644 surfsense_web/content/docs/docker-installation/docker-compose.mdx
create mode 100644 surfsense_web/content/docs/docker-installation/install-script.mdx
create mode 100644 surfsense_web/content/docs/docker-installation/meta.json
rename surfsense_web/content/docs/{how-to => docker-installation}/migrate-from-allinone.mdx (100%)
create mode 100644 surfsense_web/content/docs/docker-installation/updating.mdx
diff --git a/surfsense_web/app/sitemap.ts b/surfsense_web/app/sitemap.ts
index 2f59c2b2d..414b41719 100644
--- a/surfsense_web/app/sitemap.ts
+++ b/surfsense_web/app/sitemap.ts
@@ -55,7 +55,25 @@ export default function sitemap(): MetadataRoute.Sitemap {
priority: 0.9,
},
{
- url: "https://www.surfsense.com/docs/docker-installation",
+ url: "https://www.surfsense.com/docs/docker-installation/install-script",
+ lastModified,
+ changeFrequency: "daily",
+ priority: 0.9,
+ },
+ {
+ url: "https://www.surfsense.com/docs/docker-installation/docker-compose",
+ lastModified,
+ changeFrequency: "daily",
+ priority: 0.9,
+ },
+ {
+ url: "https://www.surfsense.com/docs/docker-installation/updating",
+ lastModified,
+ changeFrequency: "daily",
+ priority: 0.9,
+ },
+ {
+ url: "https://www.surfsense.com/docs/docker-installation/dev-compose",
lastModified,
changeFrequency: "daily",
priority: 0.9,
diff --git a/surfsense_web/content/docs/connectors/airtable.mdx b/surfsense_web/content/docs/connectors/airtable.mdx
index 71148335c..e948e9dd3 100644
--- a/surfsense_web/content/docs/connectors/airtable.mdx
+++ b/surfsense_web/content/docs/connectors/airtable.mdx
@@ -88,7 +88,7 @@ After saving, you'll find your OAuth credentials on the integration page:
## Running SurfSense with Airtable Connector
-Add the Airtable credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Airtable credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
AIRTABLE_CLIENT_ID=your_airtable_client_id
diff --git a/surfsense_web/content/docs/connectors/clickup.mdx b/surfsense_web/content/docs/connectors/clickup.mdx
index 768bca859..31709dfdd 100644
--- a/surfsense_web/content/docs/connectors/clickup.mdx
+++ b/surfsense_web/content/docs/connectors/clickup.mdx
@@ -44,7 +44,7 @@ After creating the app, you'll see your credentials:
## Running SurfSense with ClickUp Connector
-Add the ClickUp credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the ClickUp credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
CLICKUP_CLIENT_ID=your_clickup_client_id
diff --git a/surfsense_web/content/docs/connectors/confluence.mdx b/surfsense_web/content/docs/connectors/confluence.mdx
index 3ee3394a4..08effa104 100644
--- a/surfsense_web/content/docs/connectors/confluence.mdx
+++ b/surfsense_web/content/docs/connectors/confluence.mdx
@@ -97,7 +97,7 @@ Select the **"Granular scopes"** tab and enable:
## Running SurfSense with Confluence Connector
-Add the Atlassian credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Atlassian credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
ATLASSIAN_CLIENT_ID=your_atlassian_client_id
diff --git a/surfsense_web/content/docs/connectors/discord.mdx b/surfsense_web/content/docs/connectors/discord.mdx
index 05825e0ea..cef0c2d10 100644
--- a/surfsense_web/content/docs/connectors/discord.mdx
+++ b/surfsense_web/content/docs/connectors/discord.mdx
@@ -64,7 +64,7 @@ You'll also see your **Application ID** and **Public Key** on this page.
## Running SurfSense with Discord Connector
-Add the Discord credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Discord credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
DISCORD_CLIENT_ID=your_discord_client_id
diff --git a/surfsense_web/content/docs/connectors/gmail.mdx b/surfsense_web/content/docs/connectors/gmail.mdx
index 1b3f81efe..4b7e93c49 100644
--- a/surfsense_web/content/docs/connectors/gmail.mdx
+++ b/surfsense_web/content/docs/connectors/gmail.mdx
@@ -70,7 +70,7 @@ This guide walks you through setting up a Google OAuth 2.0 integration for SurfS
## Running SurfSense with Gmail Connector
-Add the Google OAuth credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Google OAuth credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
GOOGLE_OAUTH_CLIENT_ID=your_google_client_id
diff --git a/surfsense_web/content/docs/connectors/google-calendar.mdx b/surfsense_web/content/docs/connectors/google-calendar.mdx
index 481b05444..f0745c8a0 100644
--- a/surfsense_web/content/docs/connectors/google-calendar.mdx
+++ b/surfsense_web/content/docs/connectors/google-calendar.mdx
@@ -69,7 +69,7 @@ This guide walks you through setting up a Google OAuth 2.0 integration for SurfS
## Running SurfSense with Google Calendar Connector
-Add the Google OAuth credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Google OAuth credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
GOOGLE_OAUTH_CLIENT_ID=your_google_client_id
diff --git a/surfsense_web/content/docs/connectors/google-drive.mdx b/surfsense_web/content/docs/connectors/google-drive.mdx
index 238100860..1193b860b 100644
--- a/surfsense_web/content/docs/connectors/google-drive.mdx
+++ b/surfsense_web/content/docs/connectors/google-drive.mdx
@@ -70,7 +70,7 @@ This guide walks you through setting up a Google OAuth 2.0 integration for SurfS
## Running SurfSense with Google Drive Connector
-Add the Google OAuth credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Google OAuth credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
GOOGLE_OAUTH_CLIENT_ID=your_google_client_id
diff --git a/surfsense_web/content/docs/connectors/jira.mdx b/surfsense_web/content/docs/connectors/jira.mdx
index 5bddbab8d..ddbc70a8f 100644
--- a/surfsense_web/content/docs/connectors/jira.mdx
+++ b/surfsense_web/content/docs/connectors/jira.mdx
@@ -84,7 +84,7 @@ This guide walks you through setting up an Atlassian OAuth 2.0 (3LO) integration
## Running SurfSense with Jira Connector
-Add the Atlassian credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Atlassian credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
ATLASSIAN_CLIENT_ID=your_atlassian_client_id
diff --git a/surfsense_web/content/docs/connectors/linear.mdx b/surfsense_web/content/docs/connectors/linear.mdx
index 3fd82aba1..1dd5af9d5 100644
--- a/surfsense_web/content/docs/connectors/linear.mdx
+++ b/surfsense_web/content/docs/connectors/linear.mdx
@@ -53,7 +53,7 @@ After creating the application, you'll see your OAuth credentials:
## Running SurfSense with Linear Connector
-Add the Linear credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Linear credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
LINEAR_CLIENT_ID=your_linear_client_id
diff --git a/surfsense_web/content/docs/connectors/microsoft-teams.mdx b/surfsense_web/content/docs/connectors/microsoft-teams.mdx
index 5a05be709..aba64da20 100644
--- a/surfsense_web/content/docs/connectors/microsoft-teams.mdx
+++ b/surfsense_web/content/docs/connectors/microsoft-teams.mdx
@@ -90,7 +90,7 @@ After registration, you'll be taken to the app's **Overview** page. Here you'll
## Running SurfSense with Microsoft Teams Connector
-Add the Microsoft Teams credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Microsoft Teams credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
TEAMS_CLIENT_ID=your_microsoft_client_id
diff --git a/surfsense_web/content/docs/connectors/notion.mdx b/surfsense_web/content/docs/connectors/notion.mdx
index ca5856340..99c95d8bd 100644
--- a/surfsense_web/content/docs/connectors/notion.mdx
+++ b/surfsense_web/content/docs/connectors/notion.mdx
@@ -91,7 +91,7 @@ For additional information:
## Running SurfSense with Notion Connector
-Add the Notion credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Notion credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
NOTION_OAUTH_CLIENT_ID=your_notion_client_id
diff --git a/surfsense_web/content/docs/connectors/slack.mdx b/surfsense_web/content/docs/connectors/slack.mdx
index af38487cc..f68d6e875 100644
--- a/surfsense_web/content/docs/connectors/slack.mdx
+++ b/surfsense_web/content/docs/connectors/slack.mdx
@@ -80,7 +80,7 @@ Click **"Add an OAuth Scope"** to add each scope.
## Running SurfSense with Slack Connector
-Add the Slack credentials to your `.env` file (created during [Docker installation](/docs/docker-installation)):
+Add the Slack credentials to your `.env` file (created during [Docker installation](/docs/docker-installation/docker-compose)):
```bash
SLACK_CLIENT_ID=your_slack_client_id
diff --git a/surfsense_web/content/docs/docker-installation.mdx b/surfsense_web/content/docs/docker-installation.mdx
deleted file mode 100644
index 043405609..000000000
--- a/surfsense_web/content/docs/docker-installation.mdx
+++ /dev/null
@@ -1,301 +0,0 @@
----
-title: Docker Installation
-description: Setting up SurfSense using Docker
-icon: Container
----
-
-This guide explains how to run SurfSense using Docker, with options ranging from a single-command install to a fully manual setup.
-
-## Quick Start
-
-### Option 1 — Install Script (recommended)
-
-Downloads the compose files, generates a `SECRET_KEY`, starts all services, and sets up [Watchtower](https://github.com/nicholas-fedor/watchtower) for automatic daily updates.
-
-**Prerequisites:** [Docker Desktop](https://www.docker.com/products/docker-desktop/) must be installed and running.
-
-#### For Linux/macOS users:
-
-```bash
-curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.sh | bash
-```
-
-#### For Windows users (PowerShell):
-
-```powershell
-irm https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.ps1 | iex
-```
-
-This creates a `./surfsense/` directory with `docker-compose.yml` and `.env`, then runs `docker compose up -d`.
-
-To skip Watchtower (e.g. in production where you manage updates yourself):
-
-```bash
-curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.sh | bash -s -- --no-watchtower
-```
-
-To customise the check interval (default 24h), use `--watchtower-interval=SECONDS`.
-
-### Option 2 — Manual Docker Compose
-
-```bash
-git clone https://github.com/MODSetter/SurfSense.git
-cd SurfSense/docker
-cp .env.example .env
-# Edit .env — at minimum set SECRET_KEY
-docker compose up -d
-```
-
-After starting, access SurfSense at:
-
-- **Frontend**: [http://localhost:3000](http://localhost:3000)
-- **Backend API**: [http://localhost:8000](http://localhost:8000)
-- **API Docs**: [http://localhost:8000/docs](http://localhost:8000/docs)
-- **Electric SQL**: [http://localhost:5133](http://localhost:5133)
-
----
-
-## Updating
-
-**Option 1 — Watchtower daemon (recommended, auto-updates every 24 h):**
-
-If you used the install script (Option 1 above), Watchtower is already running. No extra setup needed.
-
-For manual Docker Compose installs (Option 2), start Watchtower separately:
-
-```bash
-docker run -d --name watchtower \
- --restart unless-stopped \
- -v /var/run/docker.sock:/var/run/docker.sock \
- nickfedor/watchtower \
- --label-enable \
- --interval 86400
-```
-
-**Option 2 — Watchtower one-time update:**
-
-```bash
-docker run --rm -v /var/run/docker.sock:/var/run/docker.sock \
- nickfedor/watchtower --run-once \
- --label-filter "com.docker.compose.project=surfsense"
-```
-
-
-Use `nickfedor/watchtower`. The original `containrrr/watchtower` is no longer maintained and may fail with newer Docker versions.
-
-
-**Option 3 — Manual:**
-
-```bash
-cd surfsense # or SurfSense/docker if you cloned manually
-docker compose pull && docker compose up -d
-```
-
-Database migrations are applied automatically on every startup.
-
----
-
-## Configuration
-
-All configuration lives in a single `docker/.env` file (or `surfsense/.env` if you used the install script). Copy `.env.example` to `.env` and edit the values you need.
-
-### Required
-
-| Variable | Description |
-|----------|-------------|
-| `SECRET_KEY` | JWT secret key. Generate with: `openssl rand -base64 32`. Auto-generated by the install script. |
-
-### Core Settings
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `SURFSENSE_VERSION` | Image tag to deploy. Use `latest`, a clean version (e.g. `0.0.14`), or a specific build (e.g. `0.0.14.1`) | `latest` |
-| `AUTH_TYPE` | Authentication method: `LOCAL` (email/password) or `GOOGLE` (OAuth) | `LOCAL` |
-| `ETL_SERVICE` | Document parsing: `DOCLING` (local), `UNSTRUCTURED`, or `LLAMACLOUD` | `DOCLING` |
-| `EMBEDDING_MODEL` | Embedding model for vector search | `sentence-transformers/all-MiniLM-L6-v2` |
-| `TTS_SERVICE` | Text-to-speech provider for podcasts | `local/kokoro` |
-| `STT_SERVICE` | Speech-to-text provider for audio files | `local/base` |
-| `REGISTRATION_ENABLED` | Allow new user registrations | `TRUE` |
-
-### Ports
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `FRONTEND_PORT` | Frontend service port | `3000` |
-| `BACKEND_PORT` | Backend API service port | `8000` |
-| `ELECTRIC_PORT` | Electric SQL service port | `5133` |
-
-### Custom Domain / Reverse Proxy
-
-Only set these if serving SurfSense on a real domain via a reverse proxy (Caddy, Nginx, Cloudflare Tunnel, etc.). Leave commented out for standard localhost deployments.
-
-| Variable | Description |
-|----------|-------------|
-| `NEXT_FRONTEND_URL` | Public frontend URL (e.g. `https://app.yourdomain.com`) |
-| `BACKEND_URL` | Public backend URL for OAuth callbacks (e.g. `https://api.yourdomain.com`) |
-| `NEXT_PUBLIC_FASTAPI_BACKEND_URL` | Backend URL used by the frontend (e.g. `https://api.yourdomain.com`) |
-| `NEXT_PUBLIC_ELECTRIC_URL` | Electric SQL URL used by the frontend (e.g. `https://electric.yourdomain.com`) |
-
-### Database
-
-Defaults work out of the box. Change for security in production.
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `DB_USER` | PostgreSQL username | `surfsense` |
-| `DB_PASSWORD` | PostgreSQL password | `surfsense` |
-| `DB_NAME` | PostgreSQL database name | `surfsense` |
-| `DB_HOST` | PostgreSQL host | `db` |
-| `DB_PORT` | PostgreSQL port | `5432` |
-| `DB_SSLMODE` | SSL mode: `disable`, `require`, `verify-ca`, `verify-full` | `disable` |
-| `DATABASE_URL` | Full connection URL override. Use for managed databases (RDS, Supabase, etc.) | *(built from above)* |
-
-### Electric SQL
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `ELECTRIC_DB_USER` | Replication user for Electric SQL | `electric` |
-| `ELECTRIC_DB_PASSWORD` | Replication password for Electric SQL | `electric_password` |
-| `ELECTRIC_DATABASE_URL` | Full connection URL override for Electric. Set to `host.docker.internal` when pointing at a local Postgres instance | *(built from above)* |
-
-### Authentication
-
-| Variable | Description |
-|----------|-------------|
-| `GOOGLE_OAUTH_CLIENT_ID` | Google OAuth client ID (required if `AUTH_TYPE=GOOGLE`) |
-| `GOOGLE_OAUTH_CLIENT_SECRET` | Google OAuth client secret (required if `AUTH_TYPE=GOOGLE`) |
-
-Create credentials at the [Google Cloud Console](https://console.cloud.google.com/apis/credentials).
-
-### External API Keys
-
-| Variable | Description |
-|----------|-------------|
-| `FIRECRAWL_API_KEY` | Firecrawl API key for web crawling |
-| `UNSTRUCTURED_API_KEY` | Unstructured.io API key (required if `ETL_SERVICE=UNSTRUCTURED`) |
-| `LLAMA_CLOUD_API_KEY` | LlamaCloud API key (required if `ETL_SERVICE=LLAMACLOUD`) |
-
-### Connector OAuth Keys
-
-Uncomment the connectors you want to use. Redirect URIs follow the pattern `http://localhost:8000/api/v1/auth//connector/callback`.
-
-| Connector | Variables |
-|-----------|-----------|
-| Google Drive / Gmail / Calendar | `GOOGLE_DRIVE_REDIRECT_URI`, `GOOGLE_GMAIL_REDIRECT_URI`, `GOOGLE_CALENDAR_REDIRECT_URI` |
-| Notion | `NOTION_CLIENT_ID`, `NOTION_CLIENT_SECRET`, `NOTION_REDIRECT_URI` |
-| Slack | `SLACK_CLIENT_ID`, `SLACK_CLIENT_SECRET`, `SLACK_REDIRECT_URI` |
-| Discord | `DISCORD_CLIENT_ID`, `DISCORD_CLIENT_SECRET`, `DISCORD_BOT_TOKEN`, `DISCORD_REDIRECT_URI` |
-| Jira & Confluence | `ATLASSIAN_CLIENT_ID`, `ATLASSIAN_CLIENT_SECRET`, `JIRA_REDIRECT_URI`, `CONFLUENCE_REDIRECT_URI` |
-| Linear | `LINEAR_CLIENT_ID`, `LINEAR_CLIENT_SECRET`, `LINEAR_REDIRECT_URI` |
-| ClickUp | `CLICKUP_CLIENT_ID`, `CLICKUP_CLIENT_SECRET`, `CLICKUP_REDIRECT_URI` |
-| Airtable | `AIRTABLE_CLIENT_ID`, `AIRTABLE_CLIENT_SECRET`, `AIRTABLE_REDIRECT_URI` |
-| Microsoft Teams | `TEAMS_CLIENT_ID`, `TEAMS_CLIENT_SECRET`, `TEAMS_REDIRECT_URI` |
-
-For Airtable, create an OAuth integration at the [Airtable Developer Hub](https://airtable.com/create/oauth).
-
-### Observability (optional)
-
-| Variable | Description |
-|----------|-------------|
-| `LANGSMITH_TRACING` | Enable LangSmith tracing (`true` / `false`) |
-| `LANGSMITH_ENDPOINT` | LangSmith API endpoint |
-| `LANGSMITH_API_KEY` | LangSmith API key |
-| `LANGSMITH_PROJECT` | LangSmith project name |
-
-### Advanced (optional)
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `SCHEDULE_CHECKER_INTERVAL` | How often to check for scheduled connector tasks (e.g. `5m`, `1h`) | `5m` |
-| `RERANKERS_ENABLED` | Enable document reranking for improved search | `FALSE` |
-| `RERANKERS_MODEL_NAME` | Reranker model name (e.g. `ms-marco-MiniLM-L-12-v2`) | |
-| `RERANKERS_MODEL_TYPE` | Reranker model type (e.g. `flashrank`) | |
-| `PAGES_LIMIT` | Max pages per user for ETL services | unlimited |
-
----
-
-## Docker Services
-
-| Service | Description |
-|---------|-------------|
-| `db` | PostgreSQL with pgvector extension |
-| `redis` | Message broker for Celery |
-| `backend` | FastAPI application server |
-| `celery_worker` | Background task processing (document indexing, etc.) |
-| `celery_beat` | Periodic task scheduler (connector sync) |
-| `electric` | Electric SQL — real-time sync for the frontend |
-| `frontend` | Next.js web application |
-
-All services start automatically with `docker compose up -d`.
-
-The backend includes a health check — dependent services (workers, frontend) wait until the API is fully ready before starting. You can monitor startup progress with `docker compose ps` (look for `(health: starting)` → `(healthy)`).
-
----
-
-## Development Compose File
-
-If you're contributing to SurfSense and want to build from source, use `docker-compose.dev.yml` instead:
-
-```bash
-cd SurfSense/docker
-docker compose -f docker-compose.dev.yml up --build
-```
-
-This file builds the backend and frontend from your local source code (instead of pulling prebuilt images) and includes pgAdmin for database inspection at [http://localhost:5050](http://localhost:5050). Use the production `docker-compose.yml` for all other cases.
-
-The following `.env` variables are **only used by the dev compose file** (they have no effect on the production `docker-compose.yml`):
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `PGADMIN_PORT` | pgAdmin web UI port | `5050` |
-| `PGADMIN_DEFAULT_EMAIL` | pgAdmin login email | `admin@surfsense.com` |
-| `PGADMIN_DEFAULT_PASSWORD` | pgAdmin login password | `surfsense` |
-| `REDIS_PORT` | Exposed Redis port (internal-only in prod) | `6379` |
-| `NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE` | Frontend build arg for auth type | `LOCAL` |
-| `NEXT_PUBLIC_ETL_SERVICE` | Frontend build arg for ETL service | `DOCLING` |
-| `NEXT_PUBLIC_DEPLOYMENT_MODE` | Frontend build arg for deployment mode | `self-hosted` |
-| `NEXT_PUBLIC_ELECTRIC_AUTH_MODE` | Frontend build arg for Electric auth | `insecure` |
-
-In the production compose file, the `NEXT_PUBLIC_*` frontend variables are automatically derived from `AUTH_TYPE`, `ETL_SERVICE`, and the port settings. In the dev compose file, they are passed as build args since the frontend is built from source.
-
----
-
-## Migrating from the All-in-One Container
-
-
-If you were previously using `docker-compose.quickstart.yml` (the legacy all-in-one `surfsense` container), your data lives in a `surfsense-data` volume and requires a **one-time migration** before switching to the current setup. PostgreSQL has been upgraded from version 14 to 17, so a simple volume swap will not work.
-
-See the full step-by-step guide: [Migrate from the All-in-One Container](/docs/how-to/migrate-from-allinone).
-
-
----
-
-## Useful Commands
-
-```bash
-# View logs (all services)
-docker compose logs -f
-
-# View logs for a specific service
-docker compose logs -f backend
-docker compose logs -f electric
-
-# Stop all services
-docker compose down
-
-# Restart a specific service
-docker compose restart backend
-
-# Stop and remove all containers + volumes (destructive!)
-docker compose down -v
-```
-
----
-
-## Troubleshooting
-
-- **Ports already in use** — Change the relevant `*_PORT` variable in `.env` and restart.
-- **Permission errors on Linux** — You may need to prefix `docker` commands with `sudo`.
-- **Electric SQL not connecting** — Check `docker compose logs electric`. If it shows `domain does not exist: db`, ensure `ELECTRIC_DATABASE_URL` is not set to a stale value in `.env`.
-- **Real-time updates not working in browser** — Open DevTools → Console and look for `[Electric]` errors. Check that `NEXT_PUBLIC_ELECTRIC_URL` matches the running Electric SQL address.
-- **Line ending issues on Windows** — Run `git config --global core.autocrlf true` before cloning.
diff --git a/surfsense_web/content/docs/docker-installation/dev-compose.mdx b/surfsense_web/content/docs/docker-installation/dev-compose.mdx
new file mode 100644
index 000000000..19b76eb7d
--- /dev/null
+++ b/surfsense_web/content/docs/docker-installation/dev-compose.mdx
@@ -0,0 +1,30 @@
+---
+title: Development Compose
+description: Building SurfSense from source using docker-compose.dev.yml
+---
+
+If you're contributing to SurfSense and want to build from source, use `docker-compose.dev.yml` instead:
+
+```bash
+cd SurfSense/docker
+docker compose -f docker-compose.dev.yml up --build
+```
+
+This file builds the backend and frontend from your local source code (instead of pulling prebuilt images) and includes pgAdmin for database inspection at [http://localhost:5050](http://localhost:5050). Use the production `docker-compose.yml` for all other cases.
+
+## Dev-Only Environment Variables
+
+The following `.env` variables are **only used by the dev compose file** (they have no effect on the production `docker-compose.yml`):
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `PGADMIN_PORT` | pgAdmin web UI port | `5050` |
+| `PGADMIN_DEFAULT_EMAIL` | pgAdmin login email | `admin@surfsense.com` |
+| `PGADMIN_DEFAULT_PASSWORD` | pgAdmin login password | `surfsense` |
+| `REDIS_PORT` | Exposed Redis port (internal-only in prod) | `6379` |
+| `NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE` | Frontend build arg for auth type | `LOCAL` |
+| `NEXT_PUBLIC_ETL_SERVICE` | Frontend build arg for ETL service | `DOCLING` |
+| `NEXT_PUBLIC_DEPLOYMENT_MODE` | Frontend build arg for deployment mode | `self-hosted` |
+| `NEXT_PUBLIC_ELECTRIC_AUTH_MODE` | Frontend build arg for Electric auth | `insecure` |
+
+In the production compose file, the `NEXT_PUBLIC_*` frontend variables are automatically derived from `AUTH_TYPE`, `ETL_SERVICE`, and the port settings. In the dev compose file, they are passed as build args since the frontend is built from source.
diff --git a/surfsense_web/content/docs/docker-installation/docker-compose.mdx b/surfsense_web/content/docs/docker-installation/docker-compose.mdx
new file mode 100644
index 000000000..c56f08106
--- /dev/null
+++ b/surfsense_web/content/docs/docker-installation/docker-compose.mdx
@@ -0,0 +1,188 @@
+---
+title: Docker Compose
+description: Manual Docker Compose setup for SurfSense
+---
+
+## Setup
+
+```bash
+git clone https://github.com/MODSetter/SurfSense.git
+cd SurfSense/docker
+cp .env.example .env
+# Edit .env, at minimum set SECRET_KEY
+docker compose up -d
+```
+
+After starting, access SurfSense at:
+
+- **Frontend**: [http://localhost:3929](http://localhost:3929)
+- **Backend API**: [http://localhost:8929](http://localhost:8929)
+- **API Docs**: [http://localhost:8929/docs](http://localhost:8929/docs)
+- **Electric SQL**: [http://localhost:5929](http://localhost:5929)
+
+---
+
+## Configuration
+
+All configuration lives in a single `docker/.env` file (or `surfsense/.env` if you used the install script). Copy `.env.example` to `.env` and edit the values you need.
+
+### Required
+
+| Variable | Description |
+|----------|-------------|
+| `SECRET_KEY` | JWT secret key. Generate with: `openssl rand -base64 32`. Auto-generated by the install script. |
+
+### Core Settings
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `SURFSENSE_VERSION` | Image tag to deploy. Use `latest`, a clean version (e.g. `0.0.14`), or a specific build (e.g. `0.0.14.1`) | `latest` |
+| `AUTH_TYPE` | Authentication method: `LOCAL` (email/password) or `GOOGLE` (OAuth) | `LOCAL` |
+| `ETL_SERVICE` | Document parsing: `DOCLING` (local), `UNSTRUCTURED`, or `LLAMACLOUD` | `DOCLING` |
+| `EMBEDDING_MODEL` | Embedding model for vector search | `sentence-transformers/all-MiniLM-L6-v2` |
+| `TTS_SERVICE` | Text-to-speech provider for podcasts | `local/kokoro` |
+| `STT_SERVICE` | Speech-to-text provider for audio files | `local/base` |
+| `REGISTRATION_ENABLED` | Allow new user registrations | `TRUE` |
+
+### Ports
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `FRONTEND_PORT` | Frontend service port | `3929` |
+| `BACKEND_PORT` | Backend API service port | `8929` |
+| `ELECTRIC_PORT` | Electric SQL service port | `5929` |
+
+### Custom Domain / Reverse Proxy
+
+Only set these if serving SurfSense on a real domain via a reverse proxy (Caddy, Nginx, Cloudflare Tunnel, etc.). Leave commented out for standard localhost deployments.
+
+| Variable | Description |
+|----------|-------------|
+| `NEXT_FRONTEND_URL` | Public frontend URL (e.g. `https://app.yourdomain.com`) |
+| `BACKEND_URL` | Public backend URL for OAuth callbacks (e.g. `https://api.yourdomain.com`) |
+| `NEXT_PUBLIC_FASTAPI_BACKEND_URL` | Backend URL used by the frontend (e.g. `https://api.yourdomain.com`) |
+| `NEXT_PUBLIC_ELECTRIC_URL` | Electric SQL URL used by the frontend (e.g. `https://electric.yourdomain.com`) |
+
+### Database
+
+Defaults work out of the box. Change for security in production.
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `DB_USER` | PostgreSQL username | `surfsense` |
+| `DB_PASSWORD` | PostgreSQL password | `surfsense` |
+| `DB_NAME` | PostgreSQL database name | `surfsense` |
+| `DB_HOST` | PostgreSQL host | `db` |
+| `DB_PORT` | PostgreSQL port | `5432` |
+| `DB_SSLMODE` | SSL mode: `disable`, `require`, `verify-ca`, `verify-full` | `disable` |
+| `DATABASE_URL` | Full connection URL override. Use for managed databases (RDS, Supabase, etc.) | *(built from above)* |
+
+### Electric SQL
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `ELECTRIC_DB_USER` | Replication user for Electric SQL | `electric` |
+| `ELECTRIC_DB_PASSWORD` | Replication password for Electric SQL | `electric_password` |
+| `ELECTRIC_DATABASE_URL` | Full connection URL override for Electric. Set to `host.docker.internal` when pointing at a local Postgres instance | *(built from above)* |
+
+### Authentication
+
+| Variable | Description |
+|----------|-------------|
+| `GOOGLE_OAUTH_CLIENT_ID` | Google OAuth client ID (required if `AUTH_TYPE=GOOGLE`) |
+| `GOOGLE_OAUTH_CLIENT_SECRET` | Google OAuth client secret (required if `AUTH_TYPE=GOOGLE`) |
+
+Create credentials at the [Google Cloud Console](https://console.cloud.google.com/apis/credentials).
+
+### External API Keys
+
+| Variable | Description |
+|----------|-------------|
+| `FIRECRAWL_API_KEY` | [Firecrawl](https://www.firecrawl.dev/) API key for web crawling |
+| `UNSTRUCTURED_API_KEY` | [Unstructured.io](https://unstructured.io/) API key (required if `ETL_SERVICE=UNSTRUCTURED`) |
+| `LLAMA_CLOUD_API_KEY` | [LlamaCloud](https://cloud.llamaindex.ai/) API key (required if `ETL_SERVICE=LLAMACLOUD`) |
+
+### Connector OAuth Keys
+
+Uncomment the connectors you want to use. Redirect URIs follow the pattern `http://localhost:8929/api/v1/auth/{connector}/connector/callback`, where `{connector}` is the connector's name (e.g. `slack`, `notion`).
+
+| Connector | Variables |
+|-----------|-----------|
+| Google Drive / Gmail / Calendar | `GOOGLE_DRIVE_REDIRECT_URI`, `GOOGLE_GMAIL_REDIRECT_URI`, `GOOGLE_CALENDAR_REDIRECT_URI` |
+| Notion | `NOTION_CLIENT_ID`, `NOTION_CLIENT_SECRET`, `NOTION_REDIRECT_URI` |
+| Slack | `SLACK_CLIENT_ID`, `SLACK_CLIENT_SECRET`, `SLACK_REDIRECT_URI` |
+| Discord | `DISCORD_CLIENT_ID`, `DISCORD_CLIENT_SECRET`, `DISCORD_BOT_TOKEN`, `DISCORD_REDIRECT_URI` |
+| Jira & Confluence | `ATLASSIAN_CLIENT_ID`, `ATLASSIAN_CLIENT_SECRET`, `JIRA_REDIRECT_URI`, `CONFLUENCE_REDIRECT_URI` |
+| Linear | `LINEAR_CLIENT_ID`, `LINEAR_CLIENT_SECRET`, `LINEAR_REDIRECT_URI` |
+| ClickUp | `CLICKUP_CLIENT_ID`, `CLICKUP_CLIENT_SECRET`, `CLICKUP_REDIRECT_URI` |
+| Airtable | `AIRTABLE_CLIENT_ID`, `AIRTABLE_CLIENT_SECRET`, `AIRTABLE_REDIRECT_URI` |
+| Microsoft Teams | `TEAMS_CLIENT_ID`, `TEAMS_CLIENT_SECRET`, `TEAMS_REDIRECT_URI` |
+
+### Observability (optional)
+
+| Variable | Description |
+|----------|-------------|
+| `LANGSMITH_TRACING` | Enable LangSmith tracing (`true` / `false`) |
+| `LANGSMITH_ENDPOINT` | LangSmith API endpoint |
+| `LANGSMITH_API_KEY` | LangSmith API key |
+| `LANGSMITH_PROJECT` | LangSmith project name |
+
+### Advanced (optional)
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `SCHEDULE_CHECKER_INTERVAL` | How often to check for scheduled connector tasks (e.g. `5m`, `1h`) | `5m` |
+| `RERANKERS_ENABLED` | Enable document reranking for improved search | `FALSE` |
+| `RERANKERS_MODEL_NAME` | Reranker model name (e.g. `ms-marco-MiniLM-L-12-v2`) | |
+| `RERANKERS_MODEL_TYPE` | Reranker model type (e.g. `flashrank`) | |
+| `PAGES_LIMIT` | Max pages per user for ETL services | unlimited |
+
+---
+
+## Docker Services
+
+| Service | Description |
+|---------|-------------|
+| `db` | PostgreSQL with pgvector extension |
+| `redis` | Message broker for Celery |
+| `backend` | FastAPI application server |
+| `celery_worker` | Background task processing (document indexing, etc.) |
+| `celery_beat` | Periodic task scheduler (connector sync) |
+| `electric` | Electric SQL (real-time sync for the frontend) |
+| `frontend` | Next.js web application |
+
+All services start automatically with `docker compose up -d`.
+
+The backend includes a health check. Dependent services (workers, frontend) wait until the API is fully ready before starting. You can monitor startup progress with `docker compose ps` (look for `(health: starting)` → `(healthy)`).
+
+---
+
+## Useful Commands
+
+```bash
+# View logs (all services)
+docker compose logs -f
+
+# View logs for a specific service
+docker compose logs -f backend
+docker compose logs -f electric
+
+# Stop all services
+docker compose down
+
+# Restart a specific service
+docker compose restart backend
+
+# Stop and remove all containers + volumes (destructive!)
+docker compose down -v
+```
+
+---
+
+## Troubleshooting
+
+- **Ports already in use**: Change the relevant `*_PORT` variable in `.env` and restart.
+- **Permission errors on Linux**: You may need to prefix `docker` commands with `sudo`.
+- **Electric SQL not connecting**: Check `docker compose logs electric`. If it shows `domain does not exist: db`, ensure `ELECTRIC_DATABASE_URL` is not set to a stale value in `.env`.
+- **Real-time updates not working in browser**: Open DevTools → Console and look for `[Electric]` errors. Check that `NEXT_PUBLIC_ELECTRIC_URL` matches the running Electric SQL address.
+- **Line ending issues on Windows**: Run `git config --global core.autocrlf true` before cloning.
diff --git a/surfsense_web/content/docs/docker-installation/install-script.mdx b/surfsense_web/content/docs/docker-installation/install-script.mdx
new file mode 100644
index 000000000..d68938a3f
--- /dev/null
+++ b/surfsense_web/content/docs/docker-installation/install-script.mdx
@@ -0,0 +1,41 @@
+---
+title: Install Script
+description: One-command installation of SurfSense using Docker
+---
+
+Downloads the compose files, generates a `SECRET_KEY`, starts all services, and sets up [Watchtower](https://github.com/nicholas-fedor/watchtower) for automatic daily updates.
+
+**Prerequisites:** [Docker Desktop](https://www.docker.com/products/docker-desktop/) must be installed and running.
+
+### For Linux/macOS users:
+
+```bash
+curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.sh | bash
+```
+
+### For Windows users (PowerShell):
+
+```powershell
+irm https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.ps1 | iex
+```
+
+This creates a `./surfsense/` directory with `docker-compose.yml` and `.env`, then runs `docker compose up -d`.
+
+To skip Watchtower (e.g. in production where you manage updates yourself):
+
+```bash
+curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.sh | bash -s -- --no-watchtower
+```
+
+To customise the check interval (default 24h), use `--watchtower-interval=SECONDS`.
+
+---
+
+## Access SurfSense
+
+After starting, access SurfSense at:
+
+- **Frontend**: [http://localhost:3929](http://localhost:3929)
+- **Backend API**: [http://localhost:8929](http://localhost:8929)
+- **API Docs**: [http://localhost:8929/docs](http://localhost:8929/docs)
+- **Electric SQL**: [http://localhost:5929](http://localhost:5929)
diff --git a/surfsense_web/content/docs/docker-installation/meta.json b/surfsense_web/content/docs/docker-installation/meta.json
new file mode 100644
index 000000000..13683547b
--- /dev/null
+++ b/surfsense_web/content/docs/docker-installation/meta.json
@@ -0,0 +1,6 @@
+{
+ "title": "Docker Installation",
+ "pages": ["install-script", "docker-compose", "updating", "dev-compose", "migrate-from-allinone"],
+ "icon": "Container",
+ "defaultOpen": false
+}
diff --git a/surfsense_web/content/docs/how-to/migrate-from-allinone.mdx b/surfsense_web/content/docs/docker-installation/migrate-from-allinone.mdx
similarity index 100%
rename from surfsense_web/content/docs/how-to/migrate-from-allinone.mdx
rename to surfsense_web/content/docs/docker-installation/migrate-from-allinone.mdx
diff --git a/surfsense_web/content/docs/docker-installation/updating.mdx b/surfsense_web/content/docs/docker-installation/updating.mdx
new file mode 100644
index 000000000..6ef2fcecc
--- /dev/null
+++ b/surfsense_web/content/docs/docker-installation/updating.mdx
@@ -0,0 +1,50 @@
+---
+title: Updating
+description: How to update your SurfSense Docker deployment
+---
+
+## Watchtower Daemon (recommended)
+
+Auto-updates every 24 hours. If you used the [install script](/docs/docker-installation/install-script), Watchtower is already running. No extra setup needed.
+
+For [manual Docker Compose](/docs/docker-installation/docker-compose) installs, start Watchtower separately:
+
+```bash
+docker run -d --name watchtower \
+ --restart unless-stopped \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ nickfedor/watchtower \
+ --label-enable \
+ --interval 86400
+```
+
+## Watchtower One-Time Update
+
+```bash
+docker run --rm -v /var/run/docker.sock:/var/run/docker.sock \
+ nickfedor/watchtower --run-once \
+ --label-filter "com.docker.compose.project=surfsense"
+```
+
+
+Use `nickfedor/watchtower`. The original `containrrr/watchtower` is no longer maintained and may fail with newer Docker versions.
+
+
+## Manual Update
+
+```bash
+cd surfsense # or SurfSense/docker if you cloned manually
+docker compose pull && docker compose up -d
+```
+
+Database migrations are applied automatically on every startup.
+
+---
+
+## Migrating from the All-in-One Container
+
+
+If you were previously using `docker-compose.quickstart.yml` (the legacy all-in-one `surfsense` container), your data lives in a `surfsense-data` volume and requires a **one-time migration** before switching to the current setup. PostgreSQL has been upgraded from version 14 to 17, so a simple volume swap will not work.
+
+See the full step-by-step guide: [Migrate from the All-in-One Container](/docs/docker-installation/migrate-from-allinone).
+
diff --git a/surfsense_web/content/docs/how-to/electric-sql.mdx b/surfsense_web/content/docs/how-to/electric-sql.mdx
index fb2cf941a..f051a9ab5 100644
--- a/surfsense_web/content/docs/how-to/electric-sql.mdx
+++ b/surfsense_web/content/docs/how-to/electric-sql.mdx
@@ -5,7 +5,7 @@ description: Setting up Electric SQL for real-time data synchronization in SurfS
[Electric SQL](https://electric-sql.com/) enables real-time data synchronization in SurfSense, providing instant updates for inbox items, document indexing status, and connector sync progress without manual refresh. The frontend uses [PGlite](https://pglite.dev/) (a lightweight PostgreSQL in the browser) to maintain a local database that syncs with the backend via Electric SQL.
-## What Does Electric SQL Do?
+## What does Electric SQL do?
When you index documents or receive inbox updates, Electric SQL pushes updates to your browser in real-time. The data flows like this:
@@ -23,45 +23,24 @@ This means:
## Docker Setup
-The `docker-compose.yml` includes the Electric SQL service. It is pre-configured to connect to the Docker-managed `db` container out of the box.
+- The `docker-compose.yml` includes the Electric SQL service, pre-configured to connect to the Docker-managed `db` container.
+- No additional configuration is required. Electric SQL works with the Docker PostgreSQL instance out of the box.
-```bash
-docker compose up -d
-```
+## Manual Setup (Development Only)
-The Electric SQL service configuration in `docker-compose.yml`:
-
-```yaml
-electric:
- image: electricsql/electric:1.4.6
- ports:
- - "${ELECTRIC_PORT:-5133}:3000"
- environment:
- DATABASE_URL: ${ELECTRIC_DATABASE_URL:-postgresql://${ELECTRIC_DB_USER:-electric}:${ELECTRIC_DB_PASSWORD:-electric_password}@${DB_HOST:-db}:${DB_PORT:-5432}/${DB_NAME:-surfsense}?sslmode=${DB_SSLMODE:-disable}}
- ELECTRIC_INSECURE: "true"
- ELECTRIC_WRITE_TO_PG_MODE: direct
- depends_on:
- db:
- condition: service_healthy
-```
-
-No additional configuration is required — Electric SQL is pre-configured to work with the Docker PostgreSQL instance.
-
-## Manual Setup
-
-Follow the steps below based on your PostgreSQL setup.
+This section is intended for local development environments. Follow the steps below based on your PostgreSQL setup.
### Step 1: Configure Environment Variables
Ensure your environment files are configured. If you haven't set up SurfSense yet, follow the [Manual Installation Guide](/docs/manual-installation) first.
-For Electric SQL, verify these variables are set in `docker/.env`:
+For Electric SQL, verify these variables are set:
+
+**Backend (`surfsense_backend/.env`):**
```bash
-ELECTRIC_PORT=5133
ELECTRIC_DB_USER=electric
ELECTRIC_DB_PASSWORD=electric_password
-NEXT_PUBLIC_ELECTRIC_URL=http://localhost:5133
```
**Frontend (`surfsense_web/.env`):**
@@ -71,17 +50,19 @@ NEXT_PUBLIC_ELECTRIC_URL=http://localhost:5133
NEXT_PUBLIC_ELECTRIC_AUTH_MODE=insecure
```
+Next, choose the option that matches your PostgreSQL setup:
+
---
### Option A: Using Docker PostgreSQL
-If you're using the Docker-managed PostgreSQL instance, no extra configuration is needed. Just start the services:
+If you're using the Docker-managed PostgreSQL instance, no extra configuration is needed. Just start the services using the development compose file (which exposes the PostgreSQL port to your host machine):
```bash
-docker compose up -d db electric
+docker compose -f docker-compose.dev.yml up -d db electric
```
-Then run the database migration and start the backend:
+Then run the database migration, start the backend, and launch the frontend:
```bash
cd surfsense_backend
@@ -89,6 +70,13 @@ uv run alembic upgrade head
uv run main.py
```
+In a separate terminal, start the frontend:
+
+```bash
+cd surfsense_web
+pnpm run dev
+```
+
Electric SQL is now configured and connected to your Docker PostgreSQL database.
---
@@ -148,7 +136,7 @@ ELECTRIC_DATABASE_URL=postgresql://electric:electric_password@host.docker.intern
**4. Start Electric SQL only (skip the Docker `db` container):**
```bash
-docker compose up -d --no-deps electric
+docker compose -f docker-compose.dev.yml up -d --no-deps electric
```
The `--no-deps` flag starts only the `electric` service without starting the Docker-managed `db` container.
@@ -161,18 +149,32 @@ uv run alembic upgrade head
uv run main.py
```
+In a separate terminal, start the frontend:
+
+```bash
+cd surfsense_web
+pnpm run dev
+```
+
Electric SQL is now configured and connected to your local PostgreSQL database.
## Environment Variables Reference
+**Required for manual setup:**
+
| Variable | Location | Description | Default |
|----------|----------|-------------|---------|
-| `ELECTRIC_PORT` | `docker/.env` | Port to expose Electric SQL | `5133` |
-| `ELECTRIC_DB_USER` | `docker/.env` | Database user for Electric replication | `electric` |
-| `ELECTRIC_DB_PASSWORD` | `docker/.env` | Database password for Electric replication | `electric_password` |
-| `ELECTRIC_DATABASE_URL` | `docker/.env` | Full connection URL override for Electric. Set to use `host.docker.internal` when pointing at a local Postgres instance | *(built from above defaults)* |
-| `NEXT_PUBLIC_ELECTRIC_URL` | Frontend `.env` | Electric SQL server URL (PGlite connects to this) | `http://localhost:5133` |
-| `NEXT_PUBLIC_ELECTRIC_AUTH_MODE` | Frontend `.env` | Authentication mode (`insecure` for dev, `secure` for production) | `insecure` |
+| `ELECTRIC_DB_USER` | `surfsense_backend/.env` | Database user for Electric replication | `electric` |
+| `ELECTRIC_DB_PASSWORD` | `surfsense_backend/.env` | Database password for Electric replication | `electric_password` |
+| `NEXT_PUBLIC_ELECTRIC_URL` | `surfsense_web/.env` | Electric SQL server URL (PGlite connects to this) | `http://localhost:5133` |
+| `NEXT_PUBLIC_ELECTRIC_AUTH_MODE` | `surfsense_web/.env` | Authentication mode (`insecure` for dev, `secure` for production) | `insecure` |
+
+**Optional / Docker-only:**
+
+| Variable | Location | Description | Default |
+|----------|----------|-------------|---------|
+| `ELECTRIC_PORT` | `docker/.env` | Port to expose Electric SQL on the host | `5133` (dev), `5929` (production) |
+| `ELECTRIC_DATABASE_URL` | `docker/.env` | Full connection URL override for Electric. Only needed for Option B (local Postgres via `host.docker.internal`) | *(built from above defaults)* |
## Verify Setup
diff --git a/surfsense_web/content/docs/how-to/meta.json b/surfsense_web/content/docs/how-to/meta.json
index c8ecb05d9..aeb1bc3b3 100644
--- a/surfsense_web/content/docs/how-to/meta.json
+++ b/surfsense_web/content/docs/how-to/meta.json
@@ -1,6 +1,6 @@
{
"title": "How to",
- "pages": ["electric-sql", "realtime-collaboration", "migrate-from-allinone"],
+ "pages": ["electric-sql", "realtime-collaboration"],
"icon": "BookOpen",
"defaultOpen": false
}
diff --git a/surfsense_web/content/docs/installation.mdx b/surfsense_web/content/docs/installation.mdx
index 6aa2eeb90..aa3a2a72d 100644
--- a/surfsense_web/content/docs/installation.mdx
+++ b/surfsense_web/content/docs/installation.mdx
@@ -12,7 +12,7 @@ There are two ways to install SurfSense, but both require the repository to be c
This method provides a containerized environment with all dependencies pre-configured. Less Customization.
-[Learn more about Docker installation](/docs/docker-installation)
+[Learn more about Docker installation](/docs/docker-installation/install-script)
## Manual Installation (Preferred)
From 2329121bc0768c9bc8d8c6e1b440a4ccbca31d85 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Tue, 10 Mar 2026 03:15:45 +0530
Subject: [PATCH 08/34] chore: update documentation titles and improve clarity
in Docker installation guides
---
.../docs/docker-installation/dev-compose.mdx | 2 +-
.../docker-installation/install-script.mdx | 2 +-
.../migrate-from-allinone.mdx | 81 -------------------
surfsense_web/content/docs/meta.json | 2 +-
4 files changed, 3 insertions(+), 84 deletions(-)
diff --git a/surfsense_web/content/docs/docker-installation/dev-compose.mdx b/surfsense_web/content/docs/docker-installation/dev-compose.mdx
index 19b76eb7d..302026c2a 100644
--- a/surfsense_web/content/docs/docker-installation/dev-compose.mdx
+++ b/surfsense_web/content/docs/docker-installation/dev-compose.mdx
@@ -1,5 +1,5 @@
---
-title: Development Compose
+title: Docker Compose Development
description: Building SurfSense from source using docker-compose.dev.yml
---
diff --git a/surfsense_web/content/docs/docker-installation/install-script.mdx b/surfsense_web/content/docs/docker-installation/install-script.mdx
index d68938a3f..bbe95c230 100644
--- a/surfsense_web/content/docs/docker-installation/install-script.mdx
+++ b/surfsense_web/content/docs/docker-installation/install-script.mdx
@@ -1,5 +1,5 @@
---
-title: Install Script
+title: One-Line Install Script
description: One-command installation of SurfSense using Docker
---
diff --git a/surfsense_web/content/docs/docker-installation/migrate-from-allinone.mdx b/surfsense_web/content/docs/docker-installation/migrate-from-allinone.mdx
index 3de0b043d..c623b59e7 100644
--- a/surfsense_web/content/docs/docker-installation/migrate-from-allinone.mdx
+++ b/surfsense_web/content/docs/docker-installation/migrate-from-allinone.mdx
@@ -81,87 +81,6 @@ bash migrate-database.sh --db-user myuser --db-password mypass --db-name mydb
---
-## Option C — Manual steps
-
-For users who prefer full control or whose platform doesn't support bash scripts (e.g. Windows without WSL2).
-
-### Step 1 — Stop the old all-in-one container
-
-Before mounting the `surfsense-data` volume into a new container, stop the existing one to prevent two PostgreSQL processes from writing to the same data directory:
-
-```bash
-docker stop surfsense 2>/dev/null || true
-```
-
-### Step 2 — Start a temporary PostgreSQL 14 container
-
-```bash
-docker run -d --name surfsense-pg14-temp \
- -v surfsense-data:/data \
- -e PGDATA=/data/postgres \
- -e POSTGRES_USER=surfsense \
- -e POSTGRES_PASSWORD=surfsense \
- -e POSTGRES_DB=surfsense \
- pgvector/pgvector:pg14
-```
-
-Wait ~10 seconds, then confirm it is healthy:
-
-```bash
-docker exec surfsense-pg14-temp pg_isready -U surfsense
-```
-
-### Step 3 — Dump the database
-
-```bash
-docker exec -e PGPASSWORD=surfsense surfsense-pg14-temp \
- pg_dump -U surfsense surfsense > surfsense_backup.sql
-```
-
-### Step 4 — Recover your SECRET\_KEY
-
-```bash
-docker run --rm -v surfsense-data:/data alpine cat /data/.secret_key
-```
-
-### Step 5 — Set up the new stack
-
-```bash
-mkdir -p surfsense/scripts
-curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/docker-compose.yml -o surfsense/docker-compose.yml
-curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/.env.example -o surfsense/.env.example
-curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/postgresql.conf -o surfsense/postgresql.conf
-curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/init-electric-user.sh -o surfsense/scripts/init-electric-user.sh
-chmod +x surfsense/scripts/init-electric-user.sh
-cp surfsense/.env.example surfsense/.env
-```
-
-Set `SECRET_KEY` in `surfsense/.env` to the value from Step 4.
-
-### Step 6 — Start PostgreSQL 17 and restore
-
-```bash
-cd surfsense
-docker compose up -d db
-docker compose exec db pg_isready -U surfsense # wait until ready
-docker compose exec -T db psql -U surfsense -d surfsense < ../surfsense_backup.sql
-```
-
-### Step 7 — Start all services
-
-```bash
-docker compose up -d
-```
-
-### Step 8 — Clean up
-
-```bash
-docker stop surfsense-pg14-temp && docker rm surfsense-pg14-temp
-docker volume rm surfsense-data # only after verifying migration succeeded
-```
-
----
-
## Troubleshooting
### `install.sh` runs normally with a blank database (no migration happened)
diff --git a/surfsense_web/content/docs/meta.json b/surfsense_web/content/docs/meta.json
index f73b59e18..8401417cf 100644
--- a/surfsense_web/content/docs/meta.json
+++ b/surfsense_web/content/docs/meta.json
@@ -10,7 +10,7 @@
"manual-installation",
"connectors",
"how-to",
- "---Development---",
+ "---Developers---",
"testing"
]
}
From dd231a455cdc3481142f7829ace796d960fae612 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Tue, 10 Mar 2026 03:28:49 +0530
Subject: [PATCH 09/34] chore: restructure documentation to enhance user
experience with a new index page and dedicated prerequisites section
---
surfsense_web/content/docs/index.mdx | 131 +++++++------------
surfsense_web/content/docs/meta.json | 1 +
surfsense_web/content/docs/prerequisites.mdx | 86 ++++++++++++
3 files changed, 137 insertions(+), 81 deletions(-)
create mode 100644 surfsense_web/content/docs/prerequisites.mdx
diff --git a/surfsense_web/content/docs/index.mdx b/surfsense_web/content/docs/index.mdx
index 6c0450297..42f25465d 100644
--- a/surfsense_web/content/docs/index.mdx
+++ b/surfsense_web/content/docs/index.mdx
@@ -1,86 +1,55 @@
---
-title: Prerequisites
-description: Required setup's before setting up SurfSense
-icon: ClipboardCheck
+title: Documentation
+description: Welcome to SurfSense's documentation
+icon: BookOpen
---
+import { Card, Cards } from 'fumadocs-ui/components/card';
+import { ClipboardCheck, Download, Container, Wrench, Cable, BookOpen, FlaskConical } from 'lucide-react';
-## Auth Setup
+Welcome to **SurfSense's Documentation!** Here, you'll find everything you need to get the most out of SurfSense. Dive in to explore how SurfSense can be your AI-powered research companion.
-SurfSense supports both Google OAuth and local email/password authentication. Google OAuth is optional - if you prefer local authentication, you can skip this section.
-
-**Note**: Google OAuth setup is **required** in your `.env` files if you want to use the Gmail and Google Calendar connectors in SurfSense.
-
-To set up Google OAuth:
-
-1. Login to your [Google Developer Console](https://console.cloud.google.com/)
-2. Enable the required APIs:
- - **People API** (required for basic Google OAuth)
-
-3. Set up OAuth consent screen.
-
-4. Create OAuth client ID and secret.
-
-5. It should look like this.
-
-
----
-
-## File Upload's
-
-SurfSense supports three ETL (Extract, Transform, Load) services for converting files to LLM-friendly formats:
-
-### Option 1: Unstructured
-
-Files are converted using [Unstructured](https://github.com/Unstructured-IO/unstructured)
-
-1. Get an Unstructured.io API key from [Unstructured Platform](https://platform.unstructured.io/)
-2. You should be able to generate API keys once registered
-
-
-### Option 2: LlamaIndex (LlamaCloud)
-
-Files are converted using [LlamaIndex](https://www.llamaindex.ai/) which offers 50+ file format support.
-
-1. Get a LlamaIndex API key from [LlamaCloud](https://cloud.llamaindex.ai/)
-2. Sign up for a LlamaCloud account to access their parsing services
-3. LlamaCloud provides enhanced parsing capabilities for complex documents
-
-### Option 3: Docling (Recommended for Privacy)
-
-Files are processed locally using [Docling](https://github.com/DS4SD/docling) - IBM's open-source document parsing library.
-
-1. **No API key required** - all processing happens locally
-2. **Privacy-focused** - documents never leave your system
-3. **Supported formats**: PDF, Office documents (Word, Excel, PowerPoint), images (PNG, JPEG, TIFF, BMP, WebP), HTML, CSV, AsciiDoc
-4. **Enhanced features**: Advanced table detection, image extraction, and structured document parsing
-5. **GPU acceleration** support for faster processing (when available)
-
-**Note**: You only need to set up one of these services.
-
----
-
-## LLM Observability (Optional)
-
-This is not required for SurfSense to work. But it is always a good idea to monitor LLM interactions. So we do not have those WTH moments.
-
-1. Get a LangSmith API key from [smith.langchain.com](https://smith.langchain.com/)
-2. This helps in observing SurfSense Researcher Agent.
-
-
----
-
-## Crawler
-
-SurfSense have 2 options for saving webpages:
-- [SurfSense Extension](https://github.com/MODSetter/SurfSense/tree/main/surfsense_browser_extension) (Overall better experience & ability to save private webpages, recommended)
-- Crawler (If you want to save public webpages)
-
-**NOTE:** SurfSense currently uses [Firecrawl.py](https://www.firecrawl.dev/) for web crawling. If you plan on using the crawler, you will need to create a Firecrawl account and get an API key.
-
-
----
-
-## Next Steps
-
-Once you have all prerequisites in place, proceed to the [installation guide](/docs/installation) to set up SurfSense.
\ No newline at end of file
+<Cards>
+  <Card
+    icon={<ClipboardCheck />}
+    title="Prerequisites"
+    description="Required setup before installing SurfSense"
+    href="/docs/prerequisites"
+  />
+  <Card
+    icon={<Download />}
+    title="Installation"
+    description="Choose your installation method"
+    href="/docs/installation"
+  />
+  <Card
+    icon={<Container />}
+    title="Docker Installation"
+    description="Deploy SurfSense with Docker Compose"
+    href="/docs/docker-installation"
+  />
+  <Card
+    icon={<Wrench />}
+    title="Manual Installation"
+    description="Set up SurfSense manually from source"
+    href="/docs/manual-installation"
+  />
+  <Card
+    icon={<Cable />}
+    title="Connectors"
+    description="Integrate with third-party services"
+    href="/docs/connectors"
+  />
+  <Card
+    icon={<BookOpen />}
+    title="How-To Guides"
+    description="Step-by-step guides for common tasks"
+    href="/docs/how-to"
+  />
+  <Card
+    icon={<FlaskConical />}
+    title="Testing"
+    description="Running and writing tests for SurfSense"
+    href="/docs/testing"
+  />
+</Cards>
diff --git a/surfsense_web/content/docs/meta.json b/surfsense_web/content/docs/meta.json
index 8401417cf..dee0cf6cb 100644
--- a/surfsense_web/content/docs/meta.json
+++ b/surfsense_web/content/docs/meta.json
@@ -5,6 +5,7 @@
"pages": [
"---Guides---",
"index",
+ "prerequisites",
"installation",
"docker-installation",
"manual-installation",
diff --git a/surfsense_web/content/docs/prerequisites.mdx b/surfsense_web/content/docs/prerequisites.mdx
new file mode 100644
index 000000000..6c0450297
--- /dev/null
+++ b/surfsense_web/content/docs/prerequisites.mdx
@@ -0,0 +1,86 @@
+---
+title: Prerequisites
+description: Required setup before setting up SurfSense
+icon: ClipboardCheck
+---
+
+
+## Auth Setup
+
+SurfSense supports both Google OAuth and local email/password authentication. Google OAuth is optional - if you prefer local authentication, you can skip this section.
+
+**Note**: Google OAuth setup is **required** in your `.env` files if you want to use the Gmail and Google Calendar connectors in SurfSense.
+
+To set up Google OAuth:
+
+1. Login to your [Google Developer Console](https://console.cloud.google.com/)
+2. Enable the required APIs:
+ - **People API** (required for basic Google OAuth)
+
+3. Set up OAuth consent screen.
+
+4. Create OAuth client ID and secret.
+
+5. It should look like this.
+
+
+---
+
+## File Uploads
+
+SurfSense supports three ETL (Extract, Transform, Load) services for converting files to LLM-friendly formats:
+
+### Option 1: Unstructured
+
+Files are converted using [Unstructured](https://github.com/Unstructured-IO/unstructured)
+
+1. Get an Unstructured.io API key from [Unstructured Platform](https://platform.unstructured.io/)
+2. You should be able to generate API keys once registered
+
+
+### Option 2: LlamaIndex (LlamaCloud)
+
+Files are converted using [LlamaIndex](https://www.llamaindex.ai/) which offers 50+ file format support.
+
+1. Get a LlamaIndex API key from [LlamaCloud](https://cloud.llamaindex.ai/)
+2. Sign up for a LlamaCloud account to access their parsing services
+3. LlamaCloud provides enhanced parsing capabilities for complex documents
+
+### Option 3: Docling (Recommended for Privacy)
+
+Files are processed locally using [Docling](https://github.com/DS4SD/docling) - IBM's open-source document parsing library.
+
+1. **No API key required** - all processing happens locally
+2. **Privacy-focused** - documents never leave your system
+3. **Supported formats**: PDF, Office documents (Word, Excel, PowerPoint), images (PNG, JPEG, TIFF, BMP, WebP), HTML, CSV, AsciiDoc
+4. **Enhanced features**: Advanced table detection, image extraction, and structured document parsing
+5. **GPU acceleration** support for faster processing (when available)
+
+**Note**: You only need to set up one of these services.
+
+---
+
+## LLM Observability (Optional)
+
+This is not required for SurfSense to work, but it is always a good idea to monitor LLM interactions so we do not have those WTH moments.
+
+1. Get a LangSmith API key from [smith.langchain.com](https://smith.langchain.com/)
+2. This helps in observing SurfSense Researcher Agent.
+
+
+---
+
+## Crawler
+
+SurfSense has 2 options for saving webpages:
+- [SurfSense Extension](https://github.com/MODSetter/SurfSense/tree/main/surfsense_browser_extension) (Overall better experience & ability to save private webpages, recommended)
+- Crawler (If you want to save public webpages)
+
+**NOTE:** SurfSense currently uses [Firecrawl.py](https://www.firecrawl.dev/) for web crawling. If you plan on using the crawler, you will need to create a Firecrawl account and get an API key.
+
+
+---
+
+## Next Steps
+
+Once you have all prerequisites in place, proceed to the [installation guide](/docs/installation) to set up SurfSense.
\ No newline at end of file
From 53de474e5e46cc10e9dc83667ef0a11baa548142 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Tue, 10 Mar 2026 03:33:57 +0530
Subject: [PATCH 10/34] feat: add Code of Conduct document and update layout
with new icon for improved community guidelines visibility
---
surfsense_web/app/layout.config.tsx | 14 +-
.../content/docs/code-of-conduct.mdx | 132 ++++++++++++++++++
surfsense_web/content/docs/how-to/meta.json | 2 +-
surfsense_web/content/docs/index.mdx | 8 +-
surfsense_web/content/docs/meta.json | 5 +-
5 files changed, 156 insertions(+), 5 deletions(-)
create mode 100644 surfsense_web/content/docs/code-of-conduct.mdx
diff --git a/surfsense_web/app/layout.config.tsx b/surfsense_web/app/layout.config.tsx
index 214c5b940..ac1b210c6 100644
--- a/surfsense_web/app/layout.config.tsx
+++ b/surfsense_web/app/layout.config.tsx
@@ -1,7 +1,19 @@
+import Image from "next/image";
import type { BaseLayoutProps } from "fumadocs-ui/layouts/shared";
export const baseOptions: BaseLayoutProps = {
nav: {
- title: "SurfSense Docs",
+ title: (
+ <>
+
+ SurfSense Docs
+ >
+ ),
},
githubUrl: "https://github.com/MODSetter/SurfSense",
};
diff --git a/surfsense_web/content/docs/code-of-conduct.mdx b/surfsense_web/content/docs/code-of-conduct.mdx
new file mode 100644
index 000000000..1fd96dff5
--- /dev/null
+++ b/surfsense_web/content/docs/code-of-conduct.mdx
@@ -0,0 +1,132 @@
+---
+title: Code of Conduct
+description: Community guidelines and expectations for behavior
+icon: Heart
+---
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, religion, or sexual identity
+and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people
+* Being respectful of differing opinions, viewpoints, and experiences
+* Giving and gracefully accepting constructive feedback
+* Accepting responsibility and apologizing to those affected by our mistakes,
+ and learning from the experience
+* Focusing on what is best not just for us as individuals, but for the
+ overall community
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or
+ advances of any kind
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or email
+ address, without their explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+rohan@surfsense.com.
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series
+of actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period of time. This
+includes avoiding interactions in community spaces as well as external channels
+like social media. Violating these terms may lead to a temporary or
+permanent ban.
+
+### 3. Temporary Ban
+
+**Community Impact**: A serious violation of community standards, including
+sustained inappropriate behavior.
+
+**Consequence**: A temporary ban from any sort of interaction or public
+communication with the community for a specified period of time. No public or
+private interaction with the people involved, including unsolicited interaction
+with those enforcing the Code of Conduct, is allowed during this period.
+Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community
+standards, including sustained inappropriate behavior, harassment of an
+individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within
+the community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 2.0, available at
+https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
+
+Community Impact Guidelines were inspired by [Mozilla's code of conduct
+enforcement ladder](https://github.com/mozilla/diversity).
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see the FAQ at
+https://www.contributor-covenant.org/faq. Translations are available at
+https://www.contributor-covenant.org/translations.
diff --git a/surfsense_web/content/docs/how-to/meta.json b/surfsense_web/content/docs/how-to/meta.json
index aeb1bc3b3..16e1e9c81 100644
--- a/surfsense_web/content/docs/how-to/meta.json
+++ b/surfsense_web/content/docs/how-to/meta.json
@@ -1,6 +1,6 @@
{
"title": "How to",
"pages": ["electric-sql", "realtime-collaboration"],
- "icon": "BookOpen",
+ "icon": "Compass",
"defaultOpen": false
}
diff --git a/surfsense_web/content/docs/index.mdx b/surfsense_web/content/docs/index.mdx
index 42f25465d..2204e4e34 100644
--- a/surfsense_web/content/docs/index.mdx
+++ b/surfsense_web/content/docs/index.mdx
@@ -5,7 +5,7 @@ icon: BookOpen
---
import { Card, Cards } from 'fumadocs-ui/components/card';
-import { ClipboardCheck, Download, Container, Wrench, Cable, BookOpen, FlaskConical } from 'lucide-react';
+import { ClipboardCheck, Download, Container, Wrench, Cable, BookOpen, FlaskConical, Heart } from 'lucide-react';
Welcome to **SurfSense's Documentation!** Here, you'll find everything you need to get the most out of SurfSense. Dive in to explore how SurfSense can be your AI-powered research companion.
@@ -52,4 +52,10 @@ Welcome to **SurfSense's Documentation!** Here, you'll find everything you need
description="Running and writing tests for SurfSense"
href="/docs/testing"
/>
+  <Card
+    icon={<Heart />}
+    title="Code of Conduct"
+    description="Community guidelines and expectations"
+    href="/docs/code-of-conduct"
+  />
diff --git a/surfsense_web/content/docs/meta.json b/surfsense_web/content/docs/meta.json
index dee0cf6cb..a0b6f8a1b 100644
--- a/surfsense_web/content/docs/meta.json
+++ b/surfsense_web/content/docs/meta.json
@@ -7,11 +7,12 @@
"index",
"prerequisites",
"installation",
- "docker-installation",
"manual-installation",
+ "docker-installation",
"connectors",
"how-to",
"---Developers---",
- "testing"
+ "testing",
+ "code-of-conduct"
]
}
From 847564ab30334907505b9563b86d13f2dd00d1bd Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Tue, 10 Mar 2026 03:35:34 +0530
Subject: [PATCH 11/34] feat: add new documentation URLs to sitemap for
improved accessibility and user navigation
---
surfsense_web/app/sitemap.ts | 37 ++++++++++++++++++++++++++++++++++++
1 file changed, 37 insertions(+)
diff --git a/surfsense_web/app/sitemap.ts b/surfsense_web/app/sitemap.ts
index 414b41719..0e9288be0 100644
--- a/surfsense_web/app/sitemap.ts
+++ b/surfsense_web/app/sitemap.ts
@@ -54,6 +54,12 @@ export default function sitemap(): MetadataRoute.Sitemap {
changeFrequency: "daily",
priority: 0.9,
},
+ {
+ url: "https://www.surfsense.com/docs/prerequisites",
+ lastModified,
+ changeFrequency: "daily",
+ priority: 0.9,
+ },
{
url: "https://www.surfsense.com/docs/docker-installation/install-script",
lastModified,
@@ -78,6 +84,12 @@ export default function sitemap(): MetadataRoute.Sitemap {
changeFrequency: "daily",
priority: 0.9,
},
+ {
+ url: "https://www.surfsense.com/docs/docker-installation/migrate-from-allinone",
+ lastModified,
+ changeFrequency: "daily",
+ priority: 0.9,
+ },
{
url: "https://www.surfsense.com/docs/manual-installation",
lastModified,
@@ -181,6 +193,12 @@ export default function sitemap(): MetadataRoute.Sitemap {
changeFrequency: "daily",
priority: 0.8,
},
+ {
+ url: "https://www.surfsense.com/docs/connectors/obsidian",
+ lastModified,
+ changeFrequency: "daily",
+ priority: 0.8,
+ },
{
url: "https://www.surfsense.com/docs/connectors/slack",
lastModified,
@@ -200,5 +218,24 @@ export default function sitemap(): MetadataRoute.Sitemap {
changeFrequency: "daily",
priority: 0.8,
},
+ {
+ url: "https://www.surfsense.com/docs/how-to/realtime-collaboration",
+ lastModified,
+ changeFrequency: "daily",
+ priority: 0.8,
+ },
+ // Developer documentation
+ {
+ url: "https://www.surfsense.com/docs/testing",
+ lastModified,
+ changeFrequency: "daily",
+ priority: 0.7,
+ },
+ {
+ url: "https://www.surfsense.com/docs/code-of-conduct",
+ lastModified,
+ changeFrequency: "daily",
+ priority: 0.7,
+ },
];
}
From 74c95ee61f903989cf99081c80c4cc2c7d566de4 Mon Sep 17 00:00:00 2001
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com>
Date: Tue, 10 Mar 2026 11:49:06 +0530
Subject: [PATCH 12/34] feat: update ChatListItem UI for improved styling and
accessibility; increase initial page size in use-documents hook
---
.../layout/ui/sidebar/ChatListItem.tsx | 18 +++++++++++-------
surfsense_web/hooks/use-documents.ts | 2 +-
2 files changed, 12 insertions(+), 8 deletions(-)
diff --git a/surfsense_web/components/layout/ui/sidebar/ChatListItem.tsx b/surfsense_web/components/layout/ui/sidebar/ChatListItem.tsx
index 078cea34e..82ba29ac9 100644
--- a/surfsense_web/components/layout/ui/sidebar/ChatListItem.tsx
+++ b/surfsense_web/components/layout/ui/sidebar/ChatListItem.tsx
@@ -2,7 +2,6 @@
import {
ArchiveIcon,
- MessageSquare,
MoreHorizontal,
PenLine,
RotateCcwIcon,
@@ -64,21 +63,26 @@ export function ChatListItem({
{...(isMobile ? longPressHandlers : {})}
className={cn(
"flex w-full items-center gap-2 overflow-hidden rounded-md p-2 text-sm text-left transition-colors",
- "[&>span:last-child]:truncate",
- "hover:bg-accent hover:text-accent-foreground",
+ "group-hover/item:bg-accent group-hover/item:text-accent-foreground",
"focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring",
isActive && "bg-accent text-accent-foreground"
)}
>
-
- {animatedName}
+ {animatedName}
{/* Actions dropdown - trigger hidden on mobile, long-press opens it instead */}