-
- {containerWidth > 0 &&
- carouselItems.map((item, i) => {
- const style = getCardStyle(i);
- return (
- goTo(i) : undefined}
- animate={{
- x: style.x,
- rotateY: style.rotateY,
- }}
- transition={{ duration: 0.7, ease: [0.32, 0.72, 0, 1] }}
- >
-
-
-
-
-
- );
- })}
-
+
diff --git a/surfsense_web/components/ui/sonner.tsx b/surfsense_web/components/ui/sonner.tsx
index 9222e8d57..b463d9ac4 100644
--- a/surfsense_web/components/ui/sonner.tsx
+++ b/surfsense_web/components/ui/sonner.tsx
@@ -10,6 +10,8 @@ const Toaster = ({ ...props }: ToasterProps) => {
-Use `nickfedor/watchtower`. The original `containrrr/watchtower` is no longer maintained and may fail with newer Docker versions.
-
-
-**Option 3 — Manual:**
-
-```bash
-cd surfsense # or SurfSense/docker if you cloned manually
-docker compose pull && docker compose up -d
-```
-
-Database migrations are applied automatically on every startup.
-
----
-
-## Configuration
-
-All configuration lives in a single `docker/.env` file (or `surfsense/.env` if you used the install script). Copy `.env.example` to `.env` and edit the values you need.
-
-### Required
-
-| Variable | Description |
-|----------|-------------|
-| `SECRET_KEY` | JWT secret key. Generate with: `openssl rand -base64 32`. Auto-generated by the install script. |
-
-### Core Settings
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `SURFSENSE_VERSION` | Image tag to deploy. Use `latest`, a clean version (e.g. `0.0.14`), or a specific build (e.g. `0.0.14.1`) | `latest` |
-| `AUTH_TYPE` | Authentication method: `LOCAL` (email/password) or `GOOGLE` (OAuth) | `LOCAL` |
-| `ETL_SERVICE` | Document parsing: `DOCLING` (local), `UNSTRUCTURED`, or `LLAMACLOUD` | `DOCLING` |
-| `EMBEDDING_MODEL` | Embedding model for vector search | `sentence-transformers/all-MiniLM-L6-v2` |
-| `TTS_SERVICE` | Text-to-speech provider for podcasts | `local/kokoro` |
-| `STT_SERVICE` | Speech-to-text provider for audio files | `local/base` |
-| `REGISTRATION_ENABLED` | Allow new user registrations | `TRUE` |
-
-### Ports
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `FRONTEND_PORT` | Frontend service port | `3000` |
-| `BACKEND_PORT` | Backend API service port | `8000` |
-| `ELECTRIC_PORT` | Electric SQL service port | `5133` |
-
-### Custom Domain / Reverse Proxy
-
-Only set these if serving SurfSense on a real domain via a reverse proxy (Caddy, Nginx, Cloudflare Tunnel, etc.). Leave commented out for standard localhost deployments.
-
-| Variable | Description |
-|----------|-------------|
-| `NEXT_FRONTEND_URL` | Public frontend URL (e.g. `https://app.yourdomain.com`) |
-| `BACKEND_URL` | Public backend URL for OAuth callbacks (e.g. `https://api.yourdomain.com`) |
-| `NEXT_PUBLIC_FASTAPI_BACKEND_URL` | Backend URL used by the frontend (e.g. `https://api.yourdomain.com`) |
-| `NEXT_PUBLIC_ELECTRIC_URL` | Electric SQL URL used by the frontend (e.g. `https://electric.yourdomain.com`) |
-
-### Database
-
-Defaults work out of the box. Change for security in production.
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `DB_USER` | PostgreSQL username | `surfsense` |
-| `DB_PASSWORD` | PostgreSQL password | `surfsense` |
-| `DB_NAME` | PostgreSQL database name | `surfsense` |
-| `DB_HOST` | PostgreSQL host | `db` |
-| `DB_PORT` | PostgreSQL port | `5432` |
-| `DB_SSLMODE` | SSL mode: `disable`, `require`, `verify-ca`, `verify-full` | `disable` |
-| `DATABASE_URL` | Full connection URL override. Use for managed databases (RDS, Supabase, etc.) | *(built from above)* |
-
-### Electric SQL
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `ELECTRIC_DB_USER` | Replication user for Electric SQL | `electric` |
-| `ELECTRIC_DB_PASSWORD` | Replication password for Electric SQL | `electric_password` |
-| `ELECTRIC_DATABASE_URL` | Full connection URL override for Electric. Set to `host.docker.internal` when pointing at a local Postgres instance | *(built from above)* |
-
-### Authentication
-
-| Variable | Description |
-|----------|-------------|
-| `GOOGLE_OAUTH_CLIENT_ID` | Google OAuth client ID (required if `AUTH_TYPE=GOOGLE`) |
-| `GOOGLE_OAUTH_CLIENT_SECRET` | Google OAuth client secret (required if `AUTH_TYPE=GOOGLE`) |
-
-Create credentials at the [Google Cloud Console](https://console.cloud.google.com/apis/credentials).
-
-### External API Keys
-
-| Variable | Description |
-|----------|-------------|
-| `FIRECRAWL_API_KEY` | Firecrawl API key for web crawling |
-| `UNSTRUCTURED_API_KEY` | Unstructured.io API key (required if `ETL_SERVICE=UNSTRUCTURED`) |
-| `LLAMA_CLOUD_API_KEY` | LlamaCloud API key (required if `ETL_SERVICE=LLAMACLOUD`) |
-
-### Connector OAuth Keys
-
-Uncomment the connectors you want to use. Redirect URIs follow the pattern `http://localhost:8000/api/v1/auth/{connector}/connector/callback`, where `{connector}` is the connector's name.
-
-| Connector | Variables |
-|-----------|-----------|
-| Google Drive / Gmail / Calendar | `GOOGLE_DRIVE_REDIRECT_URI`, `GOOGLE_GMAIL_REDIRECT_URI`, `GOOGLE_CALENDAR_REDIRECT_URI` |
-| Notion | `NOTION_CLIENT_ID`, `NOTION_CLIENT_SECRET`, `NOTION_REDIRECT_URI` |
-| Slack | `SLACK_CLIENT_ID`, `SLACK_CLIENT_SECRET`, `SLACK_REDIRECT_URI` |
-| Discord | `DISCORD_CLIENT_ID`, `DISCORD_CLIENT_SECRET`, `DISCORD_BOT_TOKEN`, `DISCORD_REDIRECT_URI` |
-| Jira & Confluence | `ATLASSIAN_CLIENT_ID`, `ATLASSIAN_CLIENT_SECRET`, `JIRA_REDIRECT_URI`, `CONFLUENCE_REDIRECT_URI` |
-| Linear | `LINEAR_CLIENT_ID`, `LINEAR_CLIENT_SECRET`, `LINEAR_REDIRECT_URI` |
-| ClickUp | `CLICKUP_CLIENT_ID`, `CLICKUP_CLIENT_SECRET`, `CLICKUP_REDIRECT_URI` |
-| Airtable | `AIRTABLE_CLIENT_ID`, `AIRTABLE_CLIENT_SECRET`, `AIRTABLE_REDIRECT_URI` |
-| Microsoft Teams | `TEAMS_CLIENT_ID`, `TEAMS_CLIENT_SECRET`, `TEAMS_REDIRECT_URI` |
-
-For Airtable, create an OAuth integration at the [Airtable Developer Hub](https://airtable.com/create/oauth).
-
-### Observability (optional)
-
-| Variable | Description |
-|----------|-------------|
-| `LANGSMITH_TRACING` | Enable LangSmith tracing (`true` / `false`) |
-| `LANGSMITH_ENDPOINT` | LangSmith API endpoint |
-| `LANGSMITH_API_KEY` | LangSmith API key |
-| `LANGSMITH_PROJECT` | LangSmith project name |
-
-### Advanced (optional)
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `SCHEDULE_CHECKER_INTERVAL` | How often to check for scheduled connector tasks (e.g. `5m`, `1h`) | `5m` |
-| `RERANKERS_ENABLED` | Enable document reranking for improved search | `FALSE` |
-| `RERANKERS_MODEL_NAME` | Reranker model name (e.g. `ms-marco-MiniLM-L-12-v2`) | |
-| `RERANKERS_MODEL_TYPE` | Reranker model type (e.g. `flashrank`) | |
-| `PAGES_LIMIT` | Max pages per user for ETL services | unlimited |
-
----
-
-## Docker Services
-
-| Service | Description |
-|---------|-------------|
-| `db` | PostgreSQL with pgvector extension |
-| `redis` | Message broker for Celery |
-| `backend` | FastAPI application server |
-| `celery_worker` | Background task processing (document indexing, etc.) |
-| `celery_beat` | Periodic task scheduler (connector sync) |
-| `electric` | Electric SQL — real-time sync for the frontend |
-| `frontend` | Next.js web application |
-
-All services start automatically with `docker compose up -d`.
-
-The backend includes a health check — dependent services (workers, frontend) wait until the API is fully ready before starting. You can monitor startup progress with `docker compose ps` (look for `(health: starting)` → `(healthy)`).
-
----
-
-## Development Compose File
-
-If you're contributing to SurfSense and want to build from source, use `docker-compose.dev.yml` instead:
-
-```bash
-cd SurfSense/docker
-docker compose -f docker-compose.dev.yml up --build
-```
-
-This file builds the backend and frontend from your local source code (instead of pulling prebuilt images) and includes pgAdmin for database inspection at [http://localhost:5050](http://localhost:5050). Use the production `docker-compose.yml` for all other cases.
-
-The following `.env` variables are **only used by the dev compose file** (they have no effect on the production `docker-compose.yml`):
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `PGADMIN_PORT` | pgAdmin web UI port | `5050` |
-| `PGADMIN_DEFAULT_EMAIL` | pgAdmin login email | `admin@surfsense.com` |
-| `PGADMIN_DEFAULT_PASSWORD` | pgAdmin login password | `surfsense` |
-| `REDIS_PORT` | Exposed Redis port (internal-only in prod) | `6379` |
-| `NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE` | Frontend build arg for auth type | `LOCAL` |
-| `NEXT_PUBLIC_ETL_SERVICE` | Frontend build arg for ETL service | `DOCLING` |
-| `NEXT_PUBLIC_DEPLOYMENT_MODE` | Frontend build arg for deployment mode | `self-hosted` |
-| `NEXT_PUBLIC_ELECTRIC_AUTH_MODE` | Frontend build arg for Electric auth | `insecure` |
-
-In the production compose file, the `NEXT_PUBLIC_*` frontend variables are automatically derived from `AUTH_TYPE`, `ETL_SERVICE`, and the port settings. In the dev compose file, they are passed as build args since the frontend is built from source.
-
----
-
-## Migrating from the All-in-One Container
-
-
-If you were previously using `docker-compose.quickstart.yml` (the legacy all-in-one `surfsense` container), your data lives in a `surfsense-data` volume and requires a **one-time migration** before switching to the current setup. PostgreSQL has been upgraded from version 14 to 17, so a simple volume swap will not work.
-
-See the full step-by-step guide: [Migrate from the All-in-One Container](/docs/how-to/migrate-from-allinone).
-
-
----
-
-## Useful Commands
-
-```bash
-# View logs (all services)
-docker compose logs -f
-
-# View logs for a specific service
-docker compose logs -f backend
-docker compose logs -f electric
-
-# Stop all services
-docker compose down
-
-# Restart a specific service
-docker compose restart backend
-
-# Stop and remove all containers + volumes (destructive!)
-docker compose down -v
-```
-
----
-
-## Troubleshooting
-
-- **Ports already in use** — Change the relevant `*_PORT` variable in `.env` and restart.
-- **Permission errors on Linux** — You may need to prefix `docker` commands with `sudo`.
-- **Electric SQL not connecting** — Check `docker compose logs electric`. If it shows `domain does not exist: db`, ensure `ELECTRIC_DATABASE_URL` is not set to a stale value in `.env`.
-- **Real-time updates not working in browser** — Open DevTools → Console and look for `[Electric]` errors. Check that `NEXT_PUBLIC_ELECTRIC_URL` matches the running Electric SQL address.
-- **Line ending issues on Windows** — Run `git config --global core.autocrlf true` before cloning.
diff --git a/surfsense_web/content/docs/docker-installation/dev-compose.mdx b/surfsense_web/content/docs/docker-installation/dev-compose.mdx
new file mode 100644
index 000000000..302026c2a
--- /dev/null
+++ b/surfsense_web/content/docs/docker-installation/dev-compose.mdx
@@ -0,0 +1,30 @@
+---
+title: Docker Compose Development
+description: Building SurfSense from source using docker-compose.dev.yml
+---
+
+If you're contributing to SurfSense and want to build from source, use `docker-compose.dev.yml` instead:
+
+```bash
+cd SurfSense/docker
+docker compose -f docker-compose.dev.yml up --build
+```
+
+This file builds the backend and frontend from your local source code (instead of pulling prebuilt images) and includes pgAdmin for database inspection at [http://localhost:5050](http://localhost:5050). Use the production `docker-compose.yml` for all other cases.
+
+## Dev-Only Environment Variables
+
+The following `.env` variables are **only used by the dev compose file** (they have no effect on the production `docker-compose.yml`):
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `PGADMIN_PORT` | pgAdmin web UI port | `5050` |
+| `PGADMIN_DEFAULT_EMAIL` | pgAdmin login email | `admin@surfsense.com` |
+| `PGADMIN_DEFAULT_PASSWORD` | pgAdmin login password | `surfsense` |
+| `REDIS_PORT` | Exposed Redis port (internal-only in prod) | `6379` |
+| `NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE` | Frontend build arg for auth type | `LOCAL` |
+| `NEXT_PUBLIC_ETL_SERVICE` | Frontend build arg for ETL service | `DOCLING` |
+| `NEXT_PUBLIC_DEPLOYMENT_MODE` | Frontend build arg for deployment mode | `self-hosted` |
+| `NEXT_PUBLIC_ELECTRIC_AUTH_MODE` | Frontend build arg for Electric auth | `insecure` |
+
+In the production compose file, the `NEXT_PUBLIC_*` frontend variables are automatically derived from `AUTH_TYPE`, `ETL_SERVICE`, and the port settings. In the dev compose file, they are passed as build args since the frontend is built from source.
diff --git a/surfsense_web/content/docs/docker-installation/docker-compose.mdx b/surfsense_web/content/docs/docker-installation/docker-compose.mdx
new file mode 100644
index 000000000..c56f08106
--- /dev/null
+++ b/surfsense_web/content/docs/docker-installation/docker-compose.mdx
@@ -0,0 +1,188 @@
+---
+title: Docker Compose
+description: Manual Docker Compose setup for SurfSense
+---
+
+## Setup
+
+```bash
+git clone https://github.com/MODSetter/SurfSense.git
+cd SurfSense/docker
+cp .env.example .env
+# Edit .env, at minimum set SECRET_KEY
+docker compose up -d
+```
+
+After starting, access SurfSense at:
+
+- **Frontend**: [http://localhost:3929](http://localhost:3929)
+- **Backend API**: [http://localhost:8929](http://localhost:8929)
+- **API Docs**: [http://localhost:8929/docs](http://localhost:8929/docs)
+- **Electric SQL**: [http://localhost:5929](http://localhost:5929)
+
+---
+
+## Configuration
+
+All configuration lives in a single `docker/.env` file (or `surfsense/.env` if you used the install script). Copy `.env.example` to `.env` and edit the values you need.
+
+### Required
+
+| Variable | Description |
+|----------|-------------|
+| `SECRET_KEY` | JWT secret key. Generate with: `openssl rand -base64 32`. Auto-generated by the install script. |
+
+### Core Settings
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `SURFSENSE_VERSION` | Image tag to deploy. Use `latest`, a clean version (e.g. `0.0.14`), or a specific build (e.g. `0.0.14.1`) | `latest` |
+| `AUTH_TYPE` | Authentication method: `LOCAL` (email/password) or `GOOGLE` (OAuth) | `LOCAL` |
+| `ETL_SERVICE` | Document parsing: `DOCLING` (local), `UNSTRUCTURED`, or `LLAMACLOUD` | `DOCLING` |
+| `EMBEDDING_MODEL` | Embedding model for vector search | `sentence-transformers/all-MiniLM-L6-v2` |
+| `TTS_SERVICE` | Text-to-speech provider for podcasts | `local/kokoro` |
+| `STT_SERVICE` | Speech-to-text provider for audio files | `local/base` |
+| `REGISTRATION_ENABLED` | Allow new user registrations | `TRUE` |
+
+### Ports
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `FRONTEND_PORT` | Frontend service port | `3929` |
+| `BACKEND_PORT` | Backend API service port | `8929` |
+| `ELECTRIC_PORT` | Electric SQL service port | `5929` |
+
+### Custom Domain / Reverse Proxy
+
+Only set these if serving SurfSense on a real domain via a reverse proxy (Caddy, Nginx, Cloudflare Tunnel, etc.). Leave commented out for standard localhost deployments.
+
+| Variable | Description |
+|----------|-------------|
+| `NEXT_FRONTEND_URL` | Public frontend URL (e.g. `https://app.yourdomain.com`) |
+| `BACKEND_URL` | Public backend URL for OAuth callbacks (e.g. `https://api.yourdomain.com`) |
+| `NEXT_PUBLIC_FASTAPI_BACKEND_URL` | Backend URL used by the frontend (e.g. `https://api.yourdomain.com`) |
+| `NEXT_PUBLIC_ELECTRIC_URL` | Electric SQL URL used by the frontend (e.g. `https://electric.yourdomain.com`) |
+
+### Database
+
+Defaults work out of the box. Change for security in production.
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `DB_USER` | PostgreSQL username | `surfsense` |
+| `DB_PASSWORD` | PostgreSQL password | `surfsense` |
+| `DB_NAME` | PostgreSQL database name | `surfsense` |
+| `DB_HOST` | PostgreSQL host | `db` |
+| `DB_PORT` | PostgreSQL port | `5432` |
+| `DB_SSLMODE` | SSL mode: `disable`, `require`, `verify-ca`, `verify-full` | `disable` |
+| `DATABASE_URL` | Full connection URL override. Use for managed databases (RDS, Supabase, etc.) | *(built from above)* |
+
+### Electric SQL
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `ELECTRIC_DB_USER` | Replication user for Electric SQL | `electric` |
+| `ELECTRIC_DB_PASSWORD` | Replication password for Electric SQL | `electric_password` |
+| `ELECTRIC_DATABASE_URL` | Full connection URL override for Electric. Set to `host.docker.internal` when pointing at a local Postgres instance | *(built from above)* |
+
+### Authentication
+
+| Variable | Description |
+|----------|-------------|
+| `GOOGLE_OAUTH_CLIENT_ID` | Google OAuth client ID (required if `AUTH_TYPE=GOOGLE`) |
+| `GOOGLE_OAUTH_CLIENT_SECRET` | Google OAuth client secret (required if `AUTH_TYPE=GOOGLE`) |
+
+Create credentials at the [Google Cloud Console](https://console.cloud.google.com/apis/credentials).
+
+### External API Keys
+
+| Variable | Description |
+|----------|-------------|
+| `FIRECRAWL_API_KEY` | [Firecrawl](https://www.firecrawl.dev/) API key for web crawling |
+| `UNSTRUCTURED_API_KEY` | [Unstructured.io](https://unstructured.io/) API key (required if `ETL_SERVICE=UNSTRUCTURED`) |
+| `LLAMA_CLOUD_API_KEY` | [LlamaCloud](https://cloud.llamaindex.ai/) API key (required if `ETL_SERVICE=LLAMACLOUD`) |
+
+### Connector OAuth Keys
+
+Uncomment the connectors you want to use. Redirect URIs follow the pattern `http://localhost:8000/api/v1/auth/{connector}/connector/callback`, where `{connector}` is the connector's name.
+
+| Connector | Variables |
+|-----------|-----------|
+| Google Drive / Gmail / Calendar | `GOOGLE_DRIVE_REDIRECT_URI`, `GOOGLE_GMAIL_REDIRECT_URI`, `GOOGLE_CALENDAR_REDIRECT_URI` |
+| Notion | `NOTION_CLIENT_ID`, `NOTION_CLIENT_SECRET`, `NOTION_REDIRECT_URI` |
+| Slack | `SLACK_CLIENT_ID`, `SLACK_CLIENT_SECRET`, `SLACK_REDIRECT_URI` |
+| Discord | `DISCORD_CLIENT_ID`, `DISCORD_CLIENT_SECRET`, `DISCORD_BOT_TOKEN`, `DISCORD_REDIRECT_URI` |
+| Jira & Confluence | `ATLASSIAN_CLIENT_ID`, `ATLASSIAN_CLIENT_SECRET`, `JIRA_REDIRECT_URI`, `CONFLUENCE_REDIRECT_URI` |
+| Linear | `LINEAR_CLIENT_ID`, `LINEAR_CLIENT_SECRET`, `LINEAR_REDIRECT_URI` |
+| ClickUp | `CLICKUP_CLIENT_ID`, `CLICKUP_CLIENT_SECRET`, `CLICKUP_REDIRECT_URI` |
+| Airtable | `AIRTABLE_CLIENT_ID`, `AIRTABLE_CLIENT_SECRET`, `AIRTABLE_REDIRECT_URI` |
+| Microsoft Teams | `TEAMS_CLIENT_ID`, `TEAMS_CLIENT_SECRET`, `TEAMS_REDIRECT_URI` |
+
+### Observability (optional)
+
+| Variable | Description |
+|----------|-------------|
+| `LANGSMITH_TRACING` | Enable LangSmith tracing (`true` / `false`) |
+| `LANGSMITH_ENDPOINT` | LangSmith API endpoint |
+| `LANGSMITH_API_KEY` | LangSmith API key |
+| `LANGSMITH_PROJECT` | LangSmith project name |
+
+### Advanced (optional)
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `SCHEDULE_CHECKER_INTERVAL` | How often to check for scheduled connector tasks (e.g. `5m`, `1h`) | `5m` |
+| `RERANKERS_ENABLED` | Enable document reranking for improved search | `FALSE` |
+| `RERANKERS_MODEL_NAME` | Reranker model name (e.g. `ms-marco-MiniLM-L-12-v2`) | |
+| `RERANKERS_MODEL_TYPE` | Reranker model type (e.g. `flashrank`) | |
+| `PAGES_LIMIT` | Max pages per user for ETL services | unlimited |
+
+---
+
+## Docker Services
+
+| Service | Description |
+|---------|-------------|
+| `db` | PostgreSQL with pgvector extension |
+| `redis` | Message broker for Celery |
+| `backend` | FastAPI application server |
+| `celery_worker` | Background task processing (document indexing, etc.) |
+| `celery_beat` | Periodic task scheduler (connector sync) |
+| `electric` | Electric SQL (real-time sync for the frontend) |
+| `frontend` | Next.js web application |
+
+All services start automatically with `docker compose up -d`.
+
+The backend includes a health check. Dependent services (workers, frontend) wait until the API is fully ready before starting. You can monitor startup progress with `docker compose ps` (look for `(health: starting)` → `(healthy)`).
+
+---
+
+## Useful Commands
+
+```bash
+# View logs (all services)
+docker compose logs -f
+
+# View logs for a specific service
+docker compose logs -f backend
+docker compose logs -f electric
+
+# Stop all services
+docker compose down
+
+# Restart a specific service
+docker compose restart backend
+
+# Stop and remove all containers + volumes (destructive!)
+docker compose down -v
+```
+
+---
+
+## Troubleshooting
+
+- **Ports already in use**: Change the relevant `*_PORT` variable in `.env` and restart.
+- **Permission errors on Linux**: You may need to prefix `docker` commands with `sudo`.
+- **Electric SQL not connecting**: Check `docker compose logs electric`. If it shows `domain does not exist: db`, ensure `ELECTRIC_DATABASE_URL` is not set to a stale value in `.env`.
+- **Real-time updates not working in browser**: Open DevTools → Console and look for `[Electric]` errors. Check that `NEXT_PUBLIC_ELECTRIC_URL` matches the running Electric SQL address.
+- **Line ending issues on Windows**: Run `git config --global core.autocrlf true` before cloning.
diff --git a/surfsense_web/content/docs/docker-installation/install-script.mdx b/surfsense_web/content/docs/docker-installation/install-script.mdx
new file mode 100644
index 000000000..bbe95c230
--- /dev/null
+++ b/surfsense_web/content/docs/docker-installation/install-script.mdx
@@ -0,0 +1,41 @@
+---
+title: One-Line Install Script
+description: One-command installation of SurfSense using Docker
+---
+
+Downloads the compose files, generates a `SECRET_KEY`, starts all services, and sets up [Watchtower](https://github.com/nicholas-fedor/watchtower) for automatic daily updates.
+
+**Prerequisites:** [Docker Desktop](https://www.docker.com/products/docker-desktop/) must be installed and running.
+
+### For Linux/macOS users:
+
+```bash
+curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.sh | bash
+```
+
+### For Windows users (PowerShell):
+
+```powershell
+irm https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.ps1 | iex
+```
+
+This creates a `./surfsense/` directory with `docker-compose.yml` and `.env`, then runs `docker compose up -d`.
+
+To skip Watchtower (e.g. in production where you manage updates yourself):
+
+```bash
+curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.sh | bash -s -- --no-watchtower
+```
+
+To customise the check interval (default 24h), use `--watchtower-interval=SECONDS`.
+
+---
+
+## Access SurfSense
+
+After starting, access SurfSense at:
+
+- **Frontend**: [http://localhost:3929](http://localhost:3929)
+- **Backend API**: [http://localhost:8929](http://localhost:8929)
+- **API Docs**: [http://localhost:8929/docs](http://localhost:8929/docs)
+- **Electric SQL**: [http://localhost:5929](http://localhost:5929)
diff --git a/surfsense_web/content/docs/docker-installation/meta.json b/surfsense_web/content/docs/docker-installation/meta.json
new file mode 100644
index 000000000..13683547b
--- /dev/null
+++ b/surfsense_web/content/docs/docker-installation/meta.json
@@ -0,0 +1,6 @@
+{
+ "title": "Docker Installation",
+ "pages": ["install-script", "docker-compose", "updating", "dev-compose", "migrate-from-allinone"],
+ "icon": "Container",
+ "defaultOpen": false
+}
diff --git a/surfsense_web/content/docs/how-to/migrate-from-allinone.mdx b/surfsense_web/content/docs/docker-installation/migrate-from-allinone.mdx
similarity index 67%
rename from surfsense_web/content/docs/how-to/migrate-from-allinone.mdx
rename to surfsense_web/content/docs/docker-installation/migrate-from-allinone.mdx
index 3de0b043d..c623b59e7 100644
--- a/surfsense_web/content/docs/how-to/migrate-from-allinone.mdx
+++ b/surfsense_web/content/docs/docker-installation/migrate-from-allinone.mdx
@@ -81,87 +81,6 @@ bash migrate-database.sh --db-user myuser --db-password mypass --db-name mydb
---
-## Option C — Manual steps
-
-For users who prefer full control or whose platform doesn't support bash scripts (e.g. Windows without WSL2).
-
-### Step 1 — Stop the old all-in-one container
-
-Before mounting the `surfsense-data` volume into a new container, stop the existing one to prevent two PostgreSQL processes from writing to the same data directory:
-
-```bash
-docker stop surfsense 2>/dev/null || true
-```
-
-### Step 2 — Start a temporary PostgreSQL 14 container
-
-```bash
-docker run -d --name surfsense-pg14-temp \
- -v surfsense-data:/data \
- -e PGDATA=/data/postgres \
- -e POSTGRES_USER=surfsense \
- -e POSTGRES_PASSWORD=surfsense \
- -e POSTGRES_DB=surfsense \
- pgvector/pgvector:pg14
-```
-
-Wait ~10 seconds, then confirm it is healthy:
-
-```bash
-docker exec surfsense-pg14-temp pg_isready -U surfsense
-```
-
-### Step 3 — Dump the database
-
-```bash
-docker exec -e PGPASSWORD=surfsense surfsense-pg14-temp \
- pg_dump -U surfsense surfsense > surfsense_backup.sql
-```
-
-### Step 4 — Recover your SECRET\_KEY
-
-```bash
-docker run --rm -v surfsense-data:/data alpine cat /data/.secret_key
-```
-
-### Step 5 — Set up the new stack
-
-```bash
-mkdir -p surfsense/scripts
-curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/docker-compose.yml -o surfsense/docker-compose.yml
-curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/.env.example -o surfsense/.env.example
-curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/postgresql.conf -o surfsense/postgresql.conf
-curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/init-electric-user.sh -o surfsense/scripts/init-electric-user.sh
-chmod +x surfsense/scripts/init-electric-user.sh
-cp surfsense/.env.example surfsense/.env
-```
-
-Set `SECRET_KEY` in `surfsense/.env` to the value from Step 4.
-
-### Step 6 — Start PostgreSQL 17 and restore
-
-```bash
-cd surfsense
-docker compose up -d db
-docker compose exec db pg_isready -U surfsense # wait until ready
-docker compose exec -T db psql -U surfsense -d surfsense < ../surfsense_backup.sql
-```
-
-### Step 7 — Start all services
-
-```bash
-docker compose up -d
-```
-
-### Step 8 — Clean up
-
-```bash
-docker stop surfsense-pg14-temp && docker rm surfsense-pg14-temp
-docker volume rm surfsense-data # only after verifying migration succeeded
-```
-
----
-
## Troubleshooting
### `install.sh` runs normally with a blank database (no migration happened)
diff --git a/surfsense_web/content/docs/docker-installation/updating.mdx b/surfsense_web/content/docs/docker-installation/updating.mdx
new file mode 100644
index 000000000..6ef2fcecc
--- /dev/null
+++ b/surfsense_web/content/docs/docker-installation/updating.mdx
@@ -0,0 +1,50 @@
+---
+title: Updating
+description: How to update your SurfSense Docker deployment
+---
+
+## Watchtower Daemon (recommended)
+
+Auto-updates every 24 hours. If you used the [install script](/docs/docker-installation/install-script), Watchtower is already running. No extra setup needed.
+
+For [manual Docker Compose](/docs/docker-installation/docker-compose) installs, start Watchtower separately:
+
+```bash
+docker run -d --name watchtower \
+ --restart unless-stopped \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ nickfedor/watchtower \
+ --label-enable \
+ --interval 86400
+```
+
+## Watchtower One-Time Update
+
+```bash
+docker run --rm -v /var/run/docker.sock:/var/run/docker.sock \
+ nickfedor/watchtower --run-once \
+ --label-filter "com.docker.compose.project=surfsense"
+```
+
+
+Use `nickfedor/watchtower`. The original `containrrr/watchtower` is no longer maintained and may fail with newer Docker versions.
+
+
+## Manual Update
+
+```bash
+cd surfsense # or SurfSense/docker if you cloned manually
+docker compose pull && docker compose up -d
+```
+
+Database migrations are applied automatically on every startup.
+
+---
+
+## Migrating from the All-in-One Container
+
+
+If you were previously using `docker-compose.quickstart.yml` (the legacy all-in-one `surfsense` container), your data lives in a `surfsense-data` volume and requires a **one-time migration** before switching to the current setup. PostgreSQL has been upgraded from version 14 to 17, so a simple volume swap will not work.
+
+See the full step-by-step guide: [Migrate from the All-in-One Container](/docs/docker-installation/migrate-from-allinone).
+
diff --git a/surfsense_web/content/docs/how-to/electric-sql.mdx b/surfsense_web/content/docs/how-to/electric-sql.mdx
index fb2cf941a..f051a9ab5 100644
--- a/surfsense_web/content/docs/how-to/electric-sql.mdx
+++ b/surfsense_web/content/docs/how-to/electric-sql.mdx
@@ -5,7 +5,7 @@ description: Setting up Electric SQL for real-time data synchronization in SurfS
[Electric SQL](https://electric-sql.com/) enables real-time data synchronization in SurfSense, providing instant updates for inbox items, document indexing status, and connector sync progress without manual refresh. The frontend uses [PGlite](https://pglite.dev/) (a lightweight PostgreSQL in the browser) to maintain a local database that syncs with the backend via Electric SQL.
-## What Does Electric SQL Do?
+## What does Electric SQL do?
When you index documents or receive inbox updates, Electric SQL pushes updates to your browser in real-time. The data flows like this:
@@ -23,45 +23,24 @@ This means:
## Docker Setup
-The `docker-compose.yml` includes the Electric SQL service. It is pre-configured to connect to the Docker-managed `db` container out of the box.
+- The `docker-compose.yml` includes the Electric SQL service, pre-configured to connect to the Docker-managed `db` container.
+- No additional configuration is required. Electric SQL works with the Docker PostgreSQL instance out of the box.
-```bash
-docker compose up -d
-```
+## Manual Setup (Development Only)
-The Electric SQL service configuration in `docker-compose.yml`:
-
-```yaml
-electric:
- image: electricsql/electric:1.4.6
- ports:
- - "${ELECTRIC_PORT:-5133}:3000"
- environment:
- DATABASE_URL: ${ELECTRIC_DATABASE_URL:-postgresql://${ELECTRIC_DB_USER:-electric}:${ELECTRIC_DB_PASSWORD:-electric_password}@${DB_HOST:-db}:${DB_PORT:-5432}/${DB_NAME:-surfsense}?sslmode=${DB_SSLMODE:-disable}}
- ELECTRIC_INSECURE: "true"
- ELECTRIC_WRITE_TO_PG_MODE: direct
- depends_on:
- db:
- condition: service_healthy
-```
-
-No additional configuration is required — Electric SQL is pre-configured to work with the Docker PostgreSQL instance.
-
-## Manual Setup
-
-Follow the steps below based on your PostgreSQL setup.
+This section is intended for local development environments. Follow the steps below based on your PostgreSQL setup.
### Step 1: Configure Environment Variables
Ensure your environment files are configured. If you haven't set up SurfSense yet, follow the [Manual Installation Guide](/docs/manual-installation) first.
-For Electric SQL, verify these variables are set in `docker/.env`:
+For Electric SQL, verify these variables are set:
+
+**Backend (`surfsense_backend/.env`):**
```bash
-ELECTRIC_PORT=5133
ELECTRIC_DB_USER=electric
ELECTRIC_DB_PASSWORD=electric_password
-NEXT_PUBLIC_ELECTRIC_URL=http://localhost:5133
```
**Frontend (`surfsense_web/.env`):**
@@ -71,17 +50,19 @@ NEXT_PUBLIC_ELECTRIC_URL=http://localhost:5133
NEXT_PUBLIC_ELECTRIC_AUTH_MODE=insecure
```
+Next, choose the option that matches your PostgreSQL setup:
+
---
### Option A: Using Docker PostgreSQL
-If you're using the Docker-managed PostgreSQL instance, no extra configuration is needed. Just start the services:
+If you're using the Docker-managed PostgreSQL instance, no extra configuration is needed. Just start the services using the development compose file (which exposes the PostgreSQL port to your host machine):
```bash
-docker compose up -d db electric
+docker compose -f docker-compose.dev.yml up -d db electric
```
-Then run the database migration and start the backend:
+Then run the database migration, start the backend, and launch the frontend:
```bash
cd surfsense_backend
@@ -89,6 +70,13 @@ uv run alembic upgrade head
uv run main.py
```
+In a separate terminal, start the frontend:
+
+```bash
+cd surfsense_web
+pnpm run dev
+```
+
Electric SQL is now configured and connected to your Docker PostgreSQL database.
---
@@ -148,7 +136,7 @@ ELECTRIC_DATABASE_URL=postgresql://electric:electric_password@host.docker.intern
**4. Start Electric SQL only (skip the Docker `db` container):**
```bash
-docker compose up -d --no-deps electric
+docker compose -f docker-compose.dev.yml up -d --no-deps electric
```
The `--no-deps` flag starts only the `electric` service without starting the Docker-managed `db` container.
@@ -161,18 +149,32 @@ uv run alembic upgrade head
uv run main.py
```
+In a separate terminal, start the frontend:
+
+```bash
+cd surfsense_web
+pnpm run dev
+```
+
Electric SQL is now configured and connected to your local PostgreSQL database.
## Environment Variables Reference
+**Required for manual setup:**
+
| Variable | Location | Description | Default |
|----------|----------|-------------|---------|
-| `ELECTRIC_PORT` | `docker/.env` | Port to expose Electric SQL | `5133` |
-| `ELECTRIC_DB_USER` | `docker/.env` | Database user for Electric replication | `electric` |
-| `ELECTRIC_DB_PASSWORD` | `docker/.env` | Database password for Electric replication | `electric_password` |
-| `ELECTRIC_DATABASE_URL` | `docker/.env` | Full connection URL override for Electric. Set to use `host.docker.internal` when pointing at a local Postgres instance | *(built from above defaults)* |
-| `NEXT_PUBLIC_ELECTRIC_URL` | Frontend `.env` | Electric SQL server URL (PGlite connects to this) | `http://localhost:5133` |
-| `NEXT_PUBLIC_ELECTRIC_AUTH_MODE` | Frontend `.env` | Authentication mode (`insecure` for dev, `secure` for production) | `insecure` |
+| `ELECTRIC_DB_USER` | `surfsense_backend/.env` | Database user for Electric replication | `electric` |
+| `ELECTRIC_DB_PASSWORD` | `surfsense_backend/.env` | Database password for Electric replication | `electric_password` |
+| `NEXT_PUBLIC_ELECTRIC_URL` | `surfsense_web/.env` | Electric SQL server URL (PGlite connects to this) | `http://localhost:5133` |
+| `NEXT_PUBLIC_ELECTRIC_AUTH_MODE` | `surfsense_web/.env` | Authentication mode (`insecure` for dev, `secure` for production) | `insecure` |
+
+**Optional / Docker-only:**
+
+| Variable | Location | Description | Default |
+|----------|----------|-------------|---------|
+| `ELECTRIC_PORT` | `docker/.env` | Port to expose Electric SQL on the host | `5133` (dev), `5929` (production) |
+| `ELECTRIC_DATABASE_URL` | `docker/.env` | Full connection URL override for Electric. Only needed for Option B (local Postgres via `host.docker.internal`) | *(built from above defaults)* |
## Verify Setup
diff --git a/surfsense_web/content/docs/how-to/meta.json b/surfsense_web/content/docs/how-to/meta.json
index c8ecb05d9..16e1e9c81 100644
--- a/surfsense_web/content/docs/how-to/meta.json
+++ b/surfsense_web/content/docs/how-to/meta.json
@@ -1,6 +1,6 @@
{
"title": "How to",
- "pages": ["electric-sql", "realtime-collaboration", "migrate-from-allinone"],
- "icon": "BookOpen",
+ "pages": ["electric-sql", "realtime-collaboration"],
+ "icon": "Compass",
"defaultOpen": false
}
diff --git a/surfsense_web/content/docs/index.mdx b/surfsense_web/content/docs/index.mdx
index 6c0450297..2204e4e34 100644
--- a/surfsense_web/content/docs/index.mdx
+++ b/surfsense_web/content/docs/index.mdx
@@ -1,86 +1,61 @@
---
-title: Prerequisites
-description: Required setup's before setting up SurfSense
-icon: ClipboardCheck
+title: Documentation
+description: Welcome to SurfSense's documentation
+icon: BookOpen
---
+import { Card, Cards } from 'fumadocs-ui/components/card';
+import { ClipboardCheck, Download, Container, Wrench, Cable, BookOpen, FlaskConical, Heart } from 'lucide-react';
-## Auth Setup
+Welcome to **SurfSense's Documentation!** Here, you'll find everything you need to get the most out of SurfSense. Dive in to explore how SurfSense can be your AI-powered research companion.
-SurfSense supports both Google OAuth and local email/password authentication. Google OAuth is optional - if you prefer local authentication, you can skip this section.
-
-**Note**: Google OAuth setup is **required** in your `.env` files if you want to use the Gmail and Google Calendar connectors in SurfSense.
-
-To set up Google OAuth:
-
-1. Login to your [Google Developer Console](https://console.cloud.google.com/)
-2. Enable the required APIs:
- - **People API** (required for basic Google OAuth)
-
-3. Set up OAuth consent screen.
-
-4. Create OAuth client ID and secret.
-
-5. It should look like this.
-
-
----
-
-## File Upload's
-
-SurfSense supports three ETL (Extract, Transform, Load) services for converting files to LLM-friendly formats:
-
-### Option 1: Unstructured
-
-Files are converted using [Unstructured](https://github.com/Unstructured-IO/unstructured)
-
-1. Get an Unstructured.io API key from [Unstructured Platform](https://platform.unstructured.io/)
-2. You should be able to generate API keys once registered
-
-
-### Option 2: LlamaIndex (LlamaCloud)
-
-Files are converted using [LlamaIndex](https://www.llamaindex.ai/) which offers 50+ file format support.
-
-1. Get a LlamaIndex API key from [LlamaCloud](https://cloud.llamaindex.ai/)
-2. Sign up for a LlamaCloud account to access their parsing services
-3. LlamaCloud provides enhanced parsing capabilities for complex documents
-
-### Option 3: Docling (Recommended for Privacy)
-
-Files are processed locally using [Docling](https://github.com/DS4SD/docling) - IBM's open-source document parsing library.
-
-1. **No API key required** - all processing happens locally
-2. **Privacy-focused** - documents never leave your system
-3. **Supported formats**: PDF, Office documents (Word, Excel, PowerPoint), images (PNG, JPEG, TIFF, BMP, WebP), HTML, CSV, AsciiDoc
-4. **Enhanced features**: Advanced table detection, image extraction, and structured document parsing
-5. **GPU acceleration** support for faster processing (when available)
-
-**Note**: You only need to set up one of these services.
-
----
-
-## LLM Observability (Optional)
-
-This is not required for SurfSense to work. But it is always a good idea to monitor LLM interactions. So we do not have those WTH moments.
-
-1. Get a LangSmith API key from [smith.langchain.com](https://smith.langchain.com/)
-2. This helps in observing SurfSense Researcher Agent.
-
-
----
-
-## Crawler
-
-SurfSense have 2 options for saving webpages:
-- [SurfSense Extension](https://github.com/MODSetter/SurfSense/tree/main/surfsense_browser_extension) (Overall better experience & ability to save private webpages, recommended)
-- Crawler (If you want to save public webpages)
-
-**NOTE:** SurfSense currently uses [Firecrawl.py](https://www.firecrawl.dev/) for web crawling. If you plan on using the crawler, you will need to create a Firecrawl account and get an API key.
-
-
----
-
-## Next Steps
-
-Once you have all prerequisites in place, proceed to the [installation guide](/docs/installation) to set up SurfSense.
\ No newline at end of file
+
+ }
+ title="Prerequisites"
+ description="Required setup before installing SurfSense"
+ href="/docs/prerequisites"
+ />
+ }
+ title="Installation"
+ description="Choose your installation method"
+ href="/docs/installation"
+ />
+ }
+ title="Docker Installation"
+ description="Deploy SurfSense with Docker Compose"
+ href="/docs/docker-installation"
+ />
+ }
+ title="Manual Installation"
+ description="Set up SurfSense manually from source"
+ href="/docs/manual-installation"
+ />
+ }
+ title="Connectors"
+ description="Integrate with third-party services"
+ href="/docs/connectors"
+ />
+ }
+ title="How-To Guides"
+ description="Step-by-step guides for common tasks"
+ href="/docs/how-to"
+ />
+ }
+ title="Testing"
+ description="Running and writing tests for SurfSense"
+ href="/docs/testing"
+ />
+ }
+ title="Code of Conduct"
+ description="Community guidelines and expectations"
+ href="/docs/code-of-conduct"
+ />
+
diff --git a/surfsense_web/content/docs/installation.mdx b/surfsense_web/content/docs/installation.mdx
index 6aa2eeb90..aa3a2a72d 100644
--- a/surfsense_web/content/docs/installation.mdx
+++ b/surfsense_web/content/docs/installation.mdx
@@ -12,7 +12,7 @@ There are two ways to install SurfSense, but both require the repository to be c
This method provides a containerized environment with all dependencies pre-configured. Less Customization.
-[Learn more about Docker installation](/docs/docker-installation)
+[Learn more about Docker installation](/docs/docker-installation/install-script)
## Manual Installation (Preferred)
diff --git a/surfsense_web/content/docs/meta.json b/surfsense_web/content/docs/meta.json
index f73b59e18..a0b6f8a1b 100644
--- a/surfsense_web/content/docs/meta.json
+++ b/surfsense_web/content/docs/meta.json
@@ -5,12 +5,14 @@
"pages": [
"---Guides---",
"index",
+ "prerequisites",
"installation",
- "docker-installation",
"manual-installation",
+ "docker-installation",
"connectors",
"how-to",
- "---Development---",
- "testing"
+ "---Developers---",
+ "testing",
+ "code-of-conduct"
]
}
diff --git a/surfsense_web/content/docs/prerequisites.mdx b/surfsense_web/content/docs/prerequisites.mdx
new file mode 100644
index 000000000..6c0450297
--- /dev/null
+++ b/surfsense_web/content/docs/prerequisites.mdx
@@ -0,0 +1,86 @@
+---
+title: Prerequisites
+description: Required setup before installing SurfSense
+icon: ClipboardCheck
+---
+
+
+## Auth Setup
+
+SurfSense supports both Google OAuth and local email/password authentication. Google OAuth is optional - if you prefer local authentication, you can skip this section.
+
+**Note**: Google OAuth setup is **required** in your `.env` files if you want to use the Gmail and Google Calendar connectors in SurfSense.
+
+To set up Google OAuth:
+
+1. Login to your [Google Developer Console](https://console.cloud.google.com/)
+2. Enable the required APIs:
+ - **People API** (required for basic Google OAuth)
+
+3. Set up OAuth consent screen.
+
+4. Create OAuth client ID and secret.
+
+5. It should look like this.
+
+
+---
+
+## File Uploads
+
+SurfSense supports three ETL (Extract, Transform, Load) services for converting files to LLM-friendly formats:
+
+### Option 1: Unstructured
+
+Files are converted using [Unstructured](https://github.com/Unstructured-IO/unstructured)
+
+1. Get an Unstructured.io API key from [Unstructured Platform](https://platform.unstructured.io/)
+2. You should be able to generate API keys once registered
+
+
+### Option 2: LlamaIndex (LlamaCloud)
+
+Files are converted using [LlamaIndex](https://www.llamaindex.ai/), which supports 50+ file formats.
+
+1. Get a LlamaIndex API key from [LlamaCloud](https://cloud.llamaindex.ai/)
+2. Sign up for a LlamaCloud account to access their parsing services
+3. LlamaCloud provides enhanced parsing capabilities for complex documents
+
+### Option 3: Docling (Recommended for Privacy)
+
+Files are processed locally using [Docling](https://github.com/DS4SD/docling) - IBM's open-source document parsing library.
+
+1. **No API key required** - all processing happens locally
+2. **Privacy-focused** - documents never leave your system
+3. **Supported formats**: PDF, Office documents (Word, Excel, PowerPoint), images (PNG, JPEG, TIFF, BMP, WebP), HTML, CSV, AsciiDoc
+4. **Enhanced features**: Advanced table detection, image extraction, and structured document parsing
+5. **GPU acceleration** support for faster processing (when available)
+
+**Note**: You only need to set up one of these services.
+
+---
+
+## LLM Observability (Optional)
+
+This is not required for SurfSense to work, but it is always a good idea to monitor LLM interactions so we do not have those WTH moments.
+
+1. Get a LangSmith API key from [smith.langchain.com](https://smith.langchain.com/)
+2. This helps in observing SurfSense Researcher Agent.
+
+
+---
+
+## Crawler
+
+SurfSense has 2 options for saving webpages:
+- [SurfSense Extension](https://github.com/MODSetter/SurfSense/tree/main/surfsense_browser_extension) (Overall better experience & ability to save private webpages, recommended)
+- Crawler (If you want to save public webpages)
+
+**NOTE:** SurfSense currently uses [Firecrawl.py](https://www.firecrawl.dev/) for web crawling. If you plan on using the crawler, you will need to create a Firecrawl account and get an API key.
+
+
+---
+
+## Next Steps
+
+Once you have all prerequisites in place, proceed to the [installation guide](/docs/installation) to set up SurfSense.
\ No newline at end of file
diff --git a/surfsense_web/hooks/use-documents.ts b/surfsense_web/hooks/use-documents.ts
index a972e1865..3d6ee9be4 100644
--- a/surfsense_web/hooks/use-documents.ts
+++ b/surfsense_web/hooks/use-documents.ts
@@ -61,7 +61,7 @@ export function toDisplayDoc(item: ApiDocumentInput): DocumentDisplay {
}
const EMPTY_TYPE_FILTER: DocumentTypeEnum[] = [];
-const INITIAL_PAGE_SIZE = 20;
+const INITIAL_PAGE_SIZE = 50;
const SCROLL_PAGE_SIZE = 5;
function isValidDocument(doc: DocumentElectric): boolean {
diff --git a/surfsense_web/hooks/use-inbox.ts b/surfsense_web/hooks/use-inbox.ts
index b29537698..f301dc90e 100644
--- a/surfsense_web/hooks/use-inbox.ts
+++ b/surfsense_web/hooks/use-inbox.ts
@@ -59,7 +59,7 @@ export function useInbox(
searchSpaceId: number | null,
category: NotificationCategory,
prefetchedUnread?: { total_unread: number; recent_unread: number } | null,
- prefetchedUnreadReady = true,
+ prefetchedUnreadReady = true
) {
const electricClient = useElectricClient();
diff --git a/surfsense_web/lib/apis/notifications-api.service.ts b/surfsense_web/lib/apis/notifications-api.service.ts
index 92775f672..bec28df29 100644
--- a/surfsense_web/lib/apis/notifications-api.service.ts
+++ b/surfsense_web/lib/apis/notifications-api.service.ts
@@ -156,9 +156,7 @@ class NotificationsApiService {
* Get unread counts for all categories in a single request.
* Replaces 2 separate getUnreadCount calls (comments + status).
*/
- getBatchUnreadCounts = async (
- searchSpaceId?: number
- ): Promise => {
+ getBatchUnreadCounts = async (searchSpaceId?: number): Promise => {
const params = new URLSearchParams();
if (searchSpaceId !== undefined) {
params.append("search_space_id", String(searchSpaceId));