#!/usr/bin/env bash
# =============================================================================
# SurfSense — One-line Install Script
#
# Usage: curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.sh | bash
#
# Flags:
#   --no-watchtower              Skip automatic Watchtower setup
#   --watchtower-interval=SECS   Check interval in seconds (default: 86400 = 24h)
#
# Handles two cases automatically:
#   1. Fresh install — no prior SurfSense data detected
#   2. Migration from the legacy all-in-one container (surfsense-data volume)
#      Downloads and runs migrate-database.sh --yes, then restores the dump
#      into the new PostgreSQL 17 stack. The user runs one command for both.
#
# If you used custom database credentials in the old all-in-one container, run
# migrate-database.sh manually first (with --db-user / --db-password flags),
# then re-run this script:
#   curl -fsSL .../docker/scripts/migrate-database.sh | bash -s -- --db-user X --db-password Y
# =============================================================================
set -euo pipefail

#######################################
# Install (or migrate) SurfSense via Docker Compose.
# Arguments: optional flags --no-watchtower, --watchtower-interval=SECS
# Outputs:   progress messages to stdout, errors to stderr
# Returns:   0 on success; exits 1 via error() on any fatal failure
#######################################
main() {
  REPO_RAW="https://raw.githubusercontent.com/MODSetter/SurfSense/main"
  INSTALL_DIR="./surfsense"
  OLD_VOLUME="surfsense-data"
  DUMP_FILE="./surfsense_migration_backup.sql"
  KEY_FILE="./surfsense_migration_secret.key"
  # Marker file written after a verified restore so re-runs skip migration.
  MIGRATION_DONE_FILE="${INSTALL_DIR}/.migration_done"
  MIGRATION_MODE=false
  SETUP_WATCHTOWER=true
  WATCHTOWER_INTERVAL=86400
  WATCHTOWER_CONTAINER="watchtower"

  # ── Parse flags ─────────────────────────────────────────────────────────────
  for arg in "$@"; do
    case "$arg" in
      --no-watchtower) SETUP_WATCHTOWER=false ;;
      --watchtower-interval=*) WATCHTOWER_INTERVAL="${arg#*=}" ;;
    esac
  done

  CYAN='\033[1;36m'
  YELLOW='\033[1;33m'
  GREEN='\033[0;32m'
  RED='\033[0;31m'
  BOLD='\033[1m'
  NC='\033[0m'

  info()    { printf "${CYAN}[SurfSense]${NC} %s\n" "$1"; }
  success() { printf "${GREEN}[SurfSense]${NC} %s\n" "$1"; }
  warn()    { printf "${YELLOW}[SurfSense]${NC} %s\n" "$1"; }
  # %b (not %s) so callers may embed \n for multi-line error messages.
  error()   { printf "${RED}[SurfSense]${NC} ERROR: %b\n" "$1" >&2; exit 1; }
  step()    { printf "\n${BOLD}${CYAN}── %s${NC}\n" "$1"; }

  # ── Pre-flight checks ────────────────────────────────────────────────────────
  step "Checking prerequisites"
  command -v docker >/dev/null 2>&1 \
    || error "Docker is not installed. Install it at: https://docs.docker.com/get-docker/"
  success "Docker found."

  # </dev/null guards stdin for curl|bash pipelines: docker must not eat the script.
  docker info >/dev/null 2>&1 </dev/null \
    || error "Docker daemon is not running. Please start Docker and try again."
  success "Docker daemon is running."

  # Prefer the compose v2 plugin; fall back to the legacy standalone binary.
  if docker compose version >/dev/null 2>&1 </dev/null; then
    DC="docker compose"
  elif command -v docker-compose >/dev/null 2>&1; then
    DC="docker-compose"
  else
    error "Docker Compose is not installed. Install it at: https://docs.docker.com/compose/install/"
  fi
  success "Docker Compose found (${DC})."

  # ── Wait-for-postgres helper ─────────────────────────────────────────────────
  # Polls pg_isready inside the db container every 2s, up to max_attempts.
  wait_for_pg() {
    local db_user="$1"
    local max_attempts=45
    local attempt=0
    info "Waiting for PostgreSQL to accept connections..."
    until (cd "${INSTALL_DIR}" && ${DC} exec -T db pg_isready -U "${db_user}" -q 2>/dev/null) </dev/null; do
      attempt=$((attempt + 1))
      if [[ $attempt -ge $max_attempts ]]; then
        error "PostgreSQL did not become ready after $((max_attempts * 2)) seconds.\nCheck logs: cd ${INSTALL_DIR} && ${DC} logs db"
      fi
      printf "."
      sleep 2
    done
    printf "\n"
    success "PostgreSQL is ready."
  }

  # ── Download files ───────────────────────────────────────────────────────────
  step "Downloading SurfSense files"
  info "Installation directory: ${INSTALL_DIR}"
  mkdir -p "${INSTALL_DIR}/scripts"

  # Each entry is "repo-path:local-destination".
  FILES=(
    "docker/docker-compose.yml:docker-compose.yml"
    "docker/.env.example:.env.example"
    "docker/postgresql.conf:postgresql.conf"
    "docker/scripts/init-electric-user.sh:scripts/init-electric-user.sh"
    "docker/scripts/migrate-database.sh:scripts/migrate-database.sh"
  )
  for entry in "${FILES[@]}"; do
    src="${entry%%:*}"
    dest="${entry##*:}"
    info "Downloading ${dest}..."
    curl -fsSL "${REPO_RAW}/${src}" -o "${INSTALL_DIR}/${dest}" \
      || error "Failed to download ${dest}. Check your internet connection and try again."
  done
  chmod +x "${INSTALL_DIR}/scripts/init-electric-user.sh"
  chmod +x "${INSTALL_DIR}/scripts/migrate-database.sh"
  success "All files downloaded to ${INSTALL_DIR}/"

  # ── Legacy all-in-one detection ──────────────────────────────────────────────
  # Detect surfsense-data volume → migration mode.
  # If a dump already exists (from a previous partial run) skip extraction and
  # go straight to restore — this makes re-runs safe and idempotent.
  if docker volume ls --format '{{.Name}}' 2>/dev/null </dev/null | grep -q "^${OLD_VOLUME}$" \
      && [[ ! -f "${MIGRATION_DONE_FILE}" ]]; then
    MIGRATION_MODE=true
    if [[ -f "${DUMP_FILE}" ]]; then
      step "Migration mode — using existing dump (skipping extraction)"
      info "Found existing dump: ${DUMP_FILE}"
      info "Skipping data extraction — proceeding directly to restore."
      info "To force a fresh extraction, remove the dump first: rm ${DUMP_FILE}"
    else
      step "Migration mode — legacy all-in-one container detected"
      warn "Volume '${OLD_VOLUME}' found. Your data will be migrated automatically."
      warn "PostgreSQL is being upgraded from version 14 to 17."
      warn "Your original data will NOT be deleted."
      printf "\n"
      info "Running data extraction (migrate-database.sh --yes)..."
      info "Full extraction log: ./surfsense-migration.log"
      printf "\n"
      # Run extraction non-interactively. On failure the error from
      # migrate-database.sh is printed and install.sh exits here.
      bash "${INSTALL_DIR}/scripts/migrate-database.sh" --yes </dev/null \
        || error "Data extraction failed. See ./surfsense-migration.log for details.\nYou can also run migrate-database.sh manually with custom flags:\n  bash ${INSTALL_DIR}/scripts/migrate-database.sh --db-user X --db-password Y"
      printf "\n"
      success "Data extraction complete. Proceeding with installation and restore."
    fi
  fi

  # ── Set up .env ──────────────────────────────────────────────────────────────
  step "Configuring environment"
  if [[ ! -f "${INSTALL_DIR}/.env" ]]; then
    cp "${INSTALL_DIR}/.env.example" "${INSTALL_DIR}/.env"
    if $MIGRATION_MODE && [[ -f "${KEY_FILE}" ]]; then
      # Reuse the legacy SECRET_KEY so existing encrypted data stays readable.
      SECRET_KEY=$(tr -d '[:space:]' < "${KEY_FILE}")
      success "Using SECRET_KEY recovered from legacy container."
    else
      SECRET_KEY=$(openssl rand -base64 32 2>/dev/null \
        || head -c 32 /dev/urandom | base64 | tr -d '\n')
      success "Generated new random SECRET_KEY."
    fi
    # BSD sed (macOS) requires an explicit empty suffix for -i.
    if [[ "$OSTYPE" == "darwin"* ]]; then
      sed -i '' "s|SECRET_KEY=replace_me_with_a_random_string|SECRET_KEY=${SECRET_KEY}|" "${INSTALL_DIR}/.env"
    else
      sed -i "s|SECRET_KEY=replace_me_with_a_random_string|SECRET_KEY=${SECRET_KEY}|" "${INSTALL_DIR}/.env"
    fi
    info "Created ${INSTALL_DIR}/.env"
  else
    warn ".env already exists — keeping your existing configuration."
  fi

  # ── Start containers ─────────────────────────────────────────────────────────
  if $MIGRATION_MODE; then
    # Read DB credentials from .env (fall back to defaults from docker-compose.yml)
    DB_USER=$(grep '^DB_USER=' "${INSTALL_DIR}/.env" 2>/dev/null | cut -d= -f2 | tr -d '"' | head -1 || true)
    DB_PASS=$(grep '^DB_PASSWORD=' "${INSTALL_DIR}/.env" 2>/dev/null | cut -d= -f2 | tr -d '"' | head -1 || true)
    DB_NAME=$(grep '^DB_NAME=' "${INSTALL_DIR}/.env" 2>/dev/null | cut -d= -f2 | tr -d '"' | head -1 || true)
    DB_USER="${DB_USER:-surfsense}"
    DB_PASS="${DB_PASS:-surfsense}"
    DB_NAME="${DB_NAME:-surfsense}"

    step "Starting PostgreSQL 17"
    (cd "${INSTALL_DIR}" && ${DC} up -d db) </dev/null
    wait_for_pg "${DB_USER}"

    step "Restoring database"
    [[ -f "${DUMP_FILE}" ]] \
      || error "Dump file '${DUMP_FILE}' not found. The migration script may have failed.\n  Check: ./surfsense-migration.log\n  Or run manually: bash ${INSTALL_DIR}/scripts/migrate-database.sh --yes"

    info "Restoring dump into PostgreSQL 17 — this may take a while for large databases..."
    # mktemp avoids a predictable world-writable path in /tmp.
    RESTORE_ERR=$(mktemp /tmp/surfsense_restore_err.XXXXXX)
    (cd "${INSTALL_DIR}" && ${DC} exec -T \
      -e PGPASSWORD="${DB_PASS}" \
      db psql -U "${DB_USER}" -d "${DB_NAME}" \
      >/dev/null 2>"${RESTORE_ERR}") < "${DUMP_FILE}" || true

    # Surface real errors; ignore benign "already exists" noise from pg_dump headers
    FATAL_ERRORS=$(grep -i "^ERROR:" "${RESTORE_ERR}" \
      | grep -iv "already exists" \
      | grep -iv "multiple primary keys" \
      || true)
    if [[ -n "${FATAL_ERRORS}" ]]; then
      warn "Restore completed with errors (may be harmless pg_dump header noise):"
      printf "%s\n" "${FATAL_ERRORS}"
      warn "If SurfSense behaves incorrectly, inspect manually:"
      warn "  cd ${INSTALL_DIR} && ${DC} exec db psql -U ${DB_USER} -d ${DB_NAME} < ${DUMP_FILE}"
    else
      success "Database restored with no fatal errors."
    fi

    # Smoke test — verify tables are present
    TABLE_COUNT=$(
      cd "${INSTALL_DIR}" && ${DC} exec -T \
        -e PGPASSWORD="${DB_PASS}" \
        db psql -U "${DB_USER}" -d "${DB_NAME}" -t \
        -c "SELECT count(*) FROM information_schema.tables WHERE table_schema = 'public';" \
        2>/dev/null </dev/null | tr -d ' \n' || echo "0"
    )
    if [[ "${TABLE_COUNT}" == "0" || -z "${TABLE_COUNT}" ]]; then
      warn "Smoke test: no tables found after restore."
      warn "The restore may have failed silently. Check: cd ${INSTALL_DIR} && ${DC} logs db"
    else
      success "Smoke test passed: ${TABLE_COUNT} table(s) restored successfully."
      touch "${MIGRATION_DONE_FILE}"
    fi

    step "Starting all SurfSense services"
    (cd "${INSTALL_DIR}" && ${DC} up -d) </dev/null
    success "All services started."
    # Key file is no longer needed — SECRET_KEY is now in .env
    rm -f "${KEY_FILE}"
  else
    step "Starting SurfSense"
    (cd "${INSTALL_DIR}" && ${DC} up -d) </dev/null
    success "All services started."
  fi

  # ── Watchtower (auto-update) ─────────────────────────────────────────────────
  if $SETUP_WATCHTOWER; then
    step "Setting up Watchtower (auto-updates every $((WATCHTOWER_INTERVAL / 3600))h)"
    # "missing" when the container does not exist at all.
    WT_STATE=$(docker inspect -f '{{.State.Running}}' "${WATCHTOWER_CONTAINER}" 2>/dev/null </dev/null || echo "missing")
    if [[ "${WT_STATE}" == "true" ]]; then
      success "Watchtower is already running — skipping."
    else
      if [[ "${WT_STATE}" != "missing" ]]; then
        info "Removing stopped Watchtower container..."
        docker rm -f "${WATCHTOWER_CONTAINER}" >/dev/null 2>&1 </dev/null || true
      fi
      if docker run -d \
          --name "${WATCHTOWER_CONTAINER}" \
          --restart unless-stopped \
          -v /var/run/docker.sock:/var/run/docker.sock \
          nickfedor/watchtower \
          --label-enable \
          --interval "${WATCHTOWER_INTERVAL}" >/dev/null 2>&1 </dev/null; then
        success "Watchtower started — labeled SurfSense containers will auto-update."
      else
        warn "Could not start Watchtower. You can set it up manually or use: docker compose pull && docker compose up -d"
      fi
    fi
  else
    info "Skipping Watchtower setup (--no-watchtower flag)."
  fi

  # ── Done ─────────────────────────────────────────────────────────────────────
  echo ""
  printf '\033[1;37m'
  cat <<'EOF'

 .d8888b.                    .d888  .d8888b.
d88P  Y88b                  d88P"  d88P  Y88b
Y88b.                       888    Y88b.
 "Y888b.   888  888 888d888 888888  "Y888b.    .d88b.  88888b.  .d8888b   .d88b.
    "Y88b. 888  888 888P"   888        "Y88b. d8P  Y8b 888 "88b 88K      d8P  Y8b
      "888 888  888 888     888          "888 88888888 888  888 "Y8888b. 88888888
Y88b  d88P Y88b 888 888     888    Y88b  d88P Y8b.     888  888      X88 Y8b.
 "Y8888P"   "Y88888 888     888     "Y8888P"   "Y8888  888  888  88888P'  "Y8888

EOF

  _version_display=$(grep '^SURFSENSE_VERSION=' "${INSTALL_DIR}/.env" 2>/dev/null | cut -d= -f2 | tr -d '"' | head -1 || true)
  _version_display="${_version_display:-latest}"
  printf " OSS Alternative to NotebookLM for Teams ${YELLOW}[%s]${NC}\n" "${_version_display}"
  printf "${CYAN}══════════════════════════════════════════════════════════════${NC}\n\n"

  info "Frontend: http://localhost:3929"
  info "Backend:  http://localhost:8929"
  info "API Docs: http://localhost:8929/docs"
  info ""
  info "Config: ${INSTALL_DIR}/.env"
  info "Logs:   cd ${INSTALL_DIR} && ${DC} logs -f"
  info "Stop:   cd ${INSTALL_DIR} && ${DC} down"
  info "Update: cd ${INSTALL_DIR} && ${DC} pull && ${DC} up -d"
  info ""

  if $SETUP_WATCHTOWER; then
    info "Watchtower: auto-updates every $((WATCHTOWER_INTERVAL / 3600))h (stop: docker rm -f ${WATCHTOWER_CONTAINER})"
  else
    warn "Watchtower skipped. For auto-updates, re-run without --no-watchtower."
  fi
  info ""

  if $MIGRATION_MODE; then
    warn "Migration complete! Open frontend and verify your data."
    warn "Once verified, clean up the legacy volume and migration files:"
    warn "  docker volume rm ${OLD_VOLUME}"
    warn "  rm ${DUMP_FILE}"
    warn "  rm ${MIGRATION_DONE_FILE}"
  else
    warn "First startup may take a few minutes while images are pulled."
    warn "Edit ${INSTALL_DIR}/.env to configure API keys, OAuth, etc."
  fi
} # end main()

main "$@"