mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-04-25 00:36:31 +02:00
feat: enhance installation and migration scripts to automate legacy data migration process
- Updated install.sh to handle both fresh installations and migrations from the legacy all-in-one container.
- Added checks for Docker and Docker Compose prerequisites.
- Implemented a wait-for-postgres function to ensure database readiness.
- Enhanced the migration script to extract data and recover the SECRET_KEY automatically.
- Updated documentation to reflect the new automated migration process.
This commit is contained in:
parent
5030dec96b
commit
025643ffa2
3 changed files with 357 additions and 329 deletions
|
|
@ -2,50 +2,85 @@
|
|||
# =============================================================================
|
||||
# SurfSense — One-line Install Script
|
||||
# Usage: curl -fsSL https://raw.githubusercontent.com/MODSetter/SurfSense/main/docker/scripts/install.sh | bash
|
||||
#
|
||||
# Handles two cases automatically:
|
||||
# 1. Fresh install — no prior SurfSense data detected
|
||||
# 2. Migration from the legacy all-in-one container (surfsense-data volume)
|
||||
# Downloads and runs migrate-database.sh --yes, then restores the dump
|
||||
# into the new PostgreSQL 17 stack. The user runs one command for both.
|
||||
#
|
||||
# If you used custom database credentials in the old all-in-one container, run
|
||||
# migrate-database.sh manually first (with --db-user / --db-password flags),
|
||||
# then re-run this script:
|
||||
# curl -fsSL .../docker/scripts/migrate-database.sh | bash -s -- --db-user X --db-password Y
|
||||
# =============================================================================
|
||||
|
||||
set -euo pipefail

# ── Configuration ────────────────────────────────────────────────────────────
# Source tree used for raw-file downloads.
readonly REPO_RAW="https://raw.githubusercontent.com/MODSetter/SurfSense/main"
# Everything is installed relative to the invoking directory.
readonly INSTALL_DIR="./surfsense"
# Name of the legacy all-in-one container's data volume.
readonly OLD_VOLUME="surfsense-data"
# Artifacts produced by migrate-database.sh during a legacy migration.
readonly DUMP_FILE="./surfsense_migration_backup.sql"
readonly KEY_FILE="./surfsense_migration_secret.key"
# Flipped to true when the legacy volume is detected — must stay writable.
MIGRATION_MODE=false
||||
|
||||
# ── ANSI color codes ─────────────────────────────────────────────────────────
# Stored as literal escape sequences; they are expanded inside printf FORMAT
# strings (printf interprets the \033), so keep them single-quoted here.
readonly CYAN='\033[1;36m'
readonly YELLOW='\033[1;33m'
readonly GREEN='\033[0;32m'
readonly RED='\033[0;31m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'   # reset / no color
|
||||
|
||||
# ── Console logging helpers ──────────────────────────────────────────────────
# Each takes a single message argument. The color variables are expanded in
# the printf FORMAT string so their \033 escapes are interpreted; the message
# itself goes through %s and is printed verbatim (no escape processing).
# error() writes to stderr and terminates the script with status 1.
# NOTE: the earlier duplicate definitions of info/warn/error were dead code —
# later definitions shadow earlier ones in shell — so only one set is kept.
info()    { printf "${CYAN}[SurfSense]${NC} %s\n" "$1"; }
success() { printf "${GREEN}[SurfSense]${NC} %s\n" "$1"; }
warn()    { printf "${YELLOW}[SurfSense]${NC} %s\n" "$1"; }
error()   { printf "${RED}[SurfSense]${NC} ERROR: %s\n" "$1" >&2; exit 1; }
step()    { printf "\n${BOLD}${CYAN}── %s${NC}\n" "$1"; }
|
||||
|
||||
# ── Pre-flight checks ────────────────────────────────────────────────────────

step "Checking prerequisites"

# Verify the docker CLI exists BEFORE any other docker invocation — the
# original ran `docker volume ls` ahead of this check.
command -v docker >/dev/null 2>&1 \
    || error "Docker is not installed. Install it at: https://docs.docker.com/get-docker/"
success "Docker found."

# A present CLI does not imply a running daemon; probe it explicitly.
docker info >/dev/null 2>&1 \
    || error "Docker daemon is not running. Please start Docker and try again."
success "Docker daemon is running."

# Prefer the Compose v2 plugin; fall back to the legacy standalone binary.
# NOTE: ${DC} is intentionally used unquoted at call sites so the two-word
# "docker compose" form splits into command + subcommand.
if docker compose version >/dev/null 2>&1; then
    DC="docker compose"
elif command -v docker-compose >/dev/null 2>&1; then
    DC="docker-compose"
else
    error "Docker Compose is not installed. Install it at: https://docs.docker.com/compose/install/"
fi
success "Docker Compose found ($DC)."
||||
|
||||
# ── Wait-for-postgres helper ─────────────────────────────────────────────────

#######################################
# Poll pg_isready inside the db container until PostgreSQL accepts
# connections, printing a progress dot every 2 seconds.
# Globals:   INSTALL_DIR, DC (read)
# Arguments: $1 - database user to probe with
# Outputs:   progress dots and status lines to stdout
# Exits:     via error() after max_attempts * 2 seconds without success
#######################################
wait_for_pg() {
    local db_user="$1"
    local max_attempts=45
    local attempt=0

    info "Waiting for PostgreSQL to accept connections..."
    # Subshell keeps the `cd` from leaking into the caller's working dir.
    until (cd "${INSTALL_DIR}" && ${DC} exec -T db pg_isready -U "${db_user}" -q 2>/dev/null); do
        attempt=$((attempt + 1))
        if [[ $attempt -ge $max_attempts ]]; then
            # error() prints via %s, so a literal "\n" would not break the
            # line — join the two lines with a real newline instead.
            error "PostgreSQL did not become ready after $((max_attempts * 2)) seconds."$'\n'"Check logs: cd ${INSTALL_DIR} && ${DC} logs db"
        fi
        printf "."
        sleep 2
    done
    printf "\n"
    success "PostgreSQL is ready."
}
|
||||
|
||||
# ── Download files ───────────────────────────────────────────────────────────
|
||||
|
||||
info "Creating installation directory: ${INSTALL_DIR}"
|
||||
step "Downloading SurfSense files"
|
||||
info "Installation directory: ${INSTALL_DIR}"
|
||||
mkdir -p "${INSTALL_DIR}/scripts"
|
||||
|
||||
FILES=(
|
||||
|
|
@ -53,39 +88,148 @@ FILES=(
|
|||
"docker/.env.example:.env.example"
|
||||
"docker/postgresql.conf:postgresql.conf"
|
||||
"docker/scripts/init-electric-user.sh:scripts/init-electric-user.sh"
|
||||
"docker/scripts/migrate-database.sh:scripts/migrate-database.sh"
|
||||
)
|
||||
|
||||
# Each FILES entry is "repo-path:local-dest"; split on the colon.
for entry in "${FILES[@]}"; do
    src="${entry%%:*}"
    dest="${entry##*:}"
    info "Downloading ${dest}..."
    curl -fsSL "${REPO_RAW}/${src}" -o "${INSTALL_DIR}/${dest}" \
        || error "Failed to download ${dest}. Check your internet connection and try again."
done

# Helper scripts must be executable; docs and configs stay as downloaded.
chmod +x "${INSTALL_DIR}/scripts/init-electric-user.sh"
chmod +x "${INSTALL_DIR}/scripts/migrate-database.sh"
success "All files downloaded to ${INSTALL_DIR}/"
||||
|
||||
# ── Legacy all-in-one detection ──────────────────────────────────────────────
# Detect the surfsense-data volume → migration mode.
# If a dump already exists (from a previous partial run) skip extraction and
# go straight to restore — this makes re-runs safe and idempotent.

if docker volume ls --format '{{.Name}}' 2>/dev/null | grep -q "^${OLD_VOLUME}$"; then
    MIGRATION_MODE=true

    if [[ -f "${DUMP_FILE}" ]]; then
        step "Migration mode — using existing dump (skipping extraction)"
        info "Found existing dump: ${DUMP_FILE}"
        info "Skipping data extraction — proceeding directly to restore."
        info "To force a fresh extraction, remove the dump first: rm ${DUMP_FILE}"
    else
        step "Migration mode — legacy all-in-one container detected"
        warn "Volume '${OLD_VOLUME}' found. Your data will be migrated automatically."
        warn "PostgreSQL is being upgraded from version 14 to 17."
        warn "Your original data will NOT be deleted."
        printf "\n"
        info "Running data extraction (migrate-database.sh --yes)..."
        info "Full extraction log: ./surfsense-migration.log"
        printf "\n"

        # Run extraction non-interactively. On failure the error from
        # migrate-database.sh is printed and install.sh exits here.
        # error() prints via %s, so join lines with real newlines.
        bash "${INSTALL_DIR}/scripts/migrate-database.sh" --yes \
            || error "Data extraction failed. See ./surfsense-migration.log for details."$'\n'"You can also run migrate-database.sh manually with custom flags:"$'\n'"  bash ${INSTALL_DIR}/scripts/migrate-database.sh --db-user X --db-password Y"

        printf "\n"
        success "Data extraction complete. Proceeding with installation and restore."
    fi
fi
|
||||
|
||||
# ── Set up .env ──────────────────────────────────────────────────────────────

step "Configuring environment"

if [ ! -f "${INSTALL_DIR}/.env" ]; then
    cp "${INSTALL_DIR}/.env.example" "${INSTALL_DIR}/.env"

    # Reuse the legacy SECRET_KEY when migrating (so existing user sessions /
    # encrypted data keep working); otherwise generate a fresh random one.
    if $MIGRATION_MODE && [[ -f "${KEY_FILE}" ]]; then
        SECRET_KEY=$(tr -d '[:space:]' < "${KEY_FILE}")
        success "Using SECRET_KEY recovered from legacy container."
    else
        # openssl preferred; /dev/urandom fallback for minimal systems.
        SECRET_KEY=$(openssl rand -base64 32 2>/dev/null \
            || head -c 32 /dev/urandom | base64 | tr -d '\n')
        success "Generated new random SECRET_KEY."
    fi

    # BSD sed (macOS) needs an explicit empty suffix after -i.
    # '|' is a safe delimiter: base64 output never contains it.
    if [[ "$OSTYPE" == "darwin"* ]]; then
        sed -i '' "s|SECRET_KEY=replace_me_with_a_random_string|SECRET_KEY=${SECRET_KEY}|" "${INSTALL_DIR}/.env"
    else
        sed -i "s|SECRET_KEY=replace_me_with_a_random_string|SECRET_KEY=${SECRET_KEY}|" "${INSTALL_DIR}/.env"
    fi

    info "Created ${INSTALL_DIR}/.env"
else
    warn ".env already exists — keeping your existing configuration."
fi
|
||||
|
||||
# ── Start containers ─────────────────────────────────────────────────────────

if $MIGRATION_MODE; then
    # Read DB credentials from .env (fall back to defaults from
    # docker-compose.yml). The trailing `|| true` on each pipeline is
    # essential: with `set -euo pipefail`, a missing key makes grep exit 1,
    # which would otherwise abort the whole installer instead of falling
    # back to the defaults below.
    DB_USER=$(grep '^DB_USER=' "${INSTALL_DIR}/.env" 2>/dev/null | cut -d= -f2 | tr -d '"' | head -1 || true)
    DB_PASS=$(grep '^DB_PASSWORD=' "${INSTALL_DIR}/.env" 2>/dev/null | cut -d= -f2 | tr -d '"' | head -1 || true)
    DB_NAME=$(grep '^DB_NAME=' "${INSTALL_DIR}/.env" 2>/dev/null | cut -d= -f2 | tr -d '"' | head -1 || true)
    DB_USER="${DB_USER:-surfsense}"
    DB_PASS="${DB_PASS:-surfsense}"
    DB_NAME="${DB_NAME:-surfsense}"

    step "Starting PostgreSQL 17"
    (cd "${INSTALL_DIR}" && ${DC} up -d db)
    wait_for_pg "${DB_USER}"

    step "Restoring database"
    info "Restoring dump into PostgreSQL 17 — this may take a while for large databases..."

    # Unpredictable temp name instead of a fixed /tmp path.
    RESTORE_ERR=$(mktemp /tmp/surfsense_restore_err.XXXXXX)
    # psql's exit status is unreliable here (pg_dump headers emit harmless
    # errors), so ignore it and triage stderr below instead.
    (cd "${INSTALL_DIR}" && ${DC} exec -T \
        -e PGPASSWORD="${DB_PASS}" \
        db psql -U "${DB_USER}" -d "${DB_NAME}" \
        2>"${RESTORE_ERR}") < "${DUMP_FILE}" || true

    # Surface real errors; ignore benign "already exists" noise from pg_dump headers.
    FATAL_ERRORS=$(grep -i "^ERROR:" "${RESTORE_ERR}" \
        | grep -iv "already exists" \
        | grep -iv "multiple primary keys" \
        || true)

    if [[ -n "${FATAL_ERRORS}" ]]; then
        warn "Restore completed with errors (may be harmless pg_dump header noise):"
        printf "%s\n" "${FATAL_ERRORS}"
        warn "If SurfSense behaves incorrectly, inspect manually:"
        warn "  cd ${INSTALL_DIR} && ${DC} exec db psql -U ${DB_USER} -d ${DB_NAME} < ${DUMP_FILE}"
    else
        success "Database restored with no fatal errors."
    fi

    # Smoke test — verify tables are present after the restore.
    TABLE_COUNT=$(
        cd "${INSTALL_DIR}" && ${DC} exec -T \
            -e PGPASSWORD="${DB_PASS}" \
            db psql -U "${DB_USER}" -d "${DB_NAME}" -t \
            -c "SELECT count(*) FROM information_schema.tables WHERE table_schema = 'public';" \
            2>/dev/null | tr -d ' \n' || echo "0"
    )
    if [[ "${TABLE_COUNT}" == "0" || -z "${TABLE_COUNT}" ]]; then
        warn "Smoke test: no tables found after restore."
        warn "The restore may have failed silently. Check: cd ${INSTALL_DIR} && ${DC} logs db"
    else
        success "Smoke test passed: ${TABLE_COUNT} table(s) restored successfully."
    fi

    step "Starting all SurfSense services"
    (cd "${INSTALL_DIR}" && ${DC} up -d)
    success "All services started."

    # Key file is no longer needed — SECRET_KEY is now in .env.
    rm -f "${KEY_FILE}"
else
    step "Starting SurfSense"
    (cd "${INSTALL_DIR}" && ${DC} up -d)
    success "All services started."
fi
|
||||
|
||||
# ── Done ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
echo ""
|
||||
printf '\033[1;37m'
|
||||
|
|
@ -105,6 +249,7 @@ Y88b d88P Y88b 888 888 888 Y88b d88P Y8b. 888 888 X88 Y8b.
|
|||
EOF
|
||||
printf "  Your personal AI-powered search engine ${YELLOW}v${SURFSENSE_VERSION:-latest}${NC}\n"
printf "${CYAN}══════════════════════════════════════════════════════════════${NC}\n\n"

# Service endpoints exposed by the compose stack.
info " Frontend: http://localhost:3000"
info " Backend: http://localhost:8000"
info " API Docs: http://localhost:8000/docs"
|
||||
|
|
@ -114,5 +259,13 @@ info " Logs: cd ${INSTALL_DIR} && ${DC} logs -f"
|
|||
info " Stop: cd ${INSTALL_DIR} && ${DC} down"
info " Update: cd ${INSTALL_DIR} && ${DC} pull && ${DC} up -d"
info ""

# Closing guidance differs by install path: migration users must verify
# their data before deleting the legacy volume; fresh installs get the
# usual first-run notes.
if $MIGRATION_MODE; then
    warn " Migration complete! Open frontend and verify your data."
    warn " Once verified, clean up the legacy volume and dump file:"
    warn "   docker volume rm ${OLD_VOLUME}"
    warn "   rm ${DUMP_FILE}"
else
    warn " First startup may take a few minutes while images are pulled."
    warn " Edit ${INSTALL_DIR}/.env to configure API keys, OAuth, etc."
fi
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue