# Docker Compose stack: the `noxa` application fronted by a local Ollama
# model server. The app reaches Ollama over the internal compose network
# at http://ollama:11434; only the app's port is published to the host.
services:
  noxa:
    build: .
    ports:
      # Host port is overridable via NOXA_PORT; quoted so the mapping is
      # never misread as a non-string scalar.
      - "${NOXA_PORT:-3000}:3000"
    env_file:
      - .env
    environment:
      # Points the app at the ollama service by its compose DNS name.
      - OLLAMA_HOST=http://ollama:11434
    depends_on:
      - ollama
    restart: unless-stopped
    healthcheck:
      # Exec-form check: container is healthy if the CLI runs at all.
      test: ["CMD", "noxa", "--help"]
      interval: 30s
      timeout: 5s
      retries: 3

  ollama:
    image: ollama/ollama:latest
    volumes:
      # Persist downloaded models across container restarts.
      - ollama_data:/root/.ollama
    restart: unless-stopped
    # CPU-only by default. For GPU, uncomment:
    # deploy:
    #   resources:
    #     reservations:
    #       devices:
    #         - capabilities: [gpu]
    #
    # Pre-pull a model after starting:
    #   docker compose exec ollama ollama pull qwen3:1.7b

volumes:
  ollama_data: