# Docker Compose stack for the Arch gateway demo:
#   config_generator -> renders envoy.yaml from arch_config.yaml (one-shot)
#   arch             -> Envoy-based gateway; waits for config generation and a
#                       healthy model_server before starting
#   model_server / function_resolver / api_server -> backend HTTP services
#   ollama / open_webui                           -> optional local-LLM stack ("manual"/"monitoring" profiles)
#   chatbot_ui / prometheus / grafana             -> UI and observability ("monitoring" profile)
services:
  config_generator:
    build:
      context: ../../
      dockerfile: config_generator/Dockerfile
    volumes:
      - ../../arch/envoy.template.yaml:/usr/src/app/envoy.template.yaml
      - ./arch_config.yaml:/usr/src/app/arch_config.yaml
      - ./generated:/usr/src/app/out

  arch:
    build:
      context: ../../
      dockerfile: arch/Dockerfile
    ports:
      - "10000:10000"
      - "19901:9901"
    volumes:
      - ./generated/envoy.yaml:/etc/envoy/envoy.yaml
      - /etc/ssl/cert.pem:/etc/ssl/cert.pem
      - ./arch_log:/var/log/
    depends_on:
      # gateway config must exist before Envoy boots
      config_generator:
        condition: service_completed_successfully
      # only start once the model server's healthcheck passes
      model_server:
        condition: service_healthy
    environment:
      - LOG_LEVEL=debug

  model_server:
    build:
      context: ../../model_server
      dockerfile: Dockerfile
    ports:
      - "18081:80"
    healthcheck:
      # -f makes curl exit non-zero on HTTP >= 400, so the check actually
      # fails when /healthz returns a server error (plain curl exits 0)
      test: ["CMD", "curl", "-f", "http://localhost/healthz"]
      interval: 5s
      retries: 20
    volumes:
      - ~/.cache/huggingface:/root/.cache/huggingface
      - ./arch_config.yaml:/root/arch_config.yaml

  function_resolver:
    build:
      context: ../../function_resolver
      dockerfile: Dockerfile
    ports:
      - "18082:80"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:80/healthz"]
      interval: 5s
      retries: 20
    volumes:
      - ~/.cache/huggingface:/root/.cache/huggingface
    environment:
      # use ollama endpoint that is hosted by host machine (no virtualization)
      - OLLAMA_ENDPOINT=${OLLAMA_ENDPOINT:-host.docker.internal}
      - OLLAMA_MODEL=Arch-Function-Calling-3B-Q4_K_M
      # uncomment following lines to use ollama endpoint that is hosted by docker
      # - OLLAMA_ENDPOINT=ollama
      # - OLLAMA_MODEL=Arch-Function-Calling-1.5B:Q4_K_M

  api_server:
    build:
      context: api_server
      dockerfile: Dockerfile
    ports:
      - "18083:80"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:80/healthz"]
      interval: 5s
      retries: 20

  ollama:
    image: ollama/ollama
    container_name: ollama
    volumes:
      - ./ollama:/root/.ollama
    restart: unless-stopped
    ports:
      - "11434:11434"
    profiles:
      - manual

  open_webui:
    image: ghcr.io/open-webui/open-webui:${WEBUI_DOCKER_TAG-main}
    container_name: open-webui
    volumes:
      - ./open-webui:/app/backend/data
    # depends_on:
    #   - ollama
    ports:
      - "18090:8080"
    environment:
      - OLLAMA_BASE_URL=http://${OLLAMA_ENDPOINT:-host.docker.internal}:11434
      - WEBUI_AUTH=false
    extra_hosts:
      # lets the container reach services on the host via host.docker.internal
      - "host.docker.internal:host-gateway"
    restart: unless-stopped
    profiles:
      - monitoring

  chatbot_ui:
    build:
      context: ../../chatbot_ui
      dockerfile: Dockerfile
    ports:
      - "18080:8080"
    environment:
      # :?error makes compose abort with an error when these are unset
      - OPENAI_API_KEY=${OPENAI_API_KEY:?error}
      - MISTRAL_API_KEY=${MISTRAL_API_KEY:?error}
      - CHAT_COMPLETION_ENDPOINT=http://arch:10000/v1

  prometheus:
    image: prom/prometheus
    container_name: prometheus
    command:
      - '--config.file=/etc/prometheus/prometheus.yaml'
    ports:
      - "9090:9090"
    restart: unless-stopped
    volumes:
      - ./prometheus:/etc/prometheus
      - ./prom_data:/prometheus
    profiles:
      - monitoring

  grafana:
    image: grafana/grafana
    container_name: grafana
    ports:
      - "3000:3000"
    restart: unless-stopped
    environment:
      # NOTE(review): hard-coded admin credentials — fine for local demos,
      # move to an env file / secret store before any shared deployment
      - GF_SECURITY_ADMIN_USER=admin
      - GF_SECURITY_ADMIN_PASSWORD=grafana
    volumes:
      - ./grafana:/etc/grafana/provisioning/datasources
      - ./grafana/dashboard.yaml:/etc/grafana/provisioning/dashboards/main.yaml
      - ./grafana/dashboards:/var/lib/grafana/dashboards
    profiles:
      - monitoring