plano/envoyfilter/docker-compose.yaml
José Ulises Niño Rivera 5b4143d580
Add initial logic to send prompts to LLM API (#9)
Signed-off-by: José Ulises Niño Rivera <junr03@users.noreply.github.com>
2024-07-19 13:14:48 -07:00

43 lines
894 B
YAML

services:
  # Envoy proxy running the Wasm plugin; waits for the embedding server
  # to be healthy before starting.
  envoy:
    image: envoyproxy/envoy:v1.30-latest
    hostname: envoy
    ports:
      - "10000:10000"
      - "19901:9901"
    volumes:
      - ./envoy.yaml:/etc/envoy/envoy.yaml
      # Compiled Wasm plugin artifacts mounted into Envoy's plugin dir.
      - ./target/wasm32-wasi/release:/etc/envoy/proxy-wasm-plugins
      # Host CA bundle so Envoy can validate upstream TLS (e.g. LLM API).
      - /etc/ssl/cert.pem:/etc/ssl/cert.pem
    networks:
      - envoymesh
    depends_on:
      embeddingserver:
        condition: service_healthy

  # Embedding server built from the sibling directory; exposed on 18080.
  embeddingserver:
    build:
      context: ../embedding-server
      dockerfile: Dockerfile
    ports:
      - "18080:80"
    healthcheck:
      # Plain GET against the container port; any HTTP response counts
      # as healthy. Retries for up to ~100s (20 x 5s) before failing.
      test: ["CMD", "curl", "http://localhost:80"]
      interval: 5s
      retries: 20
    networks:
      - envoymesh

  # Qdrant vector database; data persisted to ./qdrant_data on the host.
  qdrant:
    image: qdrant/qdrant
    hostname: vector-db
    ports:
      # Quoted to avoid YAML's HH:MM sexagesimal parsing trap.
      - "16333:6333"
      - "16334:6334"
    volumes:
      - ./qdrant_data:/qdrant/storage
    networks:
      - envoymesh

networks:
  envoymesh: {}