Run demos without Docker (#809)

This commit is contained in:
Adil Hafeez 2026-03-11 12:49:36 -07:00 committed by GitHub
parent 6610097659
commit b4313d93a4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
35 changed files with 488 additions and 347 deletions

View file

@ -23,9 +23,10 @@ All agents use Plano's agent orchestration LLM to intelligently route user reque
## Prerequisites
- [Plano CLI](https://docs.planoai.dev/get_started/quickstart.html#prerequisites) installed (`pip install planoai`)
- Docker and Docker Compose (for agent services)
- [uv](https://docs.astral.sh/uv/) installed (for running agents natively)
- [OpenAI API key](https://platform.openai.com/api-keys)
- [FlightAware AeroAPI key](https://www.flightaware.com/aeroapi/portal)
- Docker and Docker Compose (optional, only needed for `--with-ui`)
> **Note:** You'll need to obtain a FlightAware AeroAPI key for live flight data. Visit [https://www.flightaware.com/aeroapi/portal](https://www.flightaware.com/aeroapi/portal) to get your API key.
@ -46,16 +47,34 @@ export OPENAI_API_KEY="your OpenAI api key"
./run_demo.sh
```
This starts Plano natively and brings up via Docker Compose:
This starts Plano natively and runs agents as local processes:
- Weather Agent on port 10510
- Flight Agent on port 10520
- Open WebUI on port 8080
Plano runs natively on the host (port 8001).
To also start Open WebUI, Jaeger tracing, and other optional services, pass `--with-ui`:
```bash
./run_demo.sh --with-ui
```
This additionally starts:
- Open WebUI on port 8080
- Jaeger tracing UI on port 16686
### 4. Test the System
Use Open WebUI at http://localhost:8080
**Option A: Using curl**
```bash
curl -X POST http://localhost:8001/v1/chat/completions \
-H "Content-Type: application/json" \
-d '{"model": "gpt-5.2", "messages": [{"role": "user", "content": "What is the weather in Istanbul?"}]}'
```
**Option B: Using Open WebUI (requires `--with-ui`)**
Navigate to http://localhost:8080
> **Note:** Open WebUI may take a few minutes to start up and be fully ready. Please wait for the container to finish initializing before accessing the interface. Once ready, make sure to select the **gpt-5.2** model from the model dropdown menu in the UI.
@ -102,7 +121,7 @@ Each agent:
3. Generates response using GPT-5.2
4. Streams response back to user
Both agents run as Docker containers and communicate with Plano running natively on the host.
Both agents run as native local processes and communicate with Plano, which also runs natively on the host.
## Observability

View file

@ -1,32 +1,5 @@
services:
weather-agent:
build:
context: .
dockerfile: Dockerfile
container_name: weather-agent
restart: always
ports:
- "10510:10510"
environment:
- LLM_GATEWAY_ENDPOINT=http://host.docker.internal:12000/v1
command: ["uv", "run", "python", "src/travel_agents/weather_agent.py"]
extra_hosts:
- "host.docker.internal:host-gateway"
flight-agent:
build:
context: .
dockerfile: Dockerfile
container_name: flight-agent
restart: always
ports:
- "10520:10520"
environment:
- LLM_GATEWAY_ENDPOINT=http://host.docker.internal:12000/v1
- AEROAPI_KEY=${AEROAPI_KEY:? AEROAPI_KEY environment variable is required but not set}
command: ["uv", "run", "python", "src/travel_agents/flight_agent.py"]
extra_hosts:
- "host.docker.internal:host-gateway"
open-web-ui:
image: dyrnq/open-webui:main
restart: always
@ -40,9 +13,8 @@ services:
- ENABLE_TITLE_GENERATION=false
- ENABLE_TAGS_GENERATION=false
- ENABLE_AUTOCOMPLETE_GENERATION=false
depends_on:
- weather-agent
- flight-agent
extra_hosts:
- "host.docker.internal:host-gateway"
jaeger:
build:
context: ../../shared/jaeger

View file

@ -23,22 +23,32 @@ start_demo() {
echo ".env file created with API keys."
fi
# Step 3: Start Plano
# Step 3: Optionally start UI services (Open WebUI, Jaeger)
# Jaeger must start before Plano so that Jaeger can bind the OTEL port (4317)
if [ "$1" == "--with-ui" ]; then
echo "Starting UI services (Open WebUI, Jaeger)..."
docker compose up -d
fi
# Step 4: Start Plano
echo "Starting Plano with config.yaml..."
planoai up config.yaml
# Step 4: Start agents and services
echo "Starting agents using Docker Compose..."
docker compose up -d
# Step 5: Start agents natively
echo "Starting agents..."
bash start_agents.sh &
}
# Function to stop the demo.
# Tears down the stack in reverse start order: agents first, then any
# optional Docker services, then Plano itself.
stop_demo() {
  # Stop natively-running agents (started by start_agents.sh in the background).
  echo "Stopping agents..."
  pkill -f start_agents.sh 2>/dev/null || true

  # Stop optional Docker Compose services (Open WebUI, Jaeger) if they were
  # started with --with-ui; ignore errors when nothing is running.
  docker compose down 2>/dev/null || true

  # Stop Plano
  echo "Stopping Plano..."
  planoai down
}
@ -47,5 +57,5 @@ stop_demo() {
# Entry point: `./run_demo.sh down` stops the demo; anything else starts it,
# forwarding the first argument (e.g. --with-ui) to start_demo.
if [ "${1:-}" == "down" ]; then
  stop_demo
else
  start_demo "${1:-}"
fi

View file

@ -0,0 +1,30 @@
#!/bin/bash
# Starts the weather and flight agents as native local processes and waits on
# them. On exit or signal, kills any agents that are still running.
set -e

PIDS=()

# log MESSAGE... - print a timestamped line to stdout.
log() { echo "$(date '+%F %T') - $*"; }

# Kill all tracked agent processes exactly once, preserving the script's
# exit status (so a crashed agent still reports failure).
cleanup() {
  local rc=$?
  # Disarm the traps so cleanup does not run a second time when the
  # INT/TERM handler's explicit `exit` re-triggers the EXIT trap.
  trap - EXIT INT TERM
  log "Stopping agents..."
  for PID in "${PIDS[@]}"; do
    # `|| true` keeps set -e from aborting the loop when a PID is
    # already gone, so the remaining agents still get killed.
    { kill "$PID" 2>/dev/null && log "Stopped process $PID"; } || true
  done
  exit "$rc"
}
trap cleanup EXIT INT TERM

# Agents reach Plano's LLM gateway directly on the host.
export LLM_GATEWAY_ENDPOINT=http://localhost:12000/v1

log "Starting weather_agent on port 10510..."
uv run python src/travel_agents/weather_agent.py &
PIDS+=($!)

log "Starting flight_agent on port 10520..."
uv run python src/travel_agents/flight_agent.py &
PIDS+=($!)

# Block until every agent exits; under set -e a non-zero wait status
# exits the script and fires the cleanup trap for the survivors.
for PID in "${PIDS[@]}"; do
  wait "$PID"
done