mirror of
https://github.com/katanemo/plano.git
synced 2026-05-09 15:52:44 +02:00
run demos without docker, keep docker optional via --with-ui
This commit is contained in:
parent
b9f01c8471
commit
1285bd083d
33 changed files with 447 additions and 316 deletions
|
|
@ -41,21 +41,36 @@ cd demos/agent_orchestration/multi_agent_crewai_langchain
|
|||
./run_demo.sh
|
||||
```
|
||||
|
||||
This starts Plano natively and brings up via Docker Compose:
|
||||
This starts Plano natively and runs agents as local processes:
|
||||
- **CrewAI Flight Agent** (port 10520) - flight search
|
||||
- **LangChain Weather Agent** (port 10510) - weather forecasts
|
||||
- **AnythingLLM** (port 3001) - chat interface
|
||||
- **Jaeger** (port 16686) - distributed tracing
|
||||
|
||||
Plano runs natively on the host (ports 12000, 8001).
|
||||
|
||||
To also start AnythingLLM (chat UI), Jaeger (tracing), and other optional services:
|
||||
|
||||
```bash
|
||||
./run_demo.sh --with-ui
|
||||
```
|
||||
|
||||
This additionally starts:
|
||||
- **AnythingLLM** (port 3001) - chat interface
|
||||
- **Jaeger** (port 16686) - distributed tracing
|
||||
|
||||
### Try It Out
|
||||
|
||||
1. **Open the Chat Interface**
|
||||
1. **Using curl**
|
||||
```bash
|
||||
curl -X POST http://localhost:8001/v1/chat/completions \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"model": "gpt-4o", "messages": [{"role": "user", "content": "What is the weather in San Francisco?"}]}'
|
||||
```
|
||||
|
||||
2. **Using AnythingLLM (requires `--with-ui`)**
|
||||
- Navigate to [http://localhost:3001](http://localhost:3001)
|
||||
- Create an account (stored locally)
|
||||
|
||||
2. **Ask Multi-Agent Questions**
|
||||
3. **Ask Multi-Agent Questions**
|
||||
```
|
||||
"What's the weather in San Francisco and can you find flights from Seattle to San Francisco?"
|
||||
```
|
||||
|
|
@ -65,7 +80,7 @@ Plano runs natively on the host (ports 12000, 8001).
|
|||
- Routes the flight part to the CrewAI agent
|
||||
- Combines responses seamlessly
|
||||
|
||||
3. **View Distributed Traces**
|
||||
4. **View Distributed Traces (requires `--with-ui`)**
|
||||
- Open [http://localhost:16686](http://localhost:16686) (Jaeger UI)
|
||||
- See how requests flow through both agents
|
||||
|
||||
|
|
|
|||
|
|
@ -2,9 +2,9 @@ version: v0.3.0
|
|||
|
||||
agents:
|
||||
- id: weather_agent
|
||||
url: http://langchain-weather-agent:10510
|
||||
url: http://localhost:10510
|
||||
- id: flight_agent
|
||||
url: http://crewai-flight-agent:10520
|
||||
url: http://localhost:10520
|
||||
|
||||
model_providers:
|
||||
- model: openai/gpt-4o
|
||||
|
|
|
|||
|
|
@ -1,27 +1,5 @@
|
|||
|
||||
services:
|
||||
crewai-flight-agent:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
restart: always
|
||||
ports:
|
||||
- "10520:10520"
|
||||
environment:
|
||||
- LLM_GATEWAY_ENDPOINT=http://host.docker.internal:12000/v1
|
||||
- AEROAPI_KEY=${AEROAPI_KEY:?AEROAPI_KEY environment variable is required but not set}
|
||||
- PYTHONUNBUFFERED=1
|
||||
command: ["python", "-u", "crewai/flight_agent.py"]
|
||||
|
||||
langchain-weather-agent:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
restart: always
|
||||
ports:
|
||||
- "10510:10510"
|
||||
environment:
|
||||
- LLM_GATEWAY_ENDPOINT=http://host.docker.internal:12000/v1
|
||||
command: ["python", "-u", "langchain/weather_agent.py"]
|
||||
|
||||
anythingllm:
|
||||
image: mintplexlabs/anythingllm
|
||||
restart: always
|
||||
|
|
@ -36,6 +14,8 @@ services:
|
|||
- GENERIC_OPEN_AI_MODEL_PREF=gpt-4o-mini
|
||||
- GENERIC_OPEN_AI_MODEL_TOKEN_LIMIT=128000
|
||||
- GENERIC_OPEN_AI_API_KEY=sk-placeholder
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
|
||||
jaeger:
|
||||
build:
|
||||
|
|
@ -44,3 +24,4 @@ services:
|
|||
ports:
|
||||
- "16686:16686" # Jaeger UI
|
||||
- "4317:4317" # OTLP gRPC receiver
|
||||
- "4318:4318" # OTLP HTTP receiver
|
||||
|
|
|
|||
|
|
@ -12,14 +12,9 @@ start_demo() {
|
|||
echo "Error: OPENAI_API_KEY environment variable is not set for the demo."
|
||||
exit 1
|
||||
fi
|
||||
if [ -z "$AEROAPI_KEY" ]; then
|
||||
echo "Error: AEROAPI_KEY environment variable is not set for the demo."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Creating .env file..."
|
||||
echo "OPENAI_API_KEY=$OPENAI_API_KEY" > .env
|
||||
echo "AEROAPI_KEY=$AEROAPI_KEY" >> .env
|
||||
echo ".env file created with API keys."
|
||||
fi
|
||||
|
||||
|
|
@ -27,18 +22,27 @@ start_demo() {
|
|||
echo "Starting Plano with config.yaml..."
|
||||
planoai up config.yaml
|
||||
|
||||
# Step 4: Start agents and services
|
||||
echo "Starting agents using Docker Compose..."
|
||||
docker compose up -d
|
||||
# Step 4: Start agents natively
|
||||
echo "Starting agents..."
|
||||
bash start_agents.sh &
|
||||
|
||||
# Step 5: Optionally start UI services (AnythingLLM, Jaeger)
|
||||
if [ "$1" == "--with-ui" ]; then
|
||||
echo "Starting UI services (AnythingLLM, Jaeger)..."
|
||||
docker compose up -d
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to stop the demo
|
||||
stop_demo() {
|
||||
# Step 1: Stop Docker Compose services
|
||||
echo "Stopping Docker Compose services..."
|
||||
docker compose down
|
||||
# Stop agents
|
||||
echo "Stopping agents..."
|
||||
pkill -f start_agents.sh 2>/dev/null || true
|
||||
|
||||
# Step 2: Stop Plano
|
||||
# Stop Docker Compose services if running
|
||||
docker compose down 2>/dev/null || true
|
||||
|
||||
# Stop Plano
|
||||
echo "Stopping Plano..."
|
||||
planoai down
|
||||
}
|
||||
|
|
@ -47,5 +51,5 @@ stop_demo() {
|
|||
if [ "$1" == "down" ]; then
|
||||
stop_demo
|
||||
else
|
||||
start_demo
|
||||
start_demo "$1"
|
||||
fi
|
||||
|
|
|
|||
30
demos/agent_orchestration/multi_agent_crewai_langchain/start_agents.sh
Executable file
30
demos/agent_orchestration/multi_agent_crewai_langchain/start_agents.sh
Executable file
|
|
@ -0,0 +1,30 @@
|
|||
#!/bin/bash
# Launch the demo agents as local (non-Docker) processes.
# Exits on the first unhandled command failure.
set -e

# PIDs of background agent processes; populated below, reaped by cleanup().
PIDS=()

# log MESSAGE... — emit a timestamped line on stdout.
log() { printf '%s - %s\n' "$(date '+%F %T')" "$*"; }
|
||||
|
||||
# cleanup — stop all background agent processes and exit.
# Installed (elsewhere in this script) as the handler for EXIT, INT and TERM.
# Globals: PIDS (read) — PIDs of the agents started by this script.
cleanup() {
  # Clear the traps first: the `exit` below would otherwise re-fire the
  # EXIT trap and run cleanup a second time (duplicate logs / kills).
  trap - EXIT INT TERM
  log "Stopping agents..."
  for PID in "${PIDS[@]}"; do
    # Agent may already be gone; suppress "No such process" noise.
    kill "$PID" 2>/dev/null && log "Stopped process $PID"
  done
  # Demo teardown (including Ctrl-C) is considered a normal exit.
  exit 0
}
|
||||
|
||||
# Stop both agents on Ctrl-C, kill, or normal script exit.
trap cleanup EXIT INT TERM

# Point both agents at the natively-running Plano gateway on the host.
export LLM_GATEWAY_ENDPOINT=http://localhost:12000/v1

log "Starting langchain weather_agent on port 10510..."
uv run python langchain/weather_agent.py &
PIDS+=("$!")   # quoted append — keep the PID as a single array element

log "Starting crewai flight_agent on port 10520..."
uv run python crewai/flight_agent.py &
PIDS+=("$!")

# Block until both agents exit. Under `set -e` the first failing `wait`
# aborts the script, and the EXIT trap then stops the surviving agent.
for PID in "${PIDS[@]}"; do
  wait "$PID"
done
|
||||
Loading…
Add table
Add a link
Reference in a new issue