diff --git a/demos/shared/logfire/Dockerfile b/demos/shared/logfire/Dockerfile
new file mode 100644
index 00000000..eec82c08
--- /dev/null
+++ b/demos/shared/logfire/Dockerfile
@@ -0,0 +1,5 @@
+FROM otel/opentelemetry-collector:latest
+
+COPY otel-collector-config.yaml /etc/otel-collector-config.yaml
+
+ENTRYPOINT ["/otelcol", "--config=/etc/otel-collector-config.yaml"]
diff --git a/demos/shared/logfire/otel-collector-config.yaml b/demos/shared/logfire/otel-collector-config.yaml
new file mode 100644
index 00000000..d26939e4
--- /dev/null
+++ b/demos/shared/logfire/otel-collector-config.yaml
@@ -0,0 +1,24 @@
+receivers:
+ otlp:
+ protocols:
+ grpc:
+ endpoint: 0.0.0.0:4317
+ http:
+ endpoint: 0.0.0.0:4318
+
+exporters:
+ otlphttp:
+ endpoint: "https://logfire-api.pydantic.dev"
+ headers:
+ Authorization: "${LOGFIRE_API_KEY}"
+
+processors:
+ batch:
+ timeout: 5s
+
+service:
+ pipelines:
+ traces:
+ receivers: [otlp]
+ processors: [batch]
+ exporters: [otlphttp]
diff --git a/demos/shared/weather_forecast_service/Dockerfile b/demos/weather_forecast/Dockerfile
similarity index 100%
rename from demos/shared/weather_forecast_service/Dockerfile
rename to demos/weather_forecast/Dockerfile
diff --git a/demos/weather_forecast/README.md b/demos/weather_forecast/README.md
index 5334d4a5..b9e9809e 100644
--- a/demos/weather_forecast/README.md
+++ b/demos/weather_forecast/README.md
@@ -1,27 +1,82 @@
# Function calling
+
This demo shows how you can use Arch's core function calling capabilites.
# Starting the demo
+
1. Please make sure the [pre-requisites](https://github.com/katanemo/arch/?tab=readme-ov-file#prerequisites) are installed correctly
2. Start Arch
-3.
- ```sh
+3. ```sh
sh run_demo.sh
```
4. Navigate to http://localhost:18080/
5. You can type in queries like "how is the weather?"
# Observability
+
Arch gateway publishes stats endpoint at http://localhost:19901/stats. In this demo we are using prometheus to pull stats from arch and we are using grafana to visalize the stats in dashboard. To see grafana dashboard follow instructions below,
1. Start grafana and prometheus using following command
```yaml
docker compose --profile monitoring up
```
-1. Navigate to http://localhost:3000/ to open grafana UI (use admin/grafana as credentials)
-1. From grafana left nav click on dashboards and select "Intelligent Gateway Overview" to view arch gateway stats
-
+2. Navigate to http://localhost:3000/ to open grafana UI (use admin/grafana as credentials)
+3. From grafana left nav click on dashboards and select "Intelligent Gateway Overview" to view arch gateway stats
Here is a sample interaction,
+
+## Tracing
+
+To see a tracing dashboard follow instructions below,
+
+1. For Jaeger, you can either use the default run_demo.sh script or run the following command:
+
+```sh
+sh run_demo.sh jaeger
+```
+
+2. For Logfire, first make sure to add a LOGFIRE_API_KEY to the .env file. You can either use the default run_demo.sh script or run the following command:
+
+```sh
+sh run_demo.sh logfire
+```
+
+3. For Signoz, you can either use the default run_demo.sh script or run the following command:
+
+```sh
+sh run_demo.sh signoz
+```
+
+If using Jaeger, navigate to http://localhost:16686/ to open Jaeger UI
+
+If using Signoz, navigate to http://localhost:3301/ to open Signoz UI
+
+If using Logfire, navigate to your logfire dashboard that you got the write key from to view the dashboard
+
+### Stopping Demo
+
+1. To shut down the Jaeger demo, run the following command:
+
+```sh
+sh run_demo.sh down jaeger
+```
+
+or run:
+
+```sh
+sh run_demo.sh down
+```
+
+2. To shut down the Logfire demo, run the following command:
+
+```sh
+sh run_demo.sh down logfire
+```
+
+3. To shut down the Signoz demo, run the following command:
+
+```sh
+sh run_demo.sh down signoz
+```
diff --git a/demos/weather_forecast/docker-compose-jaeger.yaml b/demos/weather_forecast/docker-compose-jaeger.yaml
new file mode 100644
index 00000000..15441de3
--- /dev/null
+++ b/demos/weather_forecast/docker-compose-jaeger.yaml
@@ -0,0 +1,41 @@
+services:
+ weather_forecast_service:
+ build:
+ context: ./
+ environment:
+ - OLTP_HOST=http://jaeger:4317
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+ ports:
+ - "18083:80"
+
+ chatbot_ui:
+ build:
+ context: ../shared/chatbot_ui
+ ports:
+ - "18080:8080"
+ environment:
+      # this is only because we are running the sample app in the same docker container environment as archgw
+ - CHAT_COMPLETION_ENDPOINT=http://host.docker.internal:10000/v1
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+ volumes:
+ - ./arch_config.yaml:/app/arch_config.yaml
+
+ jaeger:
+ build:
+ context: ../shared/jaeger
+ ports:
+ - "16686:16686"
+ - "4317:4317"
+ - "4318:4318"
+
+ prometheus:
+ build:
+ context: ../shared/prometheus
+
+ grafana:
+ build:
+ context: ../shared/grafana
+ ports:
+ - "3000:3000"
diff --git a/demos/weather_forecast/docker-compose-logfire.yaml b/demos/weather_forecast/docker-compose-logfire.yaml
new file mode 100644
index 00000000..92371d51
--- /dev/null
+++ b/demos/weather_forecast/docker-compose-logfire.yaml
@@ -0,0 +1,46 @@
+services:
+ weather_forecast_service:
+ build:
+      context: ./
+ environment:
+ - OLTP_HOST=http://otel-collector:4317
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+ ports:
+ - "18083:80"
+
+ chatbot_ui:
+ build:
+ context: ../shared/chatbot_ui
+ ports:
+ - "18080:8080"
+ environment:
+ # this is only because we are running the sample app in the same docker container environment as archgw
+ - CHAT_COMPLETION_ENDPOINT=http://host.docker.internal:10000/v1
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+ volumes:
+ - ./arch_config.yaml:/app/arch_config.yaml
+
+ otel-collector:
+ build:
+ context: ../shared/logfire/
+ ports:
+ - "4317:4317"
+ - "4318:4318"
+ volumes:
+ - ../shared/logfire/otel-collector-config.yaml:/etc/otel-collector-config.yaml
+ env_file:
+ - .env
+ environment:
+ - LOGFIRE_API_KEY
+
+ prometheus:
+ build:
+ context: ../shared/prometheus
+
+ grafana:
+ build:
+ context: ../shared/grafana
+ ports:
+ - "3000:3000"
diff --git a/demos/weather_forecast_signoz/docker-compose.yaml b/demos/weather_forecast/docker-compose-signoz.yaml
similarity index 94%
rename from demos/weather_forecast_signoz/docker-compose.yaml
rename to demos/weather_forecast/docker-compose-signoz.yaml
index 1c23f464..e9e2e536 100644
--- a/demos/weather_forecast_signoz/docker-compose.yaml
+++ b/demos/weather_forecast/docker-compose-signoz.yaml
@@ -4,7 +4,7 @@ include:
services:
weather_forecast_service:
build:
- context: ../shared/weather_forecast_service
+      context: ./
environment:
- OLTP_HOST=http://otel-collector:4317
extra_hosts:
diff --git a/demos/weather_forecast/docker-compose.yaml b/demos/weather_forecast/docker-compose.yaml
index fdaa7fcd..15441de3 100644
--- a/demos/weather_forecast/docker-compose.yaml
+++ b/demos/weather_forecast/docker-compose.yaml
@@ -1,7 +1,7 @@
services:
weather_forecast_service:
build:
- context: ../shared/weather_forecast_service
+ context: ./
environment:
- OLTP_HOST=http://jaeger:4317
extra_hosts:
diff --git a/demos/shared/weather_forecast_service/main.py b/demos/weather_forecast/main.py
similarity index 100%
rename from demos/shared/weather_forecast_service/main.py
rename to demos/weather_forecast/main.py
diff --git a/demos/shared/weather_forecast_service/poetry.lock b/demos/weather_forecast/poetry.lock
similarity index 100%
rename from demos/shared/weather_forecast_service/poetry.lock
rename to demos/weather_forecast/poetry.lock
diff --git a/demos/shared/weather_forecast_service/pyproject.toml b/demos/weather_forecast/pyproject.toml
similarity index 100%
rename from demos/shared/weather_forecast_service/pyproject.toml
rename to demos/weather_forecast/pyproject.toml
diff --git a/demos/weather_forecast/run_demo.sh b/demos/weather_forecast/run_demo.sh
index e6c678e8..a129e2c6 100644
--- a/demos/weather_forecast/run_demo.sh
+++ b/demos/weather_forecast/run_demo.sh
@@ -1,47 +1,95 @@
#!/bin/bash
set -e
+# Function to load environment variables from the .env file
+load_env() {
+ if [ -f ".env" ]; then
+        set -a; . ./.env; set +a  # allexport: safer than export $(grep ... | xargs), handles quoted values and spaces
+ fi
+}
+
+# Function to determine the docker-compose file based on the argument
+get_compose_file() {
+ case "$1" in
+ jaeger)
+ echo "docker-compose-jaeger.yaml"
+ ;;
+ logfire)
+ echo "docker-compose-logfire.yaml"
+ ;;
+ signoz)
+ echo "docker-compose-signoz.yaml"
+ ;;
+ *)
+ echo "docker-compose.yaml"
+ ;;
+ esac
+}
+
# Function to start the demo
start_demo() {
- # Step 1: Check if .env file exists
+ # Step 1: Determine the docker-compose file
+ COMPOSE_FILE=$(get_compose_file "$1" 2>/dev/null)
+
+ # Step 2: Check if .env file exists
if [ -f ".env" ]; then
echo ".env file already exists. Skipping creation."
else
- # Step 2: Create `.env` file and set OpenAI key
+ # Step 3: Check for required environment variables
if [ -z "$OPENAI_API_KEY" ]; then
echo "Error: OPENAI_API_KEY environment variable is not set for the demo."
exit 1
fi
+ if [ "$1" == "logfire" ] && [ -z "$LOGFIRE_API_KEY" ]; then
+ echo "Error: LOGFIRE_API_KEY environment variable is required for Logfire."
+ exit 1
+ fi
+ # Create .env file
echo "Creating .env file..."
echo "OPENAI_API_KEY=$OPENAI_API_KEY" > .env
- echo ".env file created with OPENAI_API_KEY."
+ if [ "$1" == "logfire" ]; then
+ echo "LOGFIRE_API_KEY=$LOGFIRE_API_KEY" >> .env
+ fi
+ echo ".env file created with required API keys."
fi
- # Step 3: Start Arch
+ load_env
+
+    if [ "$1" == "logfire" ] && [ -z "$LOGFIRE_API_KEY" ]; then
+ echo "Error: LOGFIRE_API_KEY environment variable is required for Logfire."
+ exit 1
+ fi
+
+ # Step 4: Start Arch
echo "Starting Arch with arch_config.yaml..."
archgw up arch_config.yaml
- # Step 4: Start Network Agent
- echo "Starting Network Agent using Docker Compose..."
- docker compose up -d # Run in detached mode
+ # Step 5: Start Network Agent with the chosen Docker Compose file
+ echo "Starting Network Agent with $COMPOSE_FILE..."
+ docker compose -f "$COMPOSE_FILE" up -d # Run in detached mode
}
# Function to stop the demo
stop_demo() {
- # Step 1: Stop Docker Compose services
- echo "Stopping Network Agent using Docker Compose..."
- docker compose down
+ echo "Stopping all Docker Compose services..."
- # Step 2: Stop Arch
+ # Stop all services by iterating through all configurations
+ for compose_file in ./docker-compose*.yaml; do
+ echo "Stopping services in $compose_file..."
+ docker compose -f "$compose_file" down
+ done
+
+ # Stop Arch
echo "Stopping Arch..."
archgw down
}
# Main script logic
if [ "$1" == "down" ]; then
+    # stop_demo tears down every docker-compose*.yaml configuration; any extra argument is ignored
stop_demo
else
- # Default action is to bring the demo up
- start_demo
+ # Use the argument (jaeger, logfire, signoz) to determine the compose file
+ start_demo "$1"
fi
diff --git a/demos/weather_forecast_signoz/README.md b/demos/weather_forecast_signoz/README.md
deleted file mode 100644
index d4d9175e..00000000
--- a/demos/weather_forecast_signoz/README.md
+++ /dev/null
@@ -1,29 +0,0 @@
-# Function calling
-This demo shows how you can use Arch's core function calling capabilites.
-
-# Starting the demo
-1. Please make sure the [pre-requisites](https://github.com/katanemo/arch/?tab=readme-ov-file#prerequisites) are installed correctly
-2. Start Arch
-
-3.
- ```sh
- sh run_demo.sh
- ```
-4. Navigate to http://localhost:18080/
-5. You can type in queries like "how is the weather?"
-
-# Observability
-Arch gateway publishes stats endpoint at http://localhost:19901/stats. In this demo we are using prometheus to pull stats from arch and we are using grafana to visalize the stats in dashboard. To see grafana dashboard follow instructions below,
-
-1. Start grafana and prometheus using following command
- ```yaml
- docker compose --profile monitoring up
- ```
-1. Navigate to http://localhost:3000/ to open grafana UI (use admin/grafana as credentials)
-1. From grafana left nav click on dashboards and select "Intelligent Gateway Overview" to view arch gateway stats
-
-
-Here is a sample interaction,
-
-
-1. Signoz UI: http://localhost:3301
diff --git a/demos/weather_forecast_signoz/arch_config.yaml b/demos/weather_forecast_signoz/arch_config.yaml
deleted file mode 100644
index 787a71b3..00000000
--- a/demos/weather_forecast_signoz/arch_config.yaml
+++ /dev/null
@@ -1,72 +0,0 @@
-version: "0.1-beta"
-
-listener:
- address: 0.0.0.0
- port: 10000
- message_format: huggingface
- connect_timeout: 0.005s
-
-endpoints:
- weather_forecast_service:
- endpoint: host.docker.internal:18083
- connect_timeout: 0.005s
-
-overrides:
- # confidence threshold for prompt target intent matching
- prompt_target_intent_matching_threshold: 0.6
-
-llm_providers:
- - name: gpt-4o-mini
- access_key: $OPENAI_API_KEY
- provider: openai
- model: gpt-4o-mini
- default: true
-
- - name: gpt-3.5-turbo-0125
- access_key: $OPENAI_API_KEY
- provider: openai
- model: gpt-3.5-turbo-0125
-
- - name: gpt-4o
- access_key: $OPENAI_API_KEY
- provider: openai
- model: gpt-4o
-
-system_prompt: |
- You are a helpful assistant.
-
-prompt_targets:
- - name: weather_forecast
- description: Check weather information for a given city.
- parameters:
- - name: city
- description: the name of the city
- required: true
- type: str
- - name: days
- description: the number of days
- type: int
- required: true
- - name: units
- description: the temperature unit, e.g., Celsius and Fahrenheit
- type: str
- default: Fahrenheit
- endpoint:
- name: weather_forecast_service
- path: /weather
-
- - name: default_target
- default: true
- description: This is the default target for all unmatched prompts.
- endpoint:
- name: weather_forecast_service
- path: /default_target
- system_prompt: |
- You are a helpful assistant! Summarize the user's request and provide a helpful response.
- # if it is set to false arch will send response that it received from this prompt target to the user
- # if true arch will forward the response to the default LLM
- auto_llm_dispatch_on_response: false
-
-tracing:
- random_sampling: 100
- # trace_arch: true
diff --git a/demos/weather_forecast_signoz/run_demo.sh b/demos/weather_forecast_signoz/run_demo.sh
deleted file mode 100644
index e6c678e8..00000000
--- a/demos/weather_forecast_signoz/run_demo.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash
-set -e
-
-# Function to start the demo
-start_demo() {
- # Step 1: Check if .env file exists
- if [ -f ".env" ]; then
- echo ".env file already exists. Skipping creation."
- else
- # Step 2: Create `.env` file and set OpenAI key
- if [ -z "$OPENAI_API_KEY" ]; then
- echo "Error: OPENAI_API_KEY environment variable is not set for the demo."
- exit 1
- fi
-
- echo "Creating .env file..."
- echo "OPENAI_API_KEY=$OPENAI_API_KEY" > .env
- echo ".env file created with OPENAI_API_KEY."
- fi
-
- # Step 3: Start Arch
- echo "Starting Arch with arch_config.yaml..."
- archgw up arch_config.yaml
-
- # Step 4: Start Network Agent
- echo "Starting Network Agent using Docker Compose..."
- docker compose up -d # Run in detached mode
-}
-
-# Function to stop the demo
-stop_demo() {
- # Step 1: Stop Docker Compose services
- echo "Stopping Network Agent using Docker Compose..."
- docker compose down
-
- # Step 2: Stop Arch
- echo "Stopping Arch..."
- archgw down
-}
-
-# Main script logic
-if [ "$1" == "down" ]; then
- stop_demo
-else
- # Default action is to bring the demo up
- start_demo
-fi