---
version: "0.1-beta"

listeners:
  ingress_traffic:
    address: 0.0.0.0
    port: 10000
    message_format: openai
    timeout: 30s

endpoints:
  weather_forecast_service:
    endpoint: host.docker.internal:18083
    connect_timeout: 0.005s

overrides:
  # confidence threshold for prompt target intent matching
  prompt_target_intent_matching_threshold: 0.6

llm_providers:
  - name: groq
    access_key: $GROQ_API_KEY
    provider_interface: groq
    model: llama-3.2-3b-preview

  - name: gpt-4o
    access_key: $OPENAI_API_KEY
    provider_interface: openai
    model: gpt-4o
    default: true

system_prompt: |
  You are a helpful assistant.

prompt_guards:
  input_guards:
    jailbreak:
      on_exception:
        message: Looks like you're curious about my abilities, but I can only provide assistance for weather forecasting.

prompt_targets:
  - name: get_current_weather
    description: Get current weather at a location.
    parameters:
      - name: location
        description: The location to get the weather for
        required: true
        type: string
        format: City, State
      - name: days
        description: the number of days for the request
        required: true
        type: int
    endpoint:
      name: weather_forecast_service
      path: /weather
      http_method: POST

  - name: default_target
    default: true
    description: This is the default target for all unmatched prompts.
    endpoint:
      name: weather_forecast_service
      path: /default_target
      http_method: POST
    system_prompt: |
      You are a helpful assistant! Summarize the user's request and provide a helpful response.
    # if it is set to false arch will send response that it received from this prompt target to the user
    # if true arch will forward the response to the default LLM
    auto_llm_dispatch_on_response: false

tracing:
  random_sampling: 100
  trace_arch_internal: true