mirror of
https://github.com/katanemo/plano.git
synced 2026-04-25 08:46:24 +02:00
rename cli to plano (#647)
This commit is contained in:
parent
e224cba3e3
commit
e7ce00b5a7
70 changed files with 226 additions and 212 deletions
75
demos/samples_python/weather_forecast/config.yaml
Normal file
75
demos/samples_python/weather_forecast/config.yaml
Normal file
---
# Arch gateway demo configuration: weather-forecast sample.
version: v0.1.0

listeners:
  ingress_traffic:
    address: 0.0.0.0
    port: 10000
    message_format: openai
    timeout: 30s

  egress_traffic:
    address: 0.0.0.0
    port: 12000
    message_format: openai
    timeout: 30s

endpoints:
  weather_forecast_service:
    endpoint: host.docker.internal:18083
    connect_timeout: 0.005s

overrides:
  # confidence threshold for prompt target intent matching
  prompt_target_intent_matching_threshold: 0.6

llm_providers:
  - access_key: $GROQ_API_KEY
    model: groq/llama-3.2-3b-preview

  - access_key: $OPENAI_API_KEY
    model: openai/gpt-4o
    default: true

  - access_key: $OPENAI_API_KEY
    model: openai/gpt-4o-mini

  - access_key: $ANTHROPIC_API_KEY
    model: anthropic/claude-sonnet-4-20250514

system_prompt: |
  You are a helpful assistant.

prompt_targets:
  - name: get_current_weather
    description: Get current weather at a location.
    parameters:
      - name: location
        description: The location to get the weather for
        required: true
        type: string
        format: City, State
      - name: days
        description: the number of days for the request
        required: true
        type: int
    endpoint:
      name: weather_forecast_service
      path: /weather
      http_method: POST

  - name: default_target
    default: true
    description: This is the default target for all unmatched prompts.
    endpoint:
      name: weather_forecast_service
      path: /default_target
      http_method: POST
    system_prompt: |
      You are a helpful assistant! Summarize the user's request and provide a helpful response.
    # if it is set to false arch will send response that it received from this prompt target to the user
    # if true arch will forward the response to the default LLM
    auto_llm_dispatch_on_response: false

tracing:
  random_sampling: 100
  trace_arch_internal: true
Loading…
Add table
Add a link
Reference in a new issue