mirror of
https://github.com/katanemo/plano.git
synced 2026-04-25 00:36:34 +02:00
Remove top level container and start snake-case for config files (#27)
t
This commit is contained in:
parent
b8ea65d858
commit
9774148c75
6 changed files with 71 additions and 133 deletions
|
|
@ -1,32 +0,0 @@
|
|||
katanemo-prompt-config:
|
||||
default-prompt-endpoint: "127.0.0.1"
|
||||
load-balancing: "round-robin"
|
||||
timeout-ms: 5000
|
||||
|
||||
embedding-provider:
|
||||
name: "SentenceTransformer"
|
||||
model: "all-MiniLM-L6-v2"
|
||||
|
||||
llm-providers:
|
||||
|
||||
- name: "open-ai-gpt-4"
|
||||
api-key: "$OPEN_AI_API_KEY"
|
||||
model: gpt-4
|
||||
|
||||
system-prompt: |
|
||||
You are a helpful weather forecaster. Please follow these guidelines when responding to user queries:
|
||||
- Use Fahrenheit for temperature
|
||||
- Use miles per hour for wind speed
|
||||
|
||||
prompt-targets:
|
||||
|
||||
- type: context-resolver
|
||||
name: weather-forecast
|
||||
few-shot-examples:
|
||||
- what is the weather in New York?
|
||||
endpoint: "POST:$WEATHER_FORECAST_API_ENDPOINT"
|
||||
cache-response: true
|
||||
cache-response-settings:
|
||||
- cache-ttl-secs: 3600 # cache expiry in seconds
|
||||
- cache-max-size: 1000 # in number of items
|
||||
- cache-eviction-strategy: LRU
|
||||
Loading…
Add table
Add a link
Reference in a new issue