fix: LLM client error

This commit is contained in:
莘权 马 2024-07-17 12:30:13 +08:00
parent bc63e0aa89
commit 9a96d443b6
2 changed files with 6 additions and 3 deletions

View file

@ -3,7 +3,7 @@ llm:
base_url: "YOUR_gpt-3.5-turbo_BASE_URL"
api_key: "YOUR_gpt-3.5-turbo_API_KEY"
model: "gpt-3.5-turbo" # or gpt-3.5-turbo
proxy: "YOUR_gpt-3.5-turbo_PROXY" # for LLM API requests
# proxy: "YOUR_gpt-3.5-turbo_PROXY" # for LLM API requests
# timeout: 600 # Optional. If set to 0, default value is 300.
# Details: https://azure.microsoft.com/en-us/pricing/details/cognitive-services/openai-service/
pricing_plan: "" # Optional. Use for Azure LLM when its model name is not the same as OpenAI's
@ -13,7 +13,7 @@ models:
api_type: "openai" # or azure / ollama / groq etc.
base_url: "YOUR_MODEL_1_BASE_URL"
api_key: "YOUR_MODEL_1_API_KEY"
proxy: "YOUR_MODEL_1_PROXY" # for LLM API requests
# proxy: "YOUR_MODEL_1_PROXY" # for LLM API requests
# timeout: 600 # Optional. If set to 0, default value is 300.
# Details: https://azure.microsoft.com/en-us/pricing/details/cognitive-services/openai-service/
pricing_plan: "" # Optional. Use for Azure LLM when its model name is not the same as OpenAI's