fix config for when an open LLM model is hosted behind an OpenAI-compatible interface

This commit is contained in:
better629 2023-11-20 14:46:31 +08:00
parent 6ef3b213c3
commit f8f938f333
2 changed files with 6 additions and 4 deletions

View file

@ -7,7 +7,7 @@
## Or, you can configure OPENAI_PROXY to access official OPENAI_API_BASE.
OPENAI_API_BASE: "https://api.openai.com/v1"
#OPENAI_PROXY: "http://127.0.0.1:8118"
#OPENAI_API_KEY: "YOUR_API_KEY"
#OPENAI_API_KEY: "YOUR_API_KEY"  # set the value to sk-xxx if you host an OpenAI-compatible interface for an open LLM model
OPENAI_API_MODEL: "gpt-4"
MAX_TOKENS: 1500
RPM: 10
@ -32,7 +32,7 @@ RPM: 10
#DEPLOYMENT_ID: "YOUR_DEPLOYMENT_ID"
#### if using zhipuai from `https://open.bigmodel.cn`. You can set it here or export ZHIPUAI_API_KEY="YOUR_API_KEY"
ZHIPUAI_API_KEY: "YOUR_API_KEY"
# ZHIPUAI_API_KEY: "YOUR_API_KEY"
#### for Search