fix bug: llm.timeout not working

This commit is contained in:
莘权 马 2024-03-20 21:24:41 +08:00
parent adb42f44d6
commit af3a409ac4
14 changed files with 72 additions and 69 deletions

View file

@@ -74,7 +74,7 @@ class LLMConfig(YamlModel):
stream: bool = False
logprobs: Optional[bool] = None # https://cookbook.openai.com/examples/using_logprobs
top_logprobs: Optional[int] = None
-    timeout: int = 60
+    timeout: int = 600
# For Network
proxy: Optional[str] = None