From 1ff50e85c2909b38946a9731d52ca86d8f4646ad Mon Sep 17 00:00:00 2001
From: XueFeng <1158231926@qq.com>
Date: Mon, 8 Apr 2024 16:54:00 +0800
Subject: [PATCH] Update `zhipuai_api.py` for custom `max_tokens` and
 `temperature` config.

---
 metagpt/provider/zhipuai_api.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/metagpt/provider/zhipuai_api.py b/metagpt/provider/zhipuai_api.py
index 2db441991..a45081fcf 100644
--- a/metagpt/provider/zhipuai_api.py
+++ b/metagpt/provider/zhipuai_api.py
@@ -43,7 +43,9 @@ class ZhiPuAILLM(BaseLLM):
         self.llm = ZhiPuModelAPI(api_key=self.api_key)
 
     def _const_kwargs(self, messages: list[dict], stream: bool = False) -> dict:
-        kwargs = {"model": self.model, "messages": messages, "stream": stream, "temperature": 0.3}
+        max_tokens = self.config.max_token if self.config.max_token > 0 else 1024
+        temperature = self.config.temperature if self.config.temperature > 0.0 else 0.3
+        kwargs = {"model": self.model, "max_tokens": max_tokens, "messages": messages, "stream": stream, "temperature": temperature}
         return kwargs
 
     def completion(self, messages: list[dict], timeout=USE_CONFIG_TIMEOUT) -> dict: