diff --git a/metagpt/llm.py b/metagpt/llm.py index 1f6a6bb1a..e9b80d7a8 100644 --- a/metagpt/llm.py +++ b/metagpt/llm.py @@ -14,7 +14,7 @@ from metagpt.provider.zhipuai_api import ZhiPuAIGPTAPI from metagpt.provider.spark_api import SparkAPI -def LLM(): +def LLM() -> "BaseGPTAPI": """ initialize different LLM instance according to the key field existence""" # TODO a little trick, can use registry to initialize LLM instance further if CONFIG.openai_api_key and CONFIG.openai_api_key.starswith("sk-"): diff --git a/metagpt/provider/zhipuai/async_sse_client.py b/metagpt/provider/zhipuai/async_sse_client.py index 7a4275982..b819fdc63 100644 --- a/metagpt/provider/zhipuai/async_sse_client.py +++ b/metagpt/provider/zhipuai/async_sse_client.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # @Desc : async_sse_client to make keep the use of Event to access response +# refs to `https://github.com/zhipuai/zhipuai-sdk-python/blob/main/zhipuai/utils/sse_client.py` from zhipuai.utils.sse_client import SSEClient, Event, _FIELD_SEPARATOR diff --git a/metagpt/provider/zhipuai/zhipu_model_api.py b/metagpt/provider/zhipuai/zhipu_model_api.py index e1d52061d..618b2e865 100644 --- a/metagpt/provider/zhipuai/zhipu_model_api.py +++ b/metagpt/provider/zhipuai/zhipu_model_api.py @@ -29,8 +29,12 @@ class ZhiPuModelAPI(ModelAPI): @classmethod def split_zhipu_api_url(cls, invoke_type: InvokeType, kwargs): # use this method to prevent zhipu api upgrading to different version. 
+ # and to follow the GeneralAPIRequestor, which is implemented based on the openai sdk zhipu_api_url = cls._build_api_url(kwargs, invoke_type) - # example: https://open.bigmodel.cn/api/paas/v3/model-api/{model}/{invoke_method} + """ + example: + zhipu_api_url: https://open.bigmodel.cn/api/paas/v3/model-api/{model}/{invoke_method} + """ arr = zhipu_api_url.split("/api/") # ("https://open.bigmodel.cn/api/" , "/paas/v3/model-api/chatglm_turbo/invoke") return f"{arr[0]}/api", f"/{arr[1]}" diff --git a/metagpt/provider/zhipuai_api.py b/metagpt/provider/zhipuai_api.py index 4e8e6b760..2ad1944c2 100644 --- a/metagpt/provider/zhipuai_api.py +++ b/metagpt/provider/zhipuai_api.py @@ -68,7 +68,7 @@ class ZhiPuAIGPTAPI(BaseGPTAPI): def get_choice_text(self, resp: dict) -> str: """ get the first text of choice from llm response """ - assist_msg = resp.get("data").get("choices")[-1] + assist_msg = resp.get("data", {}).get("choices", [{"role": "error"}])[-1] assert assist_msg["role"] == "assistant" return assist_msg.get("content") @@ -121,16 +121,15 @@ class ZhiPuAIGPTAPI(BaseGPTAPI): self._update_costs(usage) full_content = "".join(collected_content) - logger.info(f"full_content: {full_content} !!") return full_content - # @retry( - # stop=stop_after_attempt(3), - # wait=wait_fixed(1), - # after=after_log(logger, logger.level("WARNING").name), - # retry=retry_if_exception_type(ConnectionError), - # retry_error_callback=log_and_reraise - # ) + @retry( + stop=stop_after_attempt(3), + wait=wait_fixed(1), + after=after_log(logger, logger.level("WARNING").name), + retry=retry_if_exception_type(ConnectionError), + retry_error_callback=log_and_reraise + ) async def acompletion_text(self, messages: list[dict], stream=False) -> str: """ response in async with stream or non-stream mode """ if stream: