From cc774db7b31eba440f16b077741da53c4480cf4f Mon Sep 17 00:00:00 2001
From: better629
Date: Sat, 1 Mar 2025 18:00:30 +0800
Subject: [PATCH] simplify code

---
 examples/hello_world.py        |  4 +++-
 metagpt/provider/openai_api.py | 26 +++++++++++++-------------
 metagpt/utils/cost_manager.py  |  2 +-
 requirements.txt               |  4 ++--
 4 files changed, 19 insertions(+), 17 deletions(-)

diff --git a/examples/hello_world.py b/examples/hello_world.py
index 04bb88091..5c1082353 100644
--- a/examples/hello_world.py
+++ b/examples/hello_world.py
@@ -13,7 +13,9 @@ from metagpt.logs import logger
 
 async def ask_and_print(question: str, llm: LLM, system_prompt) -> str:
     logger.info(f"Q: {question}")
-    rsp = await llm.aask(question, system_msgs=[system_prompt])
+    rsp = await llm.aask(question, system_msgs=[system_prompt], stream=True)
+    if llm.reasoning_content:
+        logger.info(f"A reasoning: {llm.reasoning_content}")
     logger.info(f"A: {rsp}")
     return rsp
 
diff --git a/metagpt/provider/openai_api.py b/metagpt/provider/openai_api.py
index 5e718f45f..2a237f655 100644
--- a/metagpt/provider/openai_api.py
+++ b/metagpt/provider/openai_api.py
@@ -79,7 +79,7 @@ class OpenAILLM(BaseLLM):
     def _get_proxy_params(self) -> dict:
         params = {}
         if self.config.proxy:
-            params = {"proxies": self.config.proxy}
+            params = {"proxy": self.config.proxy}
             if self.config.base_url:
                 params["base_url"] = self.config.base_url
 
@@ -94,13 +94,16 @@ class OpenAILLM(BaseLLM):
         collected_reasoning_messages = []
         has_finished = False
         async for chunk in response:
-            if hasattr(chunk.choices[0].delta, "reasoning_content"):
-                collected_reasoning_messages.append(chunk.choices[0].delta.reasoning_content)  # for deepseek
+            if not chunk.choices:
                 continue
-            chunk_message = chunk.choices[0].delta.content or "" if chunk.choices else ""  # extract the message
-            finish_reason = (
-                chunk.choices[0].finish_reason if chunk.choices and hasattr(chunk.choices[0], "finish_reason") else None
-            )
+
+            choice0 = chunk.choices[0]
+            choice_delta = choice0.delta
+            if hasattr(choice_delta, "reasoning_content") and choice_delta.reasoning_content:
+                collected_reasoning_messages.append(choice_delta.reasoning_content)  # for deepseek
+                continue
+            chunk_message = choice_delta.content or ""  # extract the message
+            finish_reason = choice0.finish_reason if hasattr(choice0, "finish_reason") else None
             log_llm_stream(chunk_message)
             collected_messages.append(chunk_message)
             chunk_has_usage = hasattr(chunk, "usage") and chunk.usage
@@ -111,13 +114,10 @@
             if finish_reason:
                 if chunk_has_usage:
                     # Some services have usage as an attribute of the chunk, such as Fireworks
-                    if isinstance(chunk.usage, CompletionUsage):
-                        usage = chunk.usage
-                    else:
-                        usage = CompletionUsage(**chunk.usage)
-                elif hasattr(chunk.choices[0], "usage"):
+                    usage = CompletionUsage(**chunk.usage) if isinstance(chunk.usage, dict) else chunk.usage
+                elif hasattr(choice0, "usage"):
                     # The usage of some services is an attribute of chunk.choices[0], such as Moonshot
-                    usage = CompletionUsage(**chunk.choices[0].usage)
+                    usage = CompletionUsage(**choice0.usage)
                 has_finished = True
 
         log_llm_stream("\n")
diff --git a/metagpt/utils/cost_manager.py b/metagpt/utils/cost_manager.py
index 7149ab94e..05df1e09a 100644
--- a/metagpt/utils/cost_manager.py
+++ b/metagpt/utils/cost_manager.py
@@ -144,6 +144,6 @@ class FireworksCostManager(CostManager):
         cost = (prompt_tokens * token_costs["prompt"] + completion_tokens * token_costs["completion"]) / 1000000
         self.total_cost += cost
         logger.info(
-            f"Total running cost: ${self.total_cost:.4f}"
+            f"Total running cost: ${self.total_cost:.4f}, "
             f"Current cost: ${cost:.4f}, prompt_tokens: {prompt_tokens}, completion_tokens: {completion_tokens}"
         )
diff --git a/requirements.txt b/requirements.txt
index ffdaa21fb..ede38907c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,7 +13,7 @@ lancedb==0.4.0
 loguru==0.6.0
 meilisearch==0.21.0
 numpy~=1.26.4
-openai~=1.39.0
+openai~=1.64.0
 openpyxl~=3.1.5
 beautifulsoup4==4.12.3
 pandas==2.1.1
@@ -59,7 +59,7 @@ nbformat==5.9.2
 ipython==8.17.2
 ipykernel==6.27.1
 scikit_learn==1.3.2
-typing-extensions==4.9.0
+typing-extensions==4.11.0
 socksio~=1.0.0
 gitignore-parser==0.1.9
 # connexion[uvicorn]~=3.0.5  # Used by metagpt/tools/openapi_v3_hello.py
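
Reviewer note (not part of the patch): below is a minimal, runnable sketch of the chunk-handling flow the openai_api.py hunk introduces. FakeDelta, FakeChoice, FakeChunk, fake_stream and consume are hypothetical stand-ins, not real MetaGPT or openai-SDK names; the sketch only assumes the three chunk shapes the new loop distinguishes: choice-less keep-alive/usage chunks, deepseek-style deltas carrying reasoning_content, and ordinary content deltas.

import asyncio
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class FakeDelta:  # hypothetical stand-in for a streamed delta
    content: Optional[str] = None
    reasoning_content: Optional[str] = None


@dataclass
class FakeChoice:  # hypothetical stand-in for a streamed choice
    delta: FakeDelta
    finish_reason: Optional[str] = None


@dataclass
class FakeChunk:  # hypothetical stand-in for a ChatCompletionChunk
    choices: List[FakeChoice] = field(default_factory=list)


async def fake_stream():
    # one choice-less keep-alive chunk, two reasoning deltas, two content deltas
    yield FakeChunk()
    yield FakeChunk([FakeChoice(FakeDelta(reasoning_content="think "))])
    yield FakeChunk([FakeChoice(FakeDelta(reasoning_content="hard"))])
    yield FakeChunk([FakeChoice(FakeDelta(content="Hello"))])
    yield FakeChunk([FakeChoice(FakeDelta(content=" world"), finish_reason="stop")])


async def consume(response):
    collected_messages, collected_reasoning_messages = [], []
    async for chunk in response:
        if not chunk.choices:  # skip chunks without choices, as the patch now does
            continue
        choice_delta = chunk.choices[0].delta
        if getattr(choice_delta, "reasoning_content", None):
            collected_reasoning_messages.append(choice_delta.reasoning_content)
            continue  # reasoning deltas are diverted before the content path
        collected_messages.append(choice_delta.content or "")
    return "".join(collected_messages), "".join(collected_reasoning_messages)


content, reasoning = asyncio.run(consume(fake_stream()))
assert content == "Hello world"
assert reasoning == "think hard"

Keeping the two accumulators separate is what lets examples/hello_world.py log llm.reasoning_content alongside the streamed answer when stream=True is passed to aask.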