diff --git a/metagpt/logs.py b/metagpt/logs.py
index ab1bc4e94..fb0fdd553 100644
--- a/metagpt/logs.py
+++ b/metagpt/logs.py
@@ -8,6 +8,7 @@
 import sys
 from datetime import datetime
+from functools import partial
 
 from loguru import logger as _logger
 
@@ -26,3 +27,15 @@ def define_log_level(print_level="INFO", logfile_level="DEBUG"):
 
 
 logger = define_log_level()
+
+
+def log_llm_stream(msg):
+    _llm_stream_log(msg)
+
+
+def set_llm_stream_logfunc(func):
+    global _llm_stream_log
+    _llm_stream_log = func
+
+
+_llm_stream_log = partial(print, end="")
diff --git a/metagpt/provider/google_gemini_api.py b/metagpt/provider/google_gemini_api.py
index e9d3ea70d..3cfd426d5 100644
--- a/metagpt/provider/google_gemini_api.py
+++ b/metagpt/provider/google_gemini_api.py
@@ -20,7 +20,7 @@ from tenacity import (
 )
 
 from metagpt.config import CONFIG, LLMProviderEnum
-from metagpt.logs import logger
+from metagpt.logs import log_llm_stream, logger
 from metagpt.provider.base_gpt_api import BaseGPTAPI
 from metagpt.provider.llm_provider_registry import register_provider
 from metagpt.provider.openai_api import log_and_reraise
@@ -121,7 +121,7 @@ class GeminiGPTAPI(BaseGPTAPI):
         collected_content = []
         async for chunk in resp:
             content = chunk.text
-            print(content, end="")
+            log_llm_stream(content)
             collected_content.append(content)
 
         full_content = "".join(collected_content)
diff --git a/metagpt/provider/ollama_api.py b/metagpt/provider/ollama_api.py
index 7d858e769..c12edbd61 100644
--- a/metagpt/provider/ollama_api.py
+++ b/metagpt/provider/ollama_api.py
@@ -15,7 +15,7 @@ from tenacity import (
 )
 from metagpt.config import CONFIG, LLMProviderEnum
 from metagpt.const import LLM_API_TIMEOUT
-from metagpt.logs import logger
+from metagpt.logs import log_llm_stream, logger
 from metagpt.provider.base_gpt_api import BaseGPTAPI
 from metagpt.provider.general_api_requestor import GeneralAPIRequestor
 from metagpt.provider.llm_provider_registry import register_provider
@@ -131,7 +131,7 @@ class OllamaGPTAPI(BaseGPTAPI):
             if not chunk.get("done", False):
                 content = self.get_choice_text(chunk)
                 collected_content.append(content)
-                print(content, end="")
+                log_llm_stream(content)
             else:
                 # stream finished
                 usage = self.get_usage(chunk)
diff --git a/metagpt/provider/openai_api.py b/metagpt/provider/openai_api.py
index 1d2cdb591..195d2ea16 100644
--- a/metagpt/provider/openai_api.py
+++ b/metagpt/provider/openai_api.py
@@ -29,7 +29,7 @@ from tenacity import (
 )
 from metagpt.config import CONFIG, Config, LLMProviderEnum
 from metagpt.const import DEFAULT_MAX_TOKENS, DEFAULT_TOKEN_SIZE
-from metagpt.logs import logger
+from metagpt.logs import log_llm_stream, logger
 from metagpt.provider.base_gpt_api import BaseGPTAPI
 from metagpt.provider.constant import GENERAL_FUNCTION_SCHEMA, GENERAL_TOOL_CHOICE
 from metagpt.provider.llm_provider_registry import register_provider
@@ -180,7 +180,7 @@ class OpenAIGPTAPI(BaseGPTAPI, RateLimiter):
 
         collected_messages = []
         async for i in resp:
-            print(i, end="")
+            log_llm_stream(i)
             collected_messages.append(i)
 
         full_reply_content = "".join(collected_messages)
diff --git a/metagpt/provider/zhipuai_api.py b/metagpt/provider/zhipuai_api.py
index 0d5663431..8d57cd444 100644
--- a/metagpt/provider/zhipuai_api.py
+++ b/metagpt/provider/zhipuai_api.py
@@ -16,7 +16,7 @@ from tenacity import (
 )
 
 from metagpt.config import CONFIG, LLMProviderEnum
-from metagpt.logs import logger
+from metagpt.logs import log_llm_stream, logger
 from metagpt.provider.base_gpt_api import BaseGPTAPI
 from metagpt.provider.llm_provider_registry import register_provider
 from metagpt.provider.openai_api import log_and_reraise
@@ -96,7 +96,7 @@ class ZhiPuAIGPTAPI(BaseGPTAPI):
             if event.event == ZhiPuEvent.ADD.value:
                 content = event.data
                 collected_content.append(content)
-                print(content, end="")
+                log_llm_stream(content)
             elif event.event == ZhiPuEvent.ERROR.value or event.event == ZhiPuEvent.INTERRUPTED.value:
                 content = event.data
                 logger.error(f"event error: {content}", end="")