From 9ce0182fab54dcfc562925e8defe25f466d2a6e4 Mon Sep 17 00:00:00 2001
From: shenchucheng
Date: Fri, 5 Jan 2024 16:50:03 +0800
Subject: [PATCH] Log newline character after receiving llm stream response

---
 metagpt/provider/google_gemini_api.py | 1 +
 metagpt/provider/ollama_api.py        | 1 +
 metagpt/provider/openai_api.py        | 1 +
 metagpt/provider/zhipuai_api.py       | 1 +
 4 files changed, 4 insertions(+)

diff --git a/metagpt/provider/google_gemini_api.py b/metagpt/provider/google_gemini_api.py
index 795687773..c36c677ef 100644
--- a/metagpt/provider/google_gemini_api.py
+++ b/metagpt/provider/google_gemini_api.py
@@ -120,6 +120,7 @@ class GeminiLLM(BaseLLM):
             content = chunk.text
             log_llm_stream(content)
             collected_content.append(content)
+        log_llm_stream("\n")
 
         full_content = "".join(collected_content)
         usage = await self.aget_usage(messages, full_content)
diff --git a/metagpt/provider/ollama_api.py b/metagpt/provider/ollama_api.py
index 8ee04de7d..25086737f 100644
--- a/metagpt/provider/ollama_api.py
+++ b/metagpt/provider/ollama_api.py
@@ -119,6 +119,7 @@ class OllamaLLM(BaseLLM):
             else:
                 # stream finished
                 usage = self.get_usage(chunk)
+        log_llm_stream("\n")
 
         self._update_costs(usage)
         full_content = "".join(collected_content)
diff --git a/metagpt/provider/openai_api.py b/metagpt/provider/openai_api.py
index 20dde9ea5..747e36480 100644
--- a/metagpt/provider/openai_api.py
+++ b/metagpt/provider/openai_api.py
@@ -134,6 +134,7 @@ class OpenAILLM(BaseLLM):
         async for i in resp:
             log_llm_stream(i)
             collected_messages.append(i)
+        log_llm_stream("\n")
 
         full_reply_content = "".join(collected_messages)
         usage = self._calc_usage(messages, full_reply_content)
diff --git a/metagpt/provider/zhipuai_api.py b/metagpt/provider/zhipuai_api.py
index 865b7fce1..e1ccf0de5 100644
--- a/metagpt/provider/zhipuai_api.py
+++ b/metagpt/provider/zhipuai_api.py
@@ -118,6 +118,7 @@ class ZhiPuAILLM(BaseLLM):
                 usage = meta.get("usage")
             else:
                 print(f"zhipuapi else event: {event.data}", end="")
+        log_llm_stream("\n")
 
         self._update_costs(usage)
         full_content = "".join(collected_content)
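
Why the trailing "\n": log_llm_stream echoes each streamed token without a newline, so the reply renders as one continuous console line; once the stream finishes, whatever is logged next would otherwise start mid-line. The runnable sketch below illustrates the behavior this patch fixes. It assumes the default stream sink behaves like print(msg, end="") (which appears to be how metagpt/logs.py wires log_llm_stream by default); consume_stream and the token list are hypothetical stand-ins for the provider loops, not MetaGPT code.

    from functools import partial

    # Assumed default sink: echo tokens with no trailing newline.
    log_llm_stream = partial(print, end="")

    def consume_stream(tokens):
        """Hypothetical stand-in for the provider streaming loops above."""
        collected = []
        for token in tokens:
            log_llm_stream(token)   # tokens print on one continuous line
            collected.append(token)
        log_llm_stream("\n")        # the added call: terminate that line
        return "".join(collected)

    full_reply = consume_stream(["The", " answer", " is", " 42."])
    assert full_reply == "The answer is 42."

Without the final log_llm_stream("\n"), the next log message would be appended to the last streamed token on the same console line, which is why each provider logs it exactly once, after its stream-consuming loop ends.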