add llm stream log

shenchucheng 2023-12-23 22:45:20 +08:00
parent 7671935741
commit 0eef8a8607
5 changed files with 21 additions and 8 deletions
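Only four of the five changed files are shown below. The log_llm_stream helper they all start importing presumably comes from metagpt/logs.py, whose diff is not displayed here. A minimal sketch of what such a helper could look like, keeping the old print-to-stdout behaviour as the default sink (the names _llm_stream_log and set_llm_stream_logfunc are assumptions, not confirmed by this diff):

# Hypothetical sketch of the metagpt/logs.py addition; the real change is not shown in this view.
from functools import partial

# Default sink preserves the previous behaviour: print chunks without newlines.
_llm_stream_log = partial(print, end="")


def log_llm_stream(msg, **kwargs):
    """Forward one streamed LLM chunk to the configured sink."""
    _llm_stream_log(msg, **kwargs)


def set_llm_stream_logfunc(func):
    """Assumed hook for redirecting the stream, e.g. to a UI or a file."""
    global _llm_stream_log
    _llm_stream_log = func

Funnelling every provider through one function means the stream destination can be changed in a single place instead of editing each provider's print call.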


@@ -20,7 +20,7 @@ from tenacity import (
 )
 from metagpt.config import CONFIG, LLMProviderEnum
-from metagpt.logs import logger
+from metagpt.logs import log_llm_stream, logger
 from metagpt.provider.base_gpt_api import BaseGPTAPI
 from metagpt.provider.llm_provider_registry import register_provider
 from metagpt.provider.openai_api import log_and_reraise
@@ -121,7 +121,7 @@ class GeminiGPTAPI(BaseGPTAPI):
         collected_content = []
         async for chunk in resp:
             content = chunk.text
-            print(content, end="")
+            log_llm_stream(content, end="")
             collected_content.append(content)
         full_content = "".join(collected_content)


@@ -15,7 +15,7 @@ from tenacity import (
 from metagpt.config import CONFIG, LLMProviderEnum
 from metagpt.const import LLM_API_TIMEOUT
-from metagpt.logs import logger
+from metagpt.logs import log_llm_stream, logger
 from metagpt.provider.base_gpt_api import BaseGPTAPI
 from metagpt.provider.general_api_requestor import GeneralAPIRequestor
 from metagpt.provider.llm_provider_registry import register_provider
@@ -131,7 +131,7 @@ class OllamaGPTAPI(BaseGPTAPI):
             if not chunk.get("done", False):
                 content = self.get_choice_text(chunk)
                 collected_content.append(content)
-                print(content, end="")
+                log_llm_stream(content, end="")
             else:
                 # stream finished
                 usage = self.get_usage(chunk)


@@ -29,7 +29,7 @@ from tenacity import (
 from metagpt.config import CONFIG, Config, LLMProviderEnum
 from metagpt.const import DEFAULT_MAX_TOKENS, DEFAULT_TOKEN_SIZE
-from metagpt.logs import logger
+from metagpt.logs import log_llm_stream, logger
 from metagpt.provider.base_gpt_api import BaseGPTAPI
 from metagpt.provider.constant import GENERAL_FUNCTION_SCHEMA, GENERAL_TOOL_CHOICE
 from metagpt.provider.llm_provider_registry import register_provider
@@ -180,7 +180,7 @@ class OpenAIGPTAPI(BaseGPTAPI, RateLimiter):
         collected_messages = []
         async for i in resp:
-            print(i, end="")
+            log_llm_stream(i)
             collected_messages.append(i)
         full_reply_content = "".join(collected_messages)
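After this change every provider follows the same streaming pattern: iterate over the response, log each chunk as it arrives, and join the chunks into the full reply. A minimal self-contained sketch of that loop (fake_stream is a stand-in for a provider response, not MetaGPT code; assumes a MetaGPT install that includes this commit):

import asyncio

from metagpt.logs import log_llm_stream


async def fake_stream():
    # Stand-in for a provider's streamed response.
    for piece in ["Hello", ", ", "world", "!\n"]:
        yield piece


async def collect(resp):
    collected_messages = []
    async for chunk in resp:
        log_llm_stream(chunk)  # was: print(chunk, end="")
        collected_messages.append(chunk)
    return "".join(collected_messages)


full_reply_content = asyncio.run(collect(fake_stream()))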


@@ -16,7 +16,7 @@ from tenacity import (
 )
 from metagpt.config import CONFIG, LLMProviderEnum
-from metagpt.logs import logger
+from metagpt.logs import log_llm_stream, logger
 from metagpt.provider.base_gpt_api import BaseGPTAPI
 from metagpt.provider.llm_provider_registry import register_provider
 from metagpt.provider.openai_api import log_and_reraise
@@ -96,7 +96,7 @@ class ZhiPuAIGPTAPI(BaseGPTAPI):
             if event.event == ZhiPuEvent.ADD.value:
                 content = event.data
                 collected_content.append(content)
-                print(content, end="")
+                log_llm_stream(content)
             elif event.event == ZhiPuEvent.ERROR.value or event.event == ZhiPuEvent.INTERRUPTED.value:
                 content = event.data
                 logger.error(f"event error: {content}", end="")
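Because all stream output is now funnelled through log_llm_stream, callers can redirect it in one place, for example to capture tokens for a web UI instead of printing them. A usage sketch built on the assumed setter from the sketch above (the actual override hook, if any, is not shown in this commit):

from metagpt import logs

captured = []

# Assumed hook: collect streamed tokens instead of printing them.
logs.set_llm_stream_logfunc(lambda msg, **kwargs: captured.append(msg))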