Merge branch 'fixbug/issues/1016' into HEAD

This commit is contained in:
莘权 马 2024-03-20 17:46:48 +08:00
commit a6f31bf3e6
16 changed files with 178 additions and 93 deletions

View file

@ -802,29 +802,6 @@ def decode_image(img_url_or_b64: str) -> Image:
return img
def process_message(messages: Union[str, "Message", list[dict], list["Message"], list[str]]) -> list[dict]:
    """Normalize any supported message input into a list of ``{"role", "content"}`` dicts.

    Args:
        messages: A single message (str, dict, or Message) or a list of them.
            Plain strings are wrapped as user messages; dicts must contain
            exactly the keys ``role`` and ``content``.

    Returns:
        list[dict]: One ``{"role": ..., "content": ...}`` dict per input message.

    Raises:
        ValueError: If a dict message has unexpected keys, or a message is of
            an unsupported type.
    """
    # Wrap a single message so the loop below handles one uniform shape.
    if not isinstance(messages, list):
        messages = [messages]
    processed_messages = []
    for msg in messages:
        if isinstance(msg, str):
            # Bare strings are treated as user-authored content.
            processed_messages.append({"role": "user", "content": msg})
        elif isinstance(msg, dict):
            # Explicit check instead of `assert`, which is stripped under -O.
            if set(msg.keys()) != {"role", "content"}:
                raise ValueError(f"Message dict must have exactly 'role' and 'content' keys, but got {sorted(msg.keys())}!")
            processed_messages.append(msg)
        else:
            # Import lazily: only the Message branch needs metagpt, so
            # str/dict-only callers work without it importable.
            from metagpt.schema import Message

            if isinstance(msg, Message):
                processed_messages.append(msg.to_dict())
            else:
                # Report the offending element's type; the original reported
                # type(messages), which at this point is always `list`.
                raise ValueError(f"Only support message type are: str, Message, dict, but got {type(msg).__name__}!")
    return processed_messages
def log_and_reraise(retry_state: RetryCallState):
logger.error(f"Retry attempts exhausted. Last exception: {retry_state.outcome.exception()}")
logger.warning(

View file

@ -229,7 +229,7 @@ def count_message_tokens(messages, model="gpt-3.5-turbo-0125"):
else:
raise NotImplementedError(
f"num_tokens_from_messages() is not implemented for model {model}. "
f"See https://github.com/openai/openai-python/blob/main/chatml.md "
f"See https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken "
f"for information on how messages are converted to tokens."
)
num_tokens = 0