Merge pull request #156 from martcpp/main

English comment translation
This commit is contained in:
stellaHSR 2023-08-09 11:19:31 +08:00 committed by GitHub
commit 0be49113a0
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 39 additions and 12 deletions

View file

@ -16,7 +16,10 @@ from metagpt.schema import Message
class Environment(BaseModel):
"""环境,承载一批角色,角色可以向环境发布消息,可以被其他角色观察到"""
"""环境,承载一批角色,角色可以向环境发布消息,可以被其他角色观察到
Environment, hosting a batch of roles, roles can publish messages to the environment, and can be observed by other roles
"""
roles: dict[str, Role] = Field(default_factory=dict)
memory: Memory = Field(default_factory=Memory)
@ -26,23 +29,31 @@ class Environment(BaseModel):
arbitrary_types_allowed = True
def add_role(self, role: Role):
"""增加一个在当前环境的Role"""
"""增加一个在当前环境的角色
Add a role in the current environment
"""
role.set_env(self)
self.roles[role.profile] = role
def add_roles(self, roles: Iterable[Role]):
"""增加一批在当前环境的Role"""
"""增加一批在当前环境的角色
Add a batch of roles in the current environment
"""
for role in roles:
self.add_role(role)
def publish_message(self, message: Message):
"""向当前环境发布信息"""
"""向当前环境发布信息
Post information to the current environment
"""
# self.message_queue.put(message)
self.memory.add(message)
self.history += f"\n{message}"
async def run(self, k=1):
"""处理一次所有Role的运行"""
"""处理一次所有角色的运行
Process all Role runs at once
"""
# while not self.message_queue.empty():
# message = self.message_queue.get()
# rsp = await self.manager.handle(message, self)
@ -56,9 +67,13 @@ class Environment(BaseModel):
await asyncio.gather(*futures)
def get_roles(self) -> dict[str, Role]:
"""获得环境内的所有Role"""
"""获得环境内的所有角色
Get all the roles in the environment
"""
return self.roles
def get_role(self, name: str) -> Role:
"""获得环境内的指定Role"""
"""获得环境内的指定角色
Get the specified role in the environment
"""
return self.roles.get(name, None)

View file

@ -14,5 +14,7 @@ CLAUDE_LLM = Claude()
async def ai_func(prompt):
"""使用LLM进行QA"""
"""使用LLM进行QA
QA with LLMs
"""
return await DEFAULT_LLM.aask(prompt)

View file

@ -14,7 +14,9 @@ from metagpt.const import PROJECT_ROOT
def define_log_level(print_level="INFO", logfile_level="DEBUG"):
"""调整日志级别到level之上"""
"""调整日志级别到level之上
Adjust the log level to above level
"""
_logger.remove()
_logger.add(sys.stderr, level=print_level)
_logger.add(PROJECT_ROOT / 'logs/log.txt', level=logfile_level)

View file

@ -33,6 +33,7 @@ class Manager:
async def handle(self, message: Message, environment):
"""
管理员处理信息现在简单的将信息递交给下一个人
The administrator processes the information, now simply passes the information on to the next person
:param message:
:param environment:
:return:
@ -50,6 +51,7 @@ class Manager:
# chosen_role_name = self.llm.ask(self.prompt_template.format(context))
# FIXME: 现在通过简单的字典决定流向,但之后还是应该有思考过程
# The flow is currently determined by a simple dictionary, but a proper reasoning step should follow later
next_role_profile = self.role_directions[message.role]
# logger.debug(f"{next_role_profile}")
for _, role in roles.items():

View file

@ -46,21 +46,27 @@ class Message:
@dataclass
class UserMessage(Message):
"""便于支持OpenAI的消息"""
"""便于支持OpenAI的消息
Facilitate support for OpenAI messages
"""
def __init__(self, content: str):
super().__init__(content, 'user')
@dataclass
class SystemMessage(Message):
"""便于支持OpenAI的消息"""
"""便于支持OpenAI的消息
Facilitate support for OpenAI messages
"""
def __init__(self, content: str):
super().__init__(content, 'system')
@dataclass
class AIMessage(Message):
"""便于支持OpenAI的消息"""
"""便于支持OpenAI的消息
Facilitate support for OpenAI messages
"""
def __init__(self, content: str):
super().__init__(content, 'assistant')