Merge branch 'feat-memory-opt' into 'mgx_ops'

make the longterm-memory path configurable

See merge request pub/MetaGPT!388
This commit is contained in:
王金淋 2024-09-29 07:09:39 +00:00
commit f8e4e3a320
4 changed files with 39 additions and 30 deletions

View file

@@ -85,6 +85,7 @@ exp_pool:
role_zero:
enable_longterm_memory: false # Whether to use long-term memory. Default is `false`.
longterm_memory_persist_path: .role_memory_data # The directory to save data.
azure_tts_subscription_key: "YOUR_SUBSCRIPTION_KEY"
azure_tts_region: "eastus"

View file

@@ -5,3 +5,4 @@ from metagpt.utils.yaml_model import YamlModel
class RoleZeroConfig(YamlModel):
enable_longterm_memory: bool = Field(default=False, description="Whether to use long-term memory.")
longterm_memory_persist_path: str = Field(default=".role_memory_data", description="The directory to save data.")

View file

@@ -114,23 +114,56 @@ class RoleZeroLongTermMemory(Memory):
item = self._get_longterm_memory_item()
self._add_to_longterm_memory(item)
@handle_exception
def _get_longterm_memory_item(self) -> Optional[LongTermMemoryItem]:
"""Retrieves the most recent message before the last k messages."""
index = -(self.memory_k + 1)
message = self.get_by_position(index)
return LongTermMemoryItem(message=message)
return LongTermMemoryItem(message=message) if message else None
@handle_exception
def _add_to_longterm_memory(self, item: LongTermMemoryItem):
"""Adds a long-term memory item to the RAG engine."""
"""Adds a long-term memory item to the RAG engine.
If adding long-term memory fails, it will only log the error without interrupting program execution.
"""
if not item:
return
self.rag_engine.add_objs([item])
@handle_exception(default_return=[])
def _fetch_longterm_memories(self, query: str) -> list[Message]:
"""Fetches long-term memories based on a query.
If fetching long-term memories fails, it will return the default value (an empty list) without interrupting program execution.
Args:
query (str): The query string to search for relevant memories.
Returns:
list[Message]: A list of user and AI messages related to the query.
"""
if not query:
return []
nodes = self.rag_engine.retrieve(query)
items = self._get_items_from_nodes(nodes)
memories = [item.message for item in items]
return memories
def _get_items_from_nodes(self, nodes: list["NodeWithScore"]) -> list[LongTermMemoryItem]:
"""Get items from nodes and arrange them in order of their `created_at`."""
items: list[LongTermMemoryItem] = [node.metadata["obj"] for node in nodes]
items.sort(key=lambda item: item.created_at)
return items
def _build_longterm_memory_query(self) -> str:
"""Build the content used to query related long-term memory.
@@ -160,30 +193,3 @@ class RoleZeroLongTermMemory(Memory):
sent_from_team_leader = message.sent_from == TEAMLEADER_NAME
return is_user_message and (cause_by_user_requirement or sent_from_team_leader)
def _fetch_longterm_memories(self, query: str) -> list[Message]:
"""Fetches long-term memories based on a query.
Args:
query (str): The query string to search for relevant memories.
Returns:
list[Message]: A list of user and AI messages related to the query.
"""
if not query:
return []
nodes = self.rag_engine.retrieve(query)
items = self._get_items_from_nodes(nodes)
memories = [item.message for item in items]
return memories
def _get_items_from_nodes(self, nodes: list["NodeWithScore"]) -> list[LongTermMemoryItem]:
"""Get items from nodes and arrange them in order of their `created_at`."""
items: list[LongTermMemoryItem] = [node.metadata["obj"] for node in nodes]
items.sort(key=lambda item: item.created_at)
return items

View file

@@ -181,6 +181,7 @@ class RoleZero(Role):
if self.config.role_zero.enable_longterm_memory:
self.rc.memory = RoleZeroLongTermMemory(
**self.rc.memory.model_dump(),
persist_path=self.config.role_zero.longterm_memory_persist_path,
collection_name=self.name.replace(" ", ""),
memory_k=self.memory_k,
)