Merge branch 'main' into llm_mock

This commit is contained in:
yzlin 2024-01-05 15:28:03 +08:00
commit a3dc6aa7e3
21 changed files with 237 additions and 56 deletions

View file

@ -34,7 +34,10 @@ # MetaGPT: The Multi-Agent Framework
<p align="center">Software Company Multi-Role Schematic (Gradually Implementing)</p>
## News
- Dec 15: [v0.5.0](https://github.com/geekan/MetaGPT/releases/tag/v0.5.0) is released! We introduce **incremental development**, facilitating agents to build up larger projects on top of their previous efforts or existing codebase. We also launch a whole collection of important features, including **multilingual support** (experimental), multiple **programming languages support** (experimental), **incremental development** (experimental), CLI support, pip support, enhanced code review, documentation mechanism, and optimized messaging mechanism!
🚀 Jan 03: Here comes [v0.6.0](https://github.com/geekan/MetaGPT/releases/tag/v0.6.0)! In this version, we added serialization and deserialization of important objects and enabled breakpoint recovery. We upgraded OpenAI package to v1.6.0 and supported Gemini, ZhipuAI, Ollama, OpenLLM, etc. Moreover, we provided extremely simple examples where you need only 7 lines to implement a general election [debate](https://github.com/geekan/MetaGPT/blob/main/examples/debate_simple.py). Check out more details [here](https://github.com/geekan/MetaGPT/releases/tag/v0.6.0)!
🚀 Dec 15: [v0.5.0](https://github.com/geekan/MetaGPT/releases/tag/v0.5.0) is released! We introduced **incremental development**, facilitating agents to build up larger projects on top of their previous efforts or existing codebase. We also launched a whole collection of important features, including **multilingual support** (experimental), multiple **programming languages support** (experimental), **incremental development** (experimental), CLI support, pip support, enhanced code review, documentation mechanism, and optimized messaging mechanism!
## Install

View file

@ -15,6 +15,7 @@ OPENAI_API_MODEL: "gpt-4-1106-preview"
MAX_TOKENS: 4096
RPM: 10
TIMEOUT: 60 # Timeout for llm invocation
#DEFAULT_PROVIDER: openai
#### if Spark
#SPARK_APPID : "YOUR_APPID"

View file

@ -1,7 +1,7 @@
role:
name: Teacher # Referenced the `Teacher` in `metagpt/roles/teacher.py`.
module: metagpt.roles.teacher # Referenced `metagpt/roles/teacher.py`.
skills: # Refer to the skill `name` of the published skill in `.well-known/skills.yaml`.
skills: # Refer to the skill `name` of the published skill in `docs/.well-known/skills.yaml`.
- name: text_to_speech
description: Text-to-speech
- name: text_to_image

View file

@ -35,7 +35,6 @@ class PrepareDocuments(Action):
if path.exists() and not CONFIG.inc:
shutil.rmtree(path)
CONFIG.project_path = path
CONFIG.project_name = path.name
CONFIG.git_repo = GitRepository(local_path=path, auto_init=True)
async def run(self, with_messages, **kwargs):

View file

@ -33,11 +33,16 @@ class RebuildClassView(Action):
graph_repo_pathname = CONFIG.git_repo.workdir / GRAPH_REPO_FILE_REPO / CONFIG.git_repo.workdir.name
graph_db = await DiGraphRepository.load_from(str(graph_repo_pathname.with_suffix(".json")))
repo_parser = RepoParser(base_directory=Path(self.context))
class_views, relationship_views = await repo_parser.rebuild_class_views(path=Path(self.context)) # use pylint
# use pylint
class_views, relationship_views, package_root = await repo_parser.rebuild_class_views(path=Path(self.context))
await GraphRepository.update_graph_db_with_class_views(graph_db, class_views)
await GraphRepository.update_graph_db_with_class_relationship_views(graph_db, relationship_views)
symbols = repo_parser.generate_symbols() # use ast
# use ast
direction, diff_path = self._diff_path(path_root=Path(self.context).resolve(), package_root=package_root)
symbols = repo_parser.generate_symbols()
for file_info in symbols:
# Align to the same root directory in accordance with `class_views`.
file_info.file = self._align_root(file_info.file, direction, diff_path)
await GraphRepository.update_graph_db_with_file_info(graph_db, file_info)
await self._create_mermaid_class_views(graph_db=graph_db)
await graph_db.save()
@ -193,3 +198,20 @@ class RebuildClassView(Action):
method.args.append(ClassAttribute(name=parts[0].strip()))
continue
method.args.append(ClassAttribute(name=parts[0].strip(), value_type=parts[-1].strip()))
@staticmethod
def _diff_path(path_root: Path, package_root: Path) -> (str, str):
if len(str(path_root)) > len(str(package_root)):
return "+", str(path_root.relative_to(package_root))
if len(str(path_root)) < len(str(package_root)):
return "-", str(package_root.relative_to(path_root))
return "=", "."
@staticmethod
def _align_root(path: str, direction: str, diff_path: str):
if direction == "=":
return path
if direction == "+":
return diff_path + "/" + path
else:
return path[len(diff_path) + 1 :]

View file

@ -1,33 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/12/19
@Author : mashenquan
@File : rebuild_class_view_an.py
@Desc : Defines `ActionNode` objects used by rebuild_class_view.py
"""
from metagpt.actions.action_node import ActionNode
CLASS_SOURCE_CODE_BLOCK = ActionNode(
key="Class View",
expected_type=str,
instruction='Generate the mermaid class diagram corresponding to source code in "context."',
example="""
classDiagram
class A {
-int x
+int y
-int speed
-int direction
+__init__(x: int, y: int, speed: int, direction: int)
+change_direction(new_direction: int) None
+move() None
}
""",
)
REBUILD_CLASS_VIEW_NODES = [
CLASS_SOURCE_CODE_BLOCK,
]
REBUILD_CLASS_VIEW_NODE = ActionNode.from_children("RebuildClassView", REBUILD_CLASS_VIEW_NODES)

View file

@ -0,0 +1,60 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2024/1/4
@Author : mashenquan
@File : rebuild_sequence_view.py
@Desc : Rebuild sequence view info
"""
from __future__ import annotations
from pathlib import Path
from typing import List
from metagpt.actions import Action
from metagpt.config import CONFIG
from metagpt.const import GRAPH_REPO_FILE_REPO
from metagpt.logs import logger
from metagpt.utils.common import aread, list_files
from metagpt.utils.di_graph_repository import DiGraphRepository
from metagpt.utils.graph_repository import GraphKeyword
class RebuildSequenceView(Action):
    """Action that asks the LLM to translate a repo's `__main__` entry files into
    Mermaid sequence diagrams and stores them in the project's graph DB."""

    async def run(self, with_messages=None, format=CONFIG.prompt_schema):
        # NOTE(review): `format` shadows the builtin and its default is evaluated
        # once at import time from CONFIG — confirm this is intentional.
        graph_repo_pathname = CONFIG.git_repo.workdir / GRAPH_REPO_FILE_REPO / CONFIG.git_repo.workdir.name
        graph_db = await DiGraphRepository.load_from(str(graph_repo_pathname.with_suffix(".json")))
        entries = await RebuildSequenceView._search_main_entry(graph_db)
        for entry in entries:
            await self._rebuild_sequence_view(entry, graph_db)
        await graph_db.save()

    @staticmethod
    async def _search_main_entry(graph_db) -> List:
        """Return graph rows whose subject or object carries the `__name__:__main__` tag."""
        rows = await graph_db.select(predicate=GraphKeyword.HAS_PAGE_INFO)
        tag = "__name__:__main__"
        entries = []
        for r in rows:
            if tag in r.subject or tag in r.object_:
                entries.append(r)
        return entries

    async def _rebuild_sequence_view(self, entry, graph_db):
        """Translate one source file into a Mermaid sequence diagram via the LLM
        and insert the result into `graph_db` under HAS_SEQUENCE_VIEW."""
        # Subject format is "<filename>:<lineno...>"; keep only the filename part.
        filename = entry.subject.split(":", 1)[0]
        src_filename = RebuildSequenceView._get_full_filename(root=self.context, pathname=filename)
        content = await aread(filename=src_filename, encoding="utf-8")
        content = f"```python\n{content}\n```\n\n---\nTranslate the code above into Mermaid Sequence Diagram."
        data = await self.llm.aask(
            msg=content, system_msgs=["You are a python code to Mermaid Sequence Diagram translator in function detail"]
        )
        await graph_db.insert(subject=filename, predicate=GraphKeyword.HAS_SEQUENCE_VIEW, object_=data)
        logger.info(data)

    @staticmethod
    def _get_full_filename(root: str | Path, pathname: str | Path) -> Path | None:
        """Return the file under `root` whose path ends with `pathname`, or None if absent."""
        files = list_files(root=root)
        # Prefix with "/" so "f/g.txt" cannot match "xf/g.txt".
        postfix = "/" + str(pathname)
        for i in files:
            if str(i).endswith(postfix):
                return i
        return None

View file

@ -0,0 +1,16 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2024/1/4
@Author : mashenquan
@File : rebuild_sequence_view_an.py
"""
from metagpt.actions.action_node import ActionNode
from metagpt.utils.mermaid import MMC2
# ActionNode that instructs the LLM to convert source code (the "context")
# into a Mermaid sequence diagram, using MMC2 as the format example.
CODE_2_MERMAID_SEQUENCE_DIAGRAM = ActionNode(
    key="Program call flow",
    expected_type=str,
    instruction='Translate the "context" content into "format example" format.',
    example=MMC2,
)

View file

@ -14,6 +14,7 @@
from __future__ import annotations
import json
import uuid
from pathlib import Path
from typing import Optional
@ -117,7 +118,7 @@ class WritePRD(Action):
# if sas.result:
# logger.info(sas.result)
# logger.info(rsp)
project_name = CONFIG.project_name if CONFIG.project_name else ""
project_name = CONFIG.project_name or ""
context = CONTEXT_TEMPLATE.format(requirements=requirements, project_name=project_name)
exclude = [PROJECT_NAME.key] if project_name else []
node = await WRITE_PRD_NODE.fill(context=context, llm=self.llm, exclude=exclude) # schema=schema
@ -183,6 +184,8 @@ class WritePRD(Action):
ws_name = CodeParser.parse_str(block="Project Name", text=prd)
if ws_name:
CONFIG.project_name = ws_name
if not CONFIG.project_name: # The LLM failed to provide a project name, and the user didn't provide one either.
CONFIG.project_name = "app" + uuid.uuid4().hex[:16]
CONFIG.git_repo.rename_root(CONFIG.project_name)
async def _is_bugfix(self, context) -> bool:

View file

@ -11,7 +11,6 @@
from typing import Optional
from metagpt.actions.action import Action
from metagpt.config import CONFIG
from metagpt.const import TEST_CODES_FILE_REPO
from metagpt.logs import logger
from metagpt.schema import Document, TestingContext
@ -60,11 +59,12 @@ class WriteTest(Action):
self.context.test_doc = Document(
filename="test_" + self.context.code_doc.filename, root_path=TEST_CODES_FILE_REPO
)
fake_root = "/data"
prompt = PROMPT_TEMPLATE.format(
code_to_test=self.context.code_doc.content,
test_file_name=self.context.test_doc.filename,
source_file_path=self.context.code_doc.root_relative_path,
workspace=CONFIG.git_repo.workdir,
source_file_path=fake_root + "/" + self.context.code_doc.root_relative_path,
workspace=fake_root,
)
self.context.test_doc.content = await self.write_code(prompt)
return self.context

View file

@ -50,6 +50,9 @@ class LLMProviderEnum(Enum):
AZURE_OPENAI = "azure_openai"
OLLAMA = "ollama"
@classmethod
def _missing_(cls, value):
    """Fall back to OPENAI for unrecognized provider values.

    Fixed: the Enum lookup-failure hook is the classmethod `_missing_(cls, value)`;
    `__missing__` is a dict-subclass hook and is never called by Enum, so the
    original fallback was dead code.
    """
    return cls.OPENAI
class Config(metaclass=Singleton):
"""
@ -108,6 +111,11 @@ class Config(metaclass=Singleton):
if v:
provider = k
break
if provider is None:
if self.DEFAULT_PROVIDER:
provider = LLMProviderEnum(self.DEFAULT_PROVIDER)
else:
raise NotConfiguredException("You should config a LLM configuration first")
if provider is LLMProviderEnum.GEMINI and not require_python_version(req_version=(3, 10)):
warnings.warn("Use Gemini requires Python >= 3.10")
@ -117,7 +125,6 @@ class Config(metaclass=Singleton):
if provider:
logger.info(f"API: {provider}")
return provider
raise NotConfiguredException("You should config a LLM configuration first")
def get_model_name(self, provider=None) -> str:
provider = provider or self.get_default_llm_provider_enum()

View file

@ -67,7 +67,7 @@ class SkillsDeclaration(BaseModel):
@staticmethod
async def load(skill_yaml_file_name: Path = None) -> "SkillsDeclaration":
if not skill_yaml_file_name:
skill_yaml_file_name = Path(__file__).parent.parent.parent / ".well-known/skills.yaml"
skill_yaml_file_name = Path(__file__).parent.parent.parent / "docs/.well-known/skills.yaml"
async with aiofiles.open(str(skill_yaml_file_name), mode="r") as reader:
data = await reader.read(-1)
skill_data = yaml.safe_load(data)

View file

@ -240,12 +240,12 @@ class RepoParser(BaseModel):
class_views = await self._parse_classes(class_view_pathname)
relationship_views = await self._parse_class_relationships(class_view_pathname)
packages_pathname = path / "packages.dot"
class_views, relationship_views = RepoParser._repair_namespaces(
class_views, relationship_views, package_root = RepoParser._repair_namespaces(
class_views=class_views, relationship_views=relationship_views, path=path
)
class_view_pathname.unlink(missing_ok=True)
packages_pathname.unlink(missing_ok=True)
return class_views, relationship_views
return class_views, relationship_views, package_root
async def _parse_classes(self, class_view_pathname):
class_views = []
@ -364,9 +364,9 @@ class RepoParser(BaseModel):
@staticmethod
def _repair_namespaces(
class_views: List[ClassInfo], relationship_views: List[ClassRelationship], path: str | Path
) -> (List[ClassInfo], List[ClassRelationship]):
) -> (List[ClassInfo], List[ClassRelationship], str):
if not class_views:
return []
return [], [], ""
c = class_views[0]
full_key = str(path).lstrip("/").replace("/", ".")
root_namespace = RepoParser._find_root(full_key, c.package)
@ -388,7 +388,7 @@ class RepoParser(BaseModel):
v.src = RepoParser._repair_ns(v.src, new_mappings)
v.dest = RepoParser._repair_ns(v.dest, new_mappings)
relationship_views[i] = v
return class_views, relationship_views
return class_views, relationship_views, root_path
@staticmethod
def _repair_ns(package, mappings):

View file

@ -550,3 +550,20 @@ async def read_file_block(filename: str | Path, lineno: int, end_lineno: int):
break
lines.append(line)
return "".join(lines)
def list_files(root: str | Path) -> List[Path]:
files = []
try:
directory_path = Path(root)
if not directory_path.exists():
return []
for file_path in directory_path.iterdir():
if file_path.is_file():
files.append(file_path)
else:
subfolder_files = list_files(root=file_path)
files.extend(subfolder_files)
except Exception as e:
logger.error(f"Error: {e}")
return files

View file

@ -135,12 +135,12 @@ class GraphRepository(ABC):
@staticmethod
async def update_graph_db_with_class_views(graph_db: "GraphRepository", class_views: List[ClassInfo]):
for c in class_views:
filename, class_name = c.package.split(":", 1)
filename, _ = c.package.split(":", 1)
await graph_db.insert(subject=filename, predicate=GraphKeyword.IS, object_=GraphKeyword.SOURCE_CODE)
file_types = {".py": "python", ".js": "javascript"}
file_type = file_types.get(Path(filename).suffix, GraphKeyword.NULL)
await graph_db.insert(subject=filename, predicate=GraphKeyword.IS, object_=file_type)
await graph_db.insert(subject=filename, predicate=GraphKeyword.HAS_CLASS, object_=class_name)
await graph_db.insert(subject=filename, predicate=GraphKeyword.HAS_CLASS, object_=c.package)
await graph_db.insert(
subject=c.package,
predicate=GraphKeyword.IS,

View file

@ -22,7 +22,7 @@ pandas==2.0.3
pydantic==2.5.3
#pygame==2.1.3
#pymilvus==2.2.8
pytest==7.2.2
# pytest==7.2.2 # test extras require
python_docx==0.8.11
PyYAML==6.0.1
# sentence_transformers==2.2.2
@ -38,7 +38,7 @@ typing-inspect==0.8.0
typing_extensions==4.9.0
libcst==1.0.1
qdrant-client==1.7.0
pytest-mock==3.11.1
# pytest-mock==3.11.1 # test extras require
# open-interpreter==0.1.7; python_version>"3.9" # Conflict with openai 1.x
ta==0.10.2
semantic-kernel==0.4.3.dev0
@ -57,5 +57,5 @@ gitignore-parser==0.1.9
websockets~=12.0
networkx~=3.2.1
google-generativeai==0.3.2
playwright==1.40.0
# playwright==1.40.0 # playwright extras require
anytree

View file

@ -57,7 +57,7 @@ extras_require["dev"] = (["pylint~=3.0.3", "black~=23.3.0", "isort~=5.12.0", "pr
setup(
name="metagpt",
version="0.5.2",
version="0.6.0",
description="The Multi-Agent Framework",
long_description=long_description,
long_description_content_type="text/markdown",

File diff suppressed because one or more lines are too long

View file

@ -26,5 +26,32 @@ async def test_rebuild():
assert graph_file_repo.changed_files
@pytest.mark.parametrize(
    ("path", "direction", "diff", "want"),
    [
        ("metagpt/startup.py", "=", ".", "metagpt/startup.py"),
        ("metagpt/startup.py", "+", "MetaGPT", "MetaGPT/metagpt/startup.py"),
        ("metagpt/startup.py", "-", "metagpt", "startup.py"),
    ],
)
def test_align_path(path, direction, diff, want):
    """_align_root rebases `path` per the (direction, diff) pair: "=" keeps it,
    "+" prepends the prefix, "-" strips it."""
    res = RebuildClassView._align_root(path=path, direction=direction, diff_path=diff)
    assert res == want
@pytest.mark.parametrize(
    ("path_root", "package_root", "want_direction", "want_diff"),
    [
        ("/Users/x/github/MetaGPT/metagpt", "/Users/x/github/MetaGPT/metagpt", "=", "."),
        ("/Users/x/github/MetaGPT", "/Users/x/github/MetaGPT/metagpt", "-", "metagpt"),
        ("/Users/x/github/MetaGPT/metagpt", "/Users/x/github/MetaGPT", "+", "metagpt"),
    ],
)
def test_diff_path(path_root, package_root, want_direction, want_diff):
    """_diff_path reports which root is deeper and the relative path between them."""
    direction, diff = RebuildClassView._diff_path(path_root=Path(path_root), package_root=Path(package_root))
    assert direction == want_direction
    assert diff == want_diff


if __name__ == "__main__":
    pytest.main([__file__, "-s"])

View file

@ -0,0 +1,55 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2024/1/4
@Author : mashenquan
@File : test_rebuild_sequence_view.py
"""
from pathlib import Path
import pytest
from metagpt.actions.rebuild_sequence_view import RebuildSequenceView
from metagpt.config import CONFIG
from metagpt.const import GRAPH_REPO_FILE_REPO
from metagpt.llm import LLM
from metagpt.utils.common import aread
from metagpt.utils.file_repository import FileRepository
from metagpt.utils.git_repository import ChangeType
@pytest.mark.asyncio
async def test_rebuild():
    """End-to-end: seed the repo's graph DB with a saved networkx fixture, run
    RebuildSequenceView, and expect the graph file repo to report changes."""
    # Mock: preload a serialized graph DB so the action has data to work on.
    data = await aread(filename=Path(__file__).parent / "../../data/graph_db/networkx.json")
    graph_db_filename = Path(CONFIG.git_repo.workdir.name).with_suffix(".json")
    await FileRepository.save_file(
        filename=str(graph_db_filename),
        relative_path=GRAPH_REPO_FILE_REPO,
        content=data,
    )
    CONFIG.git_repo.add_change({f"{GRAPH_REPO_FILE_REPO}/{graph_db_filename}": ChangeType.UNTRACTED})
    CONFIG.git_repo.commit("commit1")

    # NOTE(review): this invokes a live LLM via LLM() — not hermetic; confirm
    # it is meant to run outside the mocked-LLM suite.
    action = RebuildSequenceView(
        name="RedBean", context=str(Path(__file__).parent.parent.parent.parent / "metagpt"), llm=LLM()
    )
    await action.run()
    graph_file_repo = CONFIG.git_repo.new_file_repository(relative_path=GRAPH_REPO_FILE_REPO)
    assert graph_file_repo.changed_files
@pytest.mark.parametrize(
    ("root", "pathname", "want"),
    [
        # This very test file, addressed by its last two path components.
        (Path(__file__).parent.parent.parent, "/".join(__file__.split("/")[-2:]), Path(__file__)),
        # A path that exists nowhere under root resolves to None.
        (Path(__file__).parent.parent.parent, "f/g.txt", None),
    ],
)
def test_get_full_filename(root, pathname, want):
    """_get_full_filename resolves a path suffix to a real file under root, else None."""
    res = RebuildSequenceView._get_full_filename(root=root, pathname=pathname)
    assert res == want


if __name__ == "__main__":
    pytest.main([__file__, "-s"])

View file

@ -6,6 +6,8 @@
@File : test_skill_loader.py
@Desc : Unit tests.
"""
from pathlib import Path
import pytest
from metagpt.config import CONFIG
@ -23,7 +25,8 @@ async def test_suite():
{"id": 6, "name": "knowledge", "type": "builtin", "config": {}, "enabled": True},
{"id": 6, "name": "web_search", "type": "builtin", "config": {}, "enabled": True},
]
loader = await SkillsDeclaration.load()
pathname = Path(__file__).parent / "../../../docs/.well-known/skills.yaml"
loader = await SkillsDeclaration.load(skill_yaml_file_name=pathname)
skills = loader.get_skill_list()
assert skills
assert len(skills) >= 3