merge main

This commit is contained in:
geekan 2024-01-08 15:19:38 +08:00
commit 193fc5535a
107 changed files with 2029 additions and 374 deletions

View file

@ -116,7 +116,6 @@ if __name__ == '__main__':
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_debug_error():
context.src_workspace = context.git_repo.workdir / uuid.uuid4().hex
ctx = RunCodeContext(

View file

@ -16,7 +16,6 @@ from metagpt.schema import Message
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_design_api():
inputs = ["我们需要一个音乐播放器,它应该有播放、暂停、上一曲、下一曲等功能。"] # PRD_SAMPLE
repo = context.file_repo

View file

@ -11,7 +11,6 @@ from metagpt.actions.design_api_review import DesignReview
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_design_api_review():
prd = "我们需要一个音乐播放器,它应该有播放、暂停、上一曲、下一曲等功能。"
api_design = """

View file

@ -20,7 +20,6 @@ context = """
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_generate_questions():
action = GenerateQuestions()
rsp = await action.run(context)

View file

@ -54,7 +54,6 @@ async def test_generate_table(invoice_path: Path, expected_result: dict):
("invoice_path", "query", "expected_result"),
[(Path("invoices/invoice-1.pdf"), "Invoicing date", "2023年02月03日")],
)
@pytest.mark.usefixtures("llm_mock")
async def test_reply_question(invoice_path: Path, query: dict, expected_result: str):
invoice_path = TEST_DATA_PATH / invoice_path
ocr_result = await InvoiceOCR().run(file_path=Path(invoice_path))

View file

@ -12,7 +12,6 @@ from metagpt.logs import logger
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_prepare_interview():
action = PrepareInterview()
rsp = await action.run("I just graduated and hope to find a job as a Python engineer")

View file

@ -17,7 +17,6 @@ from tests.metagpt.actions.mock_json import DESIGN, PRD
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_design_api():
await context.file_repo.save_file("1.txt", content=str(PRD), relative_path=PRDS_FILE_REPO)
await context.file_repo.save_file("1.txt", content=str(DESIGN), relative_path=SYSTEM_DESIGN_FILE_REPO)

View file

@ -11,13 +11,46 @@ from pathlib import Path
import pytest
from metagpt.actions.rebuild_class_view import RebuildClassView
from metagpt.config import CONFIG
from metagpt.const import GRAPH_REPO_FILE_REPO
from metagpt.llm import LLM
@pytest.mark.asyncio
async def test_rebuild():
action = RebuildClassView(name="RedBean", context=Path(__file__).parent.parent, llm=LLM())
action = RebuildClassView(
name="RedBean", context=str(Path(__file__).parent.parent.parent.parent / "metagpt"), llm=LLM()
)
await action.run()
graph_file_repo = CONFIG.git_repo.new_file_repository(relative_path=GRAPH_REPO_FILE_REPO)
assert graph_file_repo.changed_files
@pytest.mark.parametrize(
    ("path", "direction", "diff", "want"),
    [
        ("metagpt/startup.py", "=", ".", "metagpt/startup.py"),
        ("metagpt/startup.py", "+", "MetaGPT", "MetaGPT/metagpt/startup.py"),
        ("metagpt/startup.py", "-", "metagpt", "startup.py"),
    ],
)
def test_align_path(path, direction, diff, want):
    """Check RebuildClassView._align_root for the '=', '+' and '-' directions.

    '=' leaves the path alone, '+' prepends the diff prefix, '-' strips it.
    """
    got = RebuildClassView._align_root(path=path, direction=direction, diff_path=diff)
    assert got == want
@pytest.mark.parametrize(
    ("path_root", "package_root", "want_direction", "want_diff"),
    [
        ("/Users/x/github/MetaGPT/metagpt", "/Users/x/github/MetaGPT/metagpt", "=", "."),
        ("/Users/x/github/MetaGPT", "/Users/x/github/MetaGPT/metagpt", "-", "metagpt"),
        ("/Users/x/github/MetaGPT/metagpt", "/Users/x/github/MetaGPT", "+", "metagpt"),
    ],
)
def test_diff_path(path_root, package_root, want_direction, want_diff):
    """Check RebuildClassView._diff_path classification of two roots.

    Equal roots yield ('=', '.'); otherwise the direction marks which side
    is deeper and the diff carries the extra path component.
    """
    got_direction, got_diff = RebuildClassView._diff_path(path_root=Path(path_root), package_root=Path(package_root))
    assert (got_direction, got_diff) == (want_direction, want_diff)
if __name__ == "__main__":

View file

@ -0,0 +1,55 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2024/1/4
@Author : mashenquan
@File : test_rebuild_sequence_view.py
"""
from pathlib import Path
import pytest
from metagpt.actions.rebuild_sequence_view import RebuildSequenceView
from metagpt.config import CONFIG
from metagpt.const import GRAPH_REPO_FILE_REPO
from metagpt.llm import LLM
from metagpt.utils.common import aread
from metagpt.utils.file_repository import FileRepository
from metagpt.utils.git_repository import ChangeType
@pytest.mark.asyncio
async def test_rebuild():
    """End-to-end check of RebuildSequenceView against a pre-built graph DB.

    Seeds the repo's graph-DB file repository with a canned networkx dump,
    commits it, runs the action, and asserts the action produced changes.
    """
    # Mock: load a pre-recorded graph DB so the action does not have to
    # re-parse the codebase from scratch.
    data = await aread(filename=Path(__file__).parent / "../../data/graph_db/networkx.json")
    # NOTE(review): graph DB filename appears to be "<workdir name>.json" by
    # convention — confirm against RebuildSequenceView's own lookup logic.
    graph_db_filename = Path(CONFIG.git_repo.workdir.name).with_suffix(".json")
    await FileRepository.save_file(
        filename=str(graph_db_filename),
        relative_path=GRAPH_REPO_FILE_REPO,
        content=data,
    )
    # Register and commit the seeded file so the repo is clean before the run;
    # any changed_files afterwards must then come from the action itself.
    CONFIG.git_repo.add_change({f"{GRAPH_REPO_FILE_REPO}/{graph_db_filename}": ChangeType.UNTRACTED})
    CONFIG.git_repo.commit("commit1")
    # Point the action at the installed metagpt package source tree.
    action = RebuildSequenceView(
        name="RedBean", context=str(Path(__file__).parent.parent.parent.parent / "metagpt"), llm=LLM()
    )
    await action.run()
    # The action is expected to write updated sequence-view artifacts into
    # the graph-DB file repository.
    graph_file_repo = CONFIG.git_repo.new_file_repository(relative_path=GRAPH_REPO_FILE_REPO)
    assert graph_file_repo.changed_files
@pytest.mark.parametrize(
    ("root", "pathname", "want"),
    [
        (Path(__file__).parent.parent.parent, "/".join(__file__.split("/")[-2:]), Path(__file__)),
        (Path(__file__).parent.parent.parent, "f/g.txt", None),
    ],
)
def test_get_full_filename(root, pathname, want):
    """RebuildSequenceView._get_full_filename resolves a relative pathname
    under root, returning None when no matching file exists."""
    resolved = RebuildSequenceView._get_full_filename(root=root, pathname=pathname)
    assert resolved == want
if __name__ == "__main__":
pytest.main([__file__, "-s"])

View file

@ -8,14 +8,7 @@
import pytest
from metagpt.actions import CollectLinks, research
@pytest.mark.asyncio
async def test_action():
action = CollectLinks()
result = await action.run(topic="baidu")
assert result
from metagpt.actions import research
@pytest.mark.asyncio

View file

@ -177,7 +177,6 @@ class Snake:
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_summarize_code():
context.src_workspace = context.git_repo.workdir / "src"
await context.file_repo.save_file(filename="1.json", relative_path=SYSTEM_DESIGN_FILE_REPO, content=DESIGN_CONTENT)

View file

@ -33,7 +33,6 @@ from metagpt.schema import Message
),
],
)
@pytest.mark.usefixtures("llm_mock")
async def test_prompt(agent_description, language, context, knowledge, history_summary):
# Prerequisites
g_context = Context()

View file

@ -27,7 +27,6 @@ from tests.metagpt.actions.mock_markdown import TASKS_2, WRITE_CODE_PROMPT_SAMPL
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code():
ccontext = CodingContext(
filename="task_filename.py", design_doc=Document(content="设计一个名为'add'的函数,该函数接受两个整数作为输入,并返回它们的和。")
@ -44,7 +43,6 @@ async def test_write_code():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_directly():
prompt = WRITE_CODE_PROMPT_SAMPLE + "\n" + TASKS_2[0]
llm = LLM()
@ -53,7 +51,6 @@ async def test_write_code_directly():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_deps():
# Prerequisites
context.src_workspace = context.git_repo.workdir / "snake1/snake1"

View file

@ -12,7 +12,6 @@ from metagpt.schema import CodingContext, Document
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_review(capfd):
code = """
def add(a, b):

View file

@ -27,14 +27,12 @@ class Person:
],
ids=["google", "numpy", "sphinx"],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_docstring(style: str, part: str):
ret = await WriteDocstring().run(code, style=style)
assert part in ret
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write():
code = await WriteDocstring.write_docstring(__file__)
assert code

View file

@ -8,21 +8,25 @@
"""
import pytest
from metagpt.actions import UserRequirement
from metagpt.actions import UserRequirement, WritePRD
from metagpt.const import DOCS_FILE_REPO, PRDS_FILE_REPO, REQUIREMENT_FILENAME
from metagpt.context import context
from metagpt.logs import logger
from metagpt.roles.product_manager import ProductManager
from metagpt.roles.role import RoleReactMode
from metagpt.schema import Message
from metagpt.utils.common import any_to_str
from metagpt.utils.file_repository import FileRepository
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_prd():
async def test_write_prd(new_filename):
product_manager = ProductManager()
requirements = "开发一个基于大语言模型与私有知识库的搜索引擎,希望可以基于大语言模型进行搜索总结"
await context.file_repo.save_file(filename=REQUIREMENT_FILENAME, content=requirements, relative_path=DOCS_FILE_REPO)
await FileRepository.save_file(filename=REQUIREMENT_FILENAME, content=requirements, relative_path=DOCS_FILE_REPO)
product_manager.rc.react_mode = RoleReactMode.BY_ORDER
prd = await product_manager.run(Message(content=requirements, cause_by=UserRequirement))
assert prd.cause_by == any_to_str(WritePRD)
logger.info(requirements)
logger.info(prd)
@ -30,3 +34,7 @@ async def test_write_prd():
assert prd is not None
assert prd.content != ""
assert context.git_repo.new_file_repository(relative_path=PRDS_FILE_REPO).changed_files
if __name__ == "__main__":
pytest.main([__file__, "-s"])

View file

@ -11,7 +11,6 @@ from metagpt.actions.write_prd_review import WritePRDReview
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_prd_review():
prd = """
Introduction: This is a new feature for our product.

View file

@ -46,7 +46,6 @@ CONTEXT = """
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_review():
write_review = WriteReview()
review = await write_review.run(CONTEXT)

View file

@ -16,7 +16,6 @@ from metagpt.actions.write_teaching_plan import WriteTeachingPlanPart
("topic", "context"),
[("Title", "Lesson 1: Learn to draw an apple."), ("Teaching Content", "Lesson 1: Learn to draw an apple.")],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_teaching_plan_part(topic, context):
action = WriteTeachingPlanPart(topic=topic, context=context)
rsp = await action.run()

View file

@ -13,7 +13,6 @@ from metagpt.schema import Document, TestingContext
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_test():
code = """
import random
@ -40,7 +39,6 @@ async def test_write_test():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_invalid_code(mocker):
# Mock the _aask method to return an invalid code string
mocker.patch.object(WriteTest, "_aask", return_value="Invalid Code String")

View file

@ -14,7 +14,6 @@ from metagpt.actions.write_tutorial import WriteContent, WriteDirectory
@pytest.mark.asyncio
@pytest.mark.parametrize(("language", "topic"), [("English", "Write a tutorial about Python")])
@pytest.mark.usefixtures("llm_mock")
async def test_write_directory(language: str, topic: str):
ret = await WriteDirectory(language=language).run(topic=topic)
assert isinstance(ret, dict)
@ -30,7 +29,6 @@ async def test_write_directory(language: str, topic: str):
("language", "topic", "directory"),
[("English", "Write a tutorial about Python", {"Introduction": ["What is Python?", "Why learn Python?"]})],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_content(language: str, topic: str, directory: Dict):
ret = await WriteContent(language=language, directory=directory).run(topic=topic)
assert isinstance(ret, str)

View file

@ -29,6 +29,16 @@ points = [
]
def assert_almost_equal(actual, expected):
    """Assert that `actual` equals `expected` within an absolute tolerance.

    Args:
        actual: A scalar or sequence of numbers produced by the code under test.
        expected: The reference value; a scalar, list, or tuple. Sequences are
            compared element-wise and must have the same length as `actual`.

    Raises:
        AssertionError: if any element differs by more than 1e-10.
    """
    delta = 1e-10
    # Accept tuples as well as lists: previously a tuple fell through to the
    # scalar branch and crashed with a TypeError on `abs(actual - expected)`.
    if isinstance(expected, (list, tuple)):
        assert len(actual) == len(expected), f"length mismatch: {len(actual)} != {len(expected)}"
        for ac, exp in zip(actual, expected):
            assert abs(ac - exp) <= delta, f"{ac} is not within {delta} of {exp}"
    else:
        assert abs(actual - expected) <= delta, f"{actual} is not within {delta} of {expected}"
def test_qdrant_store():
qdrant_connection = QdrantConnection(memory=True)
vectors_config = VectorParams(size=2, distance=Distance.COSINE)
@ -42,30 +52,30 @@ def test_qdrant_store():
qdrant_store.add("Book", points)
results = qdrant_store.search("Book", query=[1.0, 1.0])
assert results[0]["id"] == 2
assert results[0]["score"] == 0.999106722578389
assert_almost_equal(results[0]["score"], 0.999106722578389)
assert results[1]["id"] == 7
assert results[1]["score"] == 0.9961650411397226
assert_almost_equal(results[1]["score"], 0.9961650411397226)
results = qdrant_store.search("Book", query=[1.0, 1.0], return_vector=True)
assert results[0]["id"] == 2
assert results[0]["score"] == 0.999106722578389
assert results[0]["vector"] == [0.7363563179969788, 0.6765939593315125]
assert_almost_equal(results[0]["score"], 0.999106722578389)
assert_almost_equal(results[0]["vector"], [0.7363563179969788, 0.6765939593315125])
assert results[1]["id"] == 7
assert results[1]["score"] == 0.9961650411397226
assert results[1]["vector"] == [0.7662628889083862, 0.6425272226333618]
assert_almost_equal(results[1]["score"], 0.9961650411397226)
assert_almost_equal(results[1]["vector"], [0.7662628889083862, 0.6425272226333618])
results = qdrant_store.search(
"Book",
query=[1.0, 1.0],
query_filter=Filter(must=[FieldCondition(key="rand_number", range=Range(gte=8))]),
)
assert results[0]["id"] == 8
assert results[0]["score"] == 0.9100373450784073
assert_almost_equal(results[0]["score"], 0.9100373450784073)
assert results[1]["id"] == 9
assert results[1]["score"] == 0.7127610621127889
assert_almost_equal(results[1]["score"], 0.7127610621127889)
results = qdrant_store.search(
"Book",
query=[1.0, 1.0],
query_filter=Filter(must=[FieldCondition(key="rand_number", range=Range(gte=8))]),
return_vector=True,
)
assert results[0]["vector"] == [0.35037919878959656, 0.9366079568862915]
assert results[1]["vector"] == [0.9999677538871765, 0.00802854634821415]
assert_almost_equal(results[0]["vector"], [0.35037919878959656, 0.9366079568862915])
assert_almost_equal(results[1]["vector"], [0.9999677538871765, 0.00802854634821415])

View file

@ -6,6 +6,8 @@
@File : test_skill_loader.py
@Desc : Unit tests.
"""
from pathlib import Path
import pytest
from metagpt.config import CONFIG
@ -23,7 +25,8 @@ async def test_suite():
{"id": 6, "name": "knowledge", "type": "builtin", "config": {}, "enabled": True},
{"id": 6, "name": "web_search", "type": "builtin", "config": {}, "enabled": True},
]
loader = await SkillsDeclaration.load()
pathname = Path(__file__).parent / "../../../docs/.well-known/skills.yaml"
loader = await SkillsDeclaration.load(skill_yaml_file_name=pathname)
skills = loader.get_skill_list()
assert skills
assert len(skills) >= 3

View file

@ -0,0 +1,8 @@
import pytest
@pytest.fixture(autouse=True)
def llm_mock(rsp_cache, mocker, request):
    # Deliberately empty: this autouse fixture shadows the global `llm_mock`
    # fixture so that tests in this (provider) folder hit the real model
    # endpoints instead of the mock — the point of these tests is to exercise
    # each provider's own request methods (the original comment said "the aask
    # and aask functions"; presumably aask plus a completion variant — verify).
    pass

View file

@ -22,7 +22,6 @@ from tests.metagpt.roles.mock import MockMessages
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_architect():
# Prerequisites
filename = uuid.uuid4().hex + ".json"

View file

@ -13,7 +13,6 @@ from pydantic import BaseModel
from metagpt.actions.skill_action import SkillAction
from metagpt.actions.talk_action import TalkAction
from metagpt.config import CONFIG
from metagpt.logs import logger
from metagpt.memory.brain_memory import BrainMemory
from metagpt.roles.assistant import Assistant
from metagpt.schema import Message
@ -21,7 +20,6 @@ from metagpt.utils.common import any_to_str
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_run():
CONFIG.language = "Chinese"
@ -88,7 +86,7 @@ async def test_run():
if not has_action:
break
msg: Message = await role.act()
logger.info(msg)
# logger.info(msg)
assert msg
assert msg.cause_by == seed.cause_by
assert msg.content

View file

@ -30,7 +30,6 @@ from tests.metagpt.roles.mock import STRS_FOR_PARSING, TASKS, MockMessages
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_engineer():
# Prerequisites
rqno = "20231221155954.json"
@ -114,7 +113,6 @@ def test_todo():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_new_coding_context():
# Prerequisites
demo_path = Path(__file__).parent / "../../data/demo_project"

View file

@ -41,7 +41,6 @@ from metagpt.schema import Message
),
],
)
@pytest.mark.usefixtures("llm_mock")
async def test_invoice_ocr_assistant(query: str, invoice_path: Path, invoice_table_path: Path, expected_result: dict):
invoice_path = TEST_DATA_PATH / invoice_path
role = InvoiceOCRAssistant()

View file

@ -13,8 +13,7 @@ from tests.metagpt.roles.mock import MockMessages
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_product_manager():
async def test_product_manager(new_filename):
product_manager = ProductManager()
rsp = await product_manager.run(MockMessages.req)
logger.info(rsp)

View file

@ -13,7 +13,6 @@ from tests.metagpt.roles.mock import MockMessages
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_project_manager():
project_manager = ProjectManager()
rsp = await project_manager.run(MockMessages.system_design)

View file

@ -17,6 +17,7 @@ from metagpt.schema import Message
@pytest.mark.asyncio
@pytest.mark.skip
async def test_init():
class Inputs(BaseModel):
name: str
@ -103,7 +104,6 @@ async def test_new_file_name():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_run():
CONFIG.set_context({"language": "Chinese", "teaching_language": "English"})
lesson = """

View file

@ -15,7 +15,6 @@ from metagpt.roles.tutorial_assistant import TutorialAssistant
@pytest.mark.asyncio
@pytest.mark.parametrize(("language", "topic"), [("Chinese", "Write a tutorial about pip")])
@pytest.mark.usefixtures("llm_mock")
async def test_tutorial_assistant(language: str, topic: str):
role = TutorialAssistant(language=language)
msg = await role.run(topic)

View file

@ -21,7 +21,6 @@ def test_action_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
action = Action()
serialized_data = action.model_dump()

View file

@ -17,7 +17,6 @@ def test_architect_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_architect_deserialize():
role = Architect()
ser_role_dict = role.model_dump(by_alias=True)

View file

@ -8,7 +8,6 @@ from metagpt.actions.prepare_interview import PrepareInterview
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
action = PrepareInterview()
serialized_data = action.model_dump()

View file

@ -10,8 +10,7 @@ from metagpt.schema import Message
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_product_manager_deserialize():
async def test_product_manager_deserialize(new_filename):
role = ProductManager()
ser_role_dict = role.model_dump(by_alias=True)
new_role = ProductManager(**ser_role_dict)

View file

@ -18,7 +18,6 @@ def test_project_manager_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_project_manager_deserialize():
role = ProjectManager()
ser_role_dict = role.model_dump(by_alias=True)

View file

@ -69,7 +69,6 @@ def test_engineer_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_engineer_deserialize():
role = Engineer(use_code_review=True)
ser_role_dict = role.model_dump()
@ -97,7 +96,6 @@ def test_role_serdeser_save():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_role_serdeser_interrupt():
role_c = RoleC()
shutil.rmtree(SERDESER_PATH.joinpath("team"), ignore_errors=True)

View file

@ -109,7 +109,6 @@ async def test_team_recover_save():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_team_recover_multi_roles_save():
idea = "write a snake game"
stg_path = SERDESER_PATH.joinpath("team")

View file

@ -17,7 +17,6 @@ def test_write_design_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_deserialize():
context = CodingContext(
filename="test_code.py", design_doc=Document(content="write add function to calculate two numbers")

View file

@ -9,7 +9,6 @@ from metagpt.schema import CodingContext, Document
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_review_deserialize():
code_content = """
def div(a: int, b: int = 0):

View file

@ -22,7 +22,6 @@ def test_write_task_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_design_deserialize():
action = WriteDesign()
serialized_data = action.model_dump()
@ -32,7 +31,6 @@ async def test_write_design_deserialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_task_deserialize():
action = WriteTasks()
serialized_data = action.model_dump()

View file

@ -29,7 +29,6 @@ class Person:
],
ids=["google", "numpy", "sphinx"],
)
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize(style: str, part: str):
action = WriteDocstring()
serialized_data = action.model_dump()

View file

@ -9,7 +9,7 @@ from metagpt.actions import WritePRD
from metagpt.schema import Message
def test_action_serialize():
def test_action_serialize(new_filename):
action = WritePRD()
ser_action_dict = action.model_dump()
assert "name" in ser_action_dict
@ -17,8 +17,7 @@ def test_action_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
async def test_action_deserialize(new_filename):
action = WritePRD()
serialized_data = action.model_dump()
new_action = WritePRD(**serialized_data)

View file

@ -42,7 +42,6 @@ CONTEXT = """
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
action = WriteReview()
serialized_data = action.model_dump()

View file

@ -9,7 +9,6 @@ from metagpt.actions.write_tutorial import WriteContent, WriteDirectory
@pytest.mark.asyncio
@pytest.mark.parametrize(("language", "topic"), [("English", "Write a tutorial about Python")])
@pytest.mark.usefixtures("llm_mock")
async def test_write_directory_deserialize(language: str, topic: str):
action = WriteDirectory()
serialized_data = action.model_dump()
@ -31,7 +30,6 @@ async def test_write_directory_deserialize(language: str, topic: str):
("language", "topic", "directory"),
[("English", "Write a tutorial about Python", {"Introduction": ["What is Python?", "Why learn Python?"]})],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_content_deserialize(language: str, topic: str, directory: Dict):
action = WriteContent(language=language, directory=directory)
serialized_data = action.model_dump()

View file

@ -18,6 +18,9 @@ from metagpt.actions.write_code import WriteCode
from metagpt.const import SYSTEM_DESIGN_FILE_REPO, TASK_FILE_REPO
from metagpt.schema import (
AIMessage,
ClassAttribute,
ClassMethod,
ClassView,
CodeSummarizeContext,
Document,
Message,
@ -153,5 +156,30 @@ def test_CodeSummarizeContext(file_list, want):
assert want in m
def test_class_view():
    """Exercise the mermaid rendering of ClassAttribute, ClassMethod and ClassView.

    Covers visibility markers (+/#), abstraction (*), static ($), default
    values, alignment indentation, and the assembled class diagram.
    """
    # Attributes: one abstract public int, one static protected str.
    int_attr = ClassAttribute(name="a", value_type="int", default_value="0", visibility="+", abstraction=True)
    str_attr = ClassAttribute(name="b", value_type="str", default_value="0", visibility="#", static=True)
    assert int_attr.get_mermaid(align=1) == "\t+int a=0*"
    assert str_attr.get_mermaid(align=0) == '#str b="0"$'

    # Methods: one abstract public no-arg, one static protected with args and a return type.
    run_method = ClassMethod(name="run", visibility="+", abstraction=True)
    test_method = ClassMethod(
        name="_test",
        visibility="#",
        static=True,
        args=[ClassAttribute(name="a", value_type="str"), ClassAttribute(name="b", value_type="int")],
        return_type="str",
    )
    assert run_method.get_mermaid(align=1) == "\t+run()*"
    assert test_method.get_mermaid(align=0) == "#_test(str a,int b):str$"

    # Assemble the class view and render the full diagram.
    view = ClassView(name="A")
    view.attributes = [int_attr, str_attr]
    view.methods = [run_method, test_method]
    assert (
        view.get_mermaid(align=0)
        == 'class A{\n\t+int a=0*\n\t#str b="0"$\n\t+run()*\n\t#_test(str a,int b):str$\n}\n'
    )
if __name__ == "__main__":
pytest.main([__file__, "-s"])

View file

@ -16,14 +16,14 @@ runner = CliRunner()
@pytest.mark.asyncio
async def test_empty_team():
async def test_empty_team(new_filename):
# FIXME: we're now using "metagpt" cli, so the entrance should be replaced instead.
company = Team()
history = await company.run(idea="Build a simple search system. I will upload my files later.")
logger.info(history)
def test_startup():
def test_startup(new_filename):
args = ["Make a cli snake game"]
result = runner.invoke(app, args)
logger.info(result)

View file

@ -24,13 +24,14 @@ async def test_oas2_svc():
process = subprocess.Popen(["python", str(script_pathname)], cwd=str(workdir), env=env)
await asyncio.sleep(5)
url = "http://localhost:8080/openapi/greeting/dave"
headers = {"accept": "text/plain", "Content-Type": "application/json"}
data = {}
response = requests.post(url, headers=headers, json=data)
assert response.text == "Hello dave\n"
process.terminate()
try:
url = "http://localhost:8080/openapi/greeting/dave"
headers = {"accept": "text/plain", "Content-Type": "application/json"}
data = {}
response = requests.post(url, headers=headers, json=data)
assert response.text == "Hello dave\n"
finally:
process.terminate()
if __name__ == "__main__":

View file

@ -5,6 +5,8 @@
@Author : mashenquan
@File : test_metagpt_text_to_image.py
"""
import base64
from unittest.mock import AsyncMock
import pytest
@ -13,7 +15,14 @@ from metagpt.tools.metagpt_text_to_image import oas3_metagpt_text_to_image
@pytest.mark.asyncio
async def test_draw():
async def test_draw(mocker):
# mock
mock_post = mocker.patch("aiohttp.ClientSession.post")
mock_response = AsyncMock()
mock_response.status = 200
mock_response.json.return_value = {"images": [base64.b64encode(b"success")], "parameters": {"size": 1110}}
mock_post.return_value.__aenter__.return_value = mock_response
# Prerequisites
assert CONFIG.METAGPT_TEXT_TO_IMAGE_MODEL_URL

View file

@ -3,7 +3,7 @@
"""
@Time : 2023/12/26
@Author : mashenquan
@File : test_hello.py
@File : test_openapi_v3_hello.py
"""
import asyncio
import subprocess
@ -24,13 +24,14 @@ async def test_hello():
process = subprocess.Popen(["python", str(script_pathname)], cwd=workdir, env=env)
await asyncio.sleep(5)
url = "http://localhost:8082/openapi/greeting/dave"
headers = {"accept": "text/plain", "Content-Type": "application/json"}
data = {}
response = requests.post(url, headers=headers, json=data)
assert response.text == "Hello dave\n"
process.terminate()
try:
url = "http://localhost:8082/openapi/greeting/dave"
headers = {"accept": "text/plain", "Content-Type": "application/json"}
data = {}
response = requests.post(url, headers=headers, json=data)
assert response.text == "Hello dave\n"
finally:
process.terminate()
if __name__ == "__main__":

View file

@ -7,15 +7,20 @@
"""
from __future__ import annotations
import json
from pathlib import Path
from typing import Callable
import pytest
import tests.data.search
from metagpt.config import CONFIG
from metagpt.logs import logger
from metagpt.tools import SearchEngineType
from metagpt.tools.search_engine import SearchEngine
search_cache_path = Path(tests.data.search.__path__[0])
class MockSearchEnine:
async def run(self, query: str, max_results: int = 8, as_string: bool = True) -> str | list[dict[str, str]]:
@ -41,16 +46,23 @@ class MockSearchEnine:
(SearchEngineType.CUSTOM_ENGINE, MockSearchEnine().run, 6, False),
],
)
async def test_search_engine(search_engine_type, run_func: Callable, max_results: int, as_string: bool):
async def test_search_engine(search_engine_type, run_func: Callable, max_results: int, as_string: bool, aiohttp_mocker):
# Prerequisites
cache_json_path = None
if search_engine_type is SearchEngineType.SERPAPI_GOOGLE:
assert CONFIG.SERPAPI_API_KEY and CONFIG.SERPAPI_API_KEY != "YOUR_API_KEY"
cache_json_path = search_cache_path / f"serpapi-metagpt-{max_results}.json"
elif search_engine_type is SearchEngineType.DIRECT_GOOGLE:
assert CONFIG.GOOGLE_API_KEY and CONFIG.GOOGLE_API_KEY != "YOUR_API_KEY"
assert CONFIG.GOOGLE_CSE_ID and CONFIG.GOOGLE_CSE_ID != "YOUR_CSE_ID"
elif search_engine_type is SearchEngineType.SERPER_GOOGLE:
assert CONFIG.SERPER_API_KEY and CONFIG.SERPER_API_KEY != "YOUR_API_KEY"
cache_json_path = search_cache_path / f"serper-metagpt-{max_results}.json"
if cache_json_path:
with open(cache_json_path) as f:
data = json.load(f)
aiohttp_mocker.set_json(data)
search_engine = SearchEngine(search_engine_type, run_func)
rsp = await search_engine.run("metagpt", max_results, as_string)
logger.info(rsp)

View file

@ -14,7 +14,6 @@ from metagpt.tools.translator import Translator
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_api")
@pytest.mark.usefixtures("llm_mock")
async def test_translate(llm_api):
poetries = [
("Let life be beautiful like summer flowers", ""),

View file

@ -13,9 +13,9 @@ from metagpt.utils.parse_html import WebPage
@pytest.mark.parametrize(
"browser_type, use_proxy, kwagrs, url, urls",
[
("chromium", {"proxy": True}, {}, "https://deepwisdom.ai", ("https://deepwisdom.ai",)),
("firefox", {}, {"ignore_https_errors": True}, "https://deepwisdom.ai", ("https://deepwisdom.ai",)),
("webkit", {}, {"ignore_https_errors": True}, "https://deepwisdom.ai", ("https://deepwisdom.ai",)),
("chromium", {"proxy": True}, {}, "https://www.deepwisdom.ai", ("https://www.deepwisdom.ai",)),
("firefox", {}, {"ignore_https_errors": True}, "https://www.deepwisdom.ai", ("https://www.deepwisdom.ai",)),
("webkit", {}, {"ignore_https_errors": True}, "https://www.deepwisdom.ai", ("https://www.deepwisdom.ai",)),
],
ids=["chromium-normal", "firefox-normal", "webkit-normal"],
)
@ -23,6 +23,7 @@ async def test_scrape_web_page(browser_type, use_proxy, kwagrs, url, urls, proxy
global_proxy = CONFIG.global_proxy
try:
if use_proxy:
server, proxy = await proxy
CONFIG.global_proxy = proxy
browser = web_browser_engine_playwright.PlaywrightWrapper(browser_type=browser_type, **kwagrs)
result = await browser.run(url)
@ -35,6 +36,7 @@ async def test_scrape_web_page(browser_type, use_proxy, kwagrs, url, urls, proxy
assert len(results) == len(urls) + 1
assert all(("MetaGPT" in i.inner_text) for i in results)
if use_proxy:
server.close()
assert "Proxy:" in capfd.readouterr().out
finally:
CONFIG.global_proxy = global_proxy

View file

@ -26,6 +26,7 @@ async def test_scrape_web_page(browser_type, use_proxy, url, urls, proxy, capfd)
global_proxy = CONFIG.global_proxy
try:
if use_proxy:
server, proxy = await proxy
CONFIG.global_proxy = proxy
browser = web_browser_engine_selenium.SeleniumWrapper(browser_type=browser_type)
result = await browser.run(url)
@ -38,6 +39,7 @@ async def test_scrape_web_page(browser_type, use_proxy, url, urls, proxy, capfd)
assert len(results) == len(urls) + 1
assert all(("MetaGPT" in i.inner_text) for i in results)
if use_proxy:
server.close()
assert "Proxy:" in capfd.readouterr().out
finally:
CONFIG.global_proxy = global_proxy

View file

@ -36,6 +36,7 @@ from metagpt.utils.common import (
read_file_block,
read_json_file,
require_python_version,
split_namespace,
)
@ -163,6 +164,23 @@ class TestGetProjectRoot:
assert concat_namespace("a", "b", "c", "e") == "a:b:c:e"
assert concat_namespace("a", "b", "c", "e", "f") == "a:b:c:e:f"
@pytest.mark.parametrize(
    ("val", "want"),
    [
        (
            "tests/metagpt/test_role.py:test_react:Input:subscription",
            ["tests/metagpt/test_role.py", "test_react", "Input", "subscription"],
        ),
        (
            "tests/metagpt/test_role.py:test_react:Input:goal",
            ["tests/metagpt/test_role.py", "test_react", "Input", "goal"],
        ),
    ],
)
def test_split_namespace(self, val, want):
    """split_namespace breaks a ':'-joined namespace string into its parts."""
    parts = split_namespace(val)
    assert parts == want
def test_read_json_file(self):
assert read_json_file(str(Path(__file__).parent / "../../data/ut_writer/yft_swaggerApi.json"), encoding="utf-8")
with pytest.raises(FileNotFoundError):

View file

@ -56,7 +56,7 @@ async def test_js_parser():
repo_parser = RepoParser(base_directory=data.path)
symbols = repo_parser.generate_symbols()
for s in symbols:
await GraphRepository.update_graph_db(graph_db=graph, file_info=s)
await GraphRepository.update_graph_db_with_file_info(graph_db=graph, file_info=s)
data = graph.json()
assert data
@ -71,11 +71,11 @@ async def test_codes():
for file_info in symbols:
for code_block in file_info.page_info:
try:
val = code_block.json(ensure_ascii=False)
val = code_block.model_dump_json()
assert val
except TypeError as e:
assert not e
await GraphRepository.update_graph_db(graph_db=graph, file_info=file_info)
await GraphRepository.update_graph_db_with_file_info(graph_db=graph, file_info=file_info)
data = graph.json()
assert data
print(data)

View file

@ -5,11 +5,13 @@
@Author : alexanderwu
@File : test_read_docx.py
"""
import pytest
from metagpt.const import METAGPT_ROOT
from metagpt.utils.read_document import read_docx
@pytest.mark.skip # https://copyprogramming.com/howto/python-docx-error-opening-file-bad-magic-number-for-file-header-eoferror
class TestReadDocx:
def test_read_docx(self):
docx_sample = METAGPT_ROOT / "tests/data/docx_for_test.docx"

View file

@ -6,13 +6,22 @@
@File : test_redis.py
"""
import mock
import pytest
from metagpt.config2 import Config
from metagpt.utils.redis import Redis
async def async_mock_from_url(*args, **kwargs):
mock_client = mock.AsyncMock()
mock_client.set.return_value = None
mock_client.get.side_effect = [b"test", b""]
return mock_client
@pytest.mark.asyncio
@mock.patch("aioredis.from_url", return_value=async_mock_from_url())
async def test_redis():
redis = Config.default().redis

View file

@ -2,20 +2,18 @@
# -*- coding: utf-8 -*-
# @Desc : unittest of repair_llm_raw_output
from metagpt.config import CONFIG
from metagpt.utils.repair_llm_raw_output import (
RepairType,
extract_content_from_output,
repair_invalid_json,
repair_llm_raw_output,
retry_parse_json_text,
)
"""
CONFIG.repair_llm_output should be True before retry_parse_json_text imported.
so we move `from ... impot ...` into each `test_xx` to avoid `Module level import not at top of file` format warning.
"""
CONFIG.repair_llm_output = True
def test_repair_case_sensitivity():
from metagpt.utils.repair_llm_raw_output import repair_llm_raw_output
raw_output = """{
"Original requirements": "Write a 2048 game",
"search Information": "",
@ -36,6 +34,8 @@ def test_repair_case_sensitivity():
def test_repair_special_character_missing():
from metagpt.utils.repair_llm_raw_output import repair_llm_raw_output
raw_output = """[CONTENT]
"Anything UNCLEAR": "No unclear requirements or information."
[CONTENT]"""
@ -66,11 +66,12 @@ def test_repair_special_character_missing():
target_output = '[CONTENT] {"a": "b"} [/CONTENT]'
output = repair_llm_raw_output(output=raw_output, req_keys=["[/CONTENT]"])
print("output\n", output)
assert output == target_output
def test_required_key_pair_missing():
from metagpt.utils.repair_llm_raw_output import repair_llm_raw_output
raw_output = '[CONTENT] {"a": "b"}'
target_output = '[CONTENT] {"a": "b"}\n[/CONTENT]'
@ -107,6 +108,8 @@ xxx
def test_repair_json_format():
from metagpt.utils.repair_llm_raw_output import RepairType, repair_llm_raw_output
raw_output = "{ xxx }]"
target_output = "{ xxx }"
@ -127,6 +130,8 @@ def test_repair_json_format():
def test_repair_invalid_json():
from metagpt.utils.repair_llm_raw_output import repair_invalid_json
raw_output = """{
"key": "value"
},
@ -169,6 +174,8 @@ value
def test_retry_parse_json_text():
from metagpt.utils.repair_llm_raw_output import retry_parse_json_text
invalid_json_text = """{
"Original Requirements": "Create a 2048 game",
"Competitive Quadrant Chart": "quadrantChart\n\ttitle Reach and engagement of campaigns\n\t\tx-axis"
@ -205,6 +212,7 @@ def test_extract_content_from_output():
xxx [CONTENT] xxx [CONTENT] xxxx [/CONTENT]
xxx [CONTENT] xxxx [/CONTENT] xxx [CONTENT][/CONTENT] xxx [CONTENT][/CONTENT] # target pair is the last one
"""
from metagpt.utils.repair_llm_raw_output import extract_content_from_output
output = (
'Sure! Here is the properly formatted JSON output based on the given context:\n\n[CONTENT]\n{\n"'

View file

@ -9,14 +9,31 @@ import uuid
from pathlib import Path
import aiofiles
import mock
import pytest
from metagpt.config2 import Config
from metagpt.utils.common import aread
from metagpt.utils.s3 import S3
@pytest.mark.asyncio
async def test_s3():
@mock.patch("aioboto3.Session")
async def test_s3(mock_session_class):
# Set up the mock response
data = await aread(__file__, "utf-8")
mock_session_object = mock.Mock()
reader_mock = mock.AsyncMock()
reader_mock.read.side_effect = [data.encode("utf-8"), b"", data.encode("utf-8")]
type(reader_mock).url = mock.PropertyMock(return_value="https://mock")
mock_client = mock.AsyncMock()
mock_client.put_object.return_value = None
mock_client.get_object.return_value = {"Body": reader_mock}
mock_client.__aenter__.return_value = mock_client
mock_client.__aexit__.return_value = None
mock_session_object.client.return_value = mock_client
mock_session_class.return_value = mock_session_object
# Prerequisites
s3 = Config.default().s3
assert s3
@ -38,6 +55,7 @@ async def test_s3():
# Mock session env
s3.access_key = "ABC"
type(reader_mock).url = mock.PropertyMock(return_value="")
try:
conn = S3(s3)
res = await conn.cache("ABC", ".bak", "script")
@ -45,6 +63,8 @@ async def test_s3():
except Exception:
pass
await reader.close()
if __name__ == "__main__":
pytest.main([__file__, "-s"])

View file

@ -0,0 +1,13 @@
#!/usr/bin/env python3
# _*_ coding: utf-8 _*_
import pytest
def test_nodeid(request):
print(request.node.nodeid)
assert request.node.nodeid
if __name__ == "__main__":
pytest.main([__file__, "-s"])