Mirror of https://github.com/FoundationAgents/MetaGPT.git (synced 2026-05-15 11:02:36 +02:00)

Merge pull request #669 from garylin2099/llm_cache
Add llm response cache to speed up testing

Commit 42bb40a0f6
44 changed files with 203 additions and 8 deletions
.github/workflows/unittest.yaml (6 changes, vendored)

@@ -24,7 +24,12 @@ jobs:
        run: |
          echo "${{ secrets.METAGPT_KEY_YAML }}" | base64 -d > config/key.yaml
          pytest tests/ --doctest-modules --cov=./metagpt/ --cov-report=xml:cov.xml --cov-report=html:htmlcov --durations=20 | tee unittest.txt
      - name: Show coverage report
        run: |
          coverage report -m
      - name: Show failed tests and overall summary
        run: |
          grep -E "FAILED tests|[0-9]+ passed," unittest.txt
      - name: Upload pytest test results
        uses: actions/upload-artifact@v3
        with:

@@ -32,6 +37,7 @@ jobs:
          path: |
            ./unittest.txt
            ./htmlcov/
            ./tests/data/rsp_cache_new.json
          retention-days: 3
        if: ${{ always() }}
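The uploaded artifacts now include ./tests/data/rsp_cache_new.json next to the test log and coverage report. That file is the full response cache written out at the end of the pytest session (see the rsp_cache fixture below), so responses newly recorded during a CI run can be folded back into the committed cache. A minimal sketch of that maintenance step, assuming the artifact has been downloaded into the working tree; this helper is not part of the commit:

# Hypothetical helper, not part of PR #669: merge the CI-exported cache back
# into the committed tests/data/rsp_cache.json.
import json
from pathlib import Path

committed = Path("tests/data/rsp_cache.json")
exported = Path("tests/data/rsp_cache_new.json")

cache = json.loads(committed.read_text(encoding="utf-8")) if committed.exists() else {}
cache.update(json.loads(exported.read_text(encoding="utf-8")))  # newly recorded prompts win
committed.write_text(json.dumps(cache, indent=4, ensure_ascii=False), encoding="utf-8")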
@@ -7,19 +7,86 @@
"""

import asyncio
import json
import logging
import os
import re
from unittest.mock import Mock
from typing import Optional

import pytest

from metagpt.config import CONFIG, Config
from metagpt.const import DEFAULT_WORKSPACE_ROOT
from metagpt.const import DEFAULT_WORKSPACE_ROOT, TEST_DATA_PATH
from metagpt.llm import LLM
from metagpt.logs import logger
from metagpt.provider.openai_api import OpenAILLM
from metagpt.utils.git_repository import GitRepository


class MockLLM(OpenAILLM):
    rsp_cache: dict = {}

    async def original_aask(
        self,
        msg: str,
        system_msgs: Optional[list[str]] = None,
        format_msgs: Optional[list[dict[str, str]]] = None,
        timeout=3,
        stream=True,
    ):
        """A copy of metagpt.provider.base_llm.BaseLLM.aask, we can't use super().aask because it will be mocked"""
        if system_msgs:
            message = self._system_msgs(system_msgs)
        else:
            message = [self._default_system_msg()] if self.use_system_prompt else []
        if format_msgs:
            message.extend(format_msgs)
        message.append(self._user_msg(msg))
        rsp = await self.acompletion_text(message, stream=stream, timeout=timeout)
        return rsp

    async def aask(
        self,
        msg: str,
        system_msgs: Optional[list[str]] = None,
        format_msgs: Optional[list[dict[str, str]]] = None,
        timeout=3,
        stream=True,
    ) -> str:
        if msg not in self.rsp_cache:
            # Call the original unmocked method
            rsp = await self.original_aask(msg, system_msgs, format_msgs, timeout, stream)
            logger.info(f"Added '{rsp[:20]}' ... to response cache")
            self.rsp_cache[msg] = rsp
            return rsp
        else:
            logger.info("Use response cache")
            return self.rsp_cache[msg]


@pytest.fixture(scope="session")
def rsp_cache():
    # model_version = CONFIG.openai_api_model
    rsp_cache_file_path = TEST_DATA_PATH / "rsp_cache.json"  # read repo-provided
    new_rsp_cache_file_path = TEST_DATA_PATH / "rsp_cache_new.json"  # exporting a new copy
    if os.path.exists(rsp_cache_file_path):
        with open(rsp_cache_file_path, "r") as f1:
            rsp_cache_json = json.load(f1)
    else:
        rsp_cache_json = {}
    yield rsp_cache_json
    with open(new_rsp_cache_file_path, "w") as f2:
        json.dump(rsp_cache_json, f2, indent=4, ensure_ascii=False)


@pytest.fixture(scope="function")
def llm_mock(rsp_cache, mocker):
    llm = MockLLM()
    llm.rsp_cache = rsp_cache
    mocker.patch("metagpt.provider.base_llm.BaseLLM.aask", llm.aask)
    yield mocker


class Context:
    def __init__(self):
        self._llm_ui = None

@@ -44,12 +111,6 @@ def llm_api():
    logger.info("Tearing down the test")


@pytest.fixture(scope="function")
def mock_llm():
    # Create a mock LLM for testing
    return Mock()


@pytest.fixture(scope="session")
def proxy():
    pattern = re.compile(
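Taken together: rsp_cache loads the committed cache once per session and writes everything recorded during the run to rsp_cache_new.json on teardown, while llm_mock patches metagpt.provider.base_llm.BaseLLM.aask with the MockLLM wrapper for each test that requests it. A minimal usage sketch (illustrative, not one of the tests changed in this commit):

import pytest

from metagpt.llm import LLM


@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")  # BaseLLM.aask is patched with MockLLM.aask
async def test_aask_served_from_cache():
    llm = LLM()
    first = await llm.aask("say hello")   # miss: real API call, response recorded in rsp_cache
    second = await llm.aask("say hello")  # hit: answered from the cache, no API call
    assert first == second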
tests/data/rsp_cache.json (78 lines, new file)
File diff suppressed because one or more lines are too long
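Given how the cache is filled (rsp_cache[msg] = rsp) and dumped (indent=4, ensure_ascii=False), this new file is a flat JSON object mapping each prompt string passed to aask() to the recorded completion. A small inspection snippet, assuming the suite has been run at least once so the file exists:

# Illustrative only: peek at the committed cache; keys are prompts, values are
# the recorded LLM completions.
import json

with open("tests/data/rsp_cache.json", encoding="utf-8") as f:
    cache = json.load(f)

for prompt, response in list(cache.items())[:3]:
    print(prompt[:60], "->", response[:60])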
@@ -117,6 +117,7 @@ if __name__ == '__main__':

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_debug_error():
    CONFIG.src_workspace = CONFIG.git_repo.workdir / uuid.uuid4().hex
    ctx = RunCodeContext(

@@ -17,6 +17,7 @@ from tests.metagpt.actions.mock_markdown import PRD_SAMPLE

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_design_api():
    inputs = ["我们需要一个音乐播放器,它应该有播放、暂停、上一曲、下一曲等功能。", PRD_SAMPLE]
    for prd in inputs:

@@ -11,6 +11,7 @@ from metagpt.actions.design_api_review import DesignReview

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_design_api_review():
    prd = "我们需要一个音乐播放器,它应该有播放、暂停、上一曲、下一曲等功能。"
    api_design = """

@@ -20,6 +20,7 @@ context = """

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_generate_questions():
    action = GenerateQuestions()
    rsp = await action.run(context)

@@ -54,6 +54,7 @@ async def test_generate_table(invoice_path: Path, expected_result: dict):
    ("invoice_path", "query", "expected_result"),
    [(Path("invoices/invoice-1.pdf"), "Invoicing date", "2023年02月03日")],
)
@pytest.mark.usefixtures("llm_mock")
async def test_reply_question(invoice_path: Path, query: dict, expected_result: str):
    invoice_path = TEST_DATA_PATH / invoice_path
    ocr_result = await InvoiceOCR().run(file_path=Path(invoice_path))
@@ -12,6 +12,7 @@ from metagpt.logs import logger

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_prepare_interview():
    action = PrepareInterview()
    rsp = await action.run("I just graduated and hope to find a job as a Python engineer")

@@ -18,6 +18,7 @@ from tests.metagpt.actions.mock_json import DESIGN, PRD

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_design_api():
    await FileRepository.save_file("1.txt", content=str(PRD), relative_path=PRDS_FILE_REPO)
    await FileRepository.save_file("1.txt", content=str(DESIGN), relative_path=SYSTEM_DESIGN_FILE_REPO)

@@ -177,6 +177,7 @@ class Snake:

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_summarize_code():
    CONFIG.src_workspace = CONFIG.git_repo.workdir / "src"
    await FileRepository.save_file(filename="1.json", relative_path=SYSTEM_DESIGN_FILE_REPO, content=DESIGN_CONTENT)

@@ -33,6 +33,7 @@ from metagpt.schema import Message
        ),
    ],
)
@pytest.mark.usefixtures("llm_mock")
async def test_prompt(agent_description, language, context, knowledge, history_summary):
    # Prerequisites
    CONFIG.agent_description = agent_description

@@ -28,6 +28,7 @@ from tests.metagpt.actions.mock_markdown import TASKS_2, WRITE_CODE_PROMPT_SAMPLE

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code():
    context = CodingContext(
        filename="task_filename.py", design_doc=Document(content="设计一个名为'add'的函数,该函数接受两个整数作为输入,并返回它们的和。")

@@ -44,6 +45,7 @@ async def test_write_code():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_directly():
    prompt = WRITE_CODE_PROMPT_SAMPLE + "\n" + TASKS_2[0]
    llm = LLM()

@@ -52,6 +54,7 @@ async def test_write_code_directly():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_deps():
    # Prerequisites
    CONFIG.src_workspace = CONFIG.git_repo.workdir / "snake1/snake1"

@@ -12,6 +12,7 @@ from metagpt.schema import CodingContext, Document

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_review(capfd):
    code = """
def add(a, b):

@@ -27,12 +27,14 @@ class Person:
    ],
    ids=["google", "numpy", "sphinx"],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_docstring(style: str, part: str):
    ret = await WriteDocstring().run(code, style=style)
    assert part in ret


@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write():
    code = await WriteDocstring.write_docstring(__file__)
    assert code
@@ -18,6 +18,7 @@ from metagpt.utils.file_repository import FileRepository

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_prd():
    product_manager = ProductManager()
    requirements = "开发一个基于大语言模型与私有知识库的搜索引擎,希望可以基于大语言模型进行搜索总结"

@@ -11,6 +11,7 @@ from metagpt.actions.write_prd_review import WritePRDReview

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_prd_review():
    prd = """
Introduction: This is a new feature for our product.

@@ -46,6 +46,7 @@ CONTEXT = """

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_review():
    write_review = WriteReview()
    review = await write_review.run(CONTEXT)

@@ -16,6 +16,7 @@ from metagpt.actions.write_teaching_plan import WriteTeachingPlanPart
    ("topic", "context"),
    [("Title", "Lesson 1: Learn to draw an apple."), ("Teaching Content", "Lesson 1: Learn to draw an apple.")],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_teaching_plan_part(topic, context):
    action = WriteTeachingPlanPart(topic=topic, context=context)
    rsp = await action.run()

@@ -13,6 +13,7 @@ from metagpt.schema import Document, TestingContext

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_test():
    code = """
import random

@@ -39,6 +40,7 @@ async def test_write_test():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_invalid_code(mocker):
    # Mock the _aask method to return an invalid code string
    mocker.patch.object(WriteTest, "_aask", return_value="Invalid Code String")

@@ -14,6 +14,7 @@ from metagpt.actions.write_tutorial import WriteContent, WriteDirectory

@pytest.mark.asyncio
@pytest.mark.parametrize(("language", "topic"), [("English", "Write a tutorial about Python")])
@pytest.mark.usefixtures("llm_mock")
async def test_write_directory(language: str, topic: str):
    ret = await WriteDirectory(language=language).run(topic=topic)
    assert isinstance(ret, dict)

@@ -29,6 +30,7 @@ async def test_write_directory(language: str, topic: str):
    ("language", "topic", "directory"),
    [("English", "Write a tutorial about Python", {"Introduction": ["What is Python?", "Why learn Python?"]})],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_content(language: str, topic: str, directory: Dict):
    ret = await WriteContent(language=language, directory=directory).run(topic=topic)
    assert isinstance(ret, str)
@@ -22,6 +22,7 @@ from tests.metagpt.roles.mock import MockMessages

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_architect():
    # Prerequisites
    filename = uuid.uuid4().hex + ".json"

@@ -21,6 +21,7 @@ from metagpt.utils.common import any_to_str

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_run():
    CONFIG.language = "Chinese"

@@ -30,6 +30,7 @@ from tests.metagpt.roles.mock import STRS_FOR_PARSING, TASKS, MockMessages

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_engineer():
    # Prerequisites
    rqno = "20231221155954.json"

@@ -113,6 +114,7 @@ def test_todo():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_new_coding_context():
    # Prerequisites
    demo_path = Path(__file__).parent / "../../data/demo_project"

@@ -41,6 +41,7 @@ from metagpt.schema import Message
        ),
    ],
)
@pytest.mark.usefixtures("llm_mock")
async def test_invoice_ocr_assistant(query: str, invoice_path: Path, invoice_table_path: Path, expected_result: dict):
    invoice_path = TEST_DATA_PATH / invoice_path
    role = InvoiceOCRAssistant()

@@ -13,6 +13,7 @@ from tests.metagpt.roles.mock import MockMessages

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_product_manager():
    product_manager = ProductManager()
    rsp = await product_manager.run(MockMessages.req)

@@ -13,6 +13,7 @@ from tests.metagpt.roles.mock import MockMessages

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_project_manager():
    project_manager = ProjectManager()
    rsp = await project_manager.run(MockMessages.system_design)

@@ -103,6 +103,7 @@ async def test_new_file_name():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_run():
    CONFIG.set_context({"language": "Chinese", "teaching_language": "English"})
    lesson = """
@@ -15,6 +15,7 @@ from metagpt.roles.tutorial_assistant import TutorialAssistant

@pytest.mark.asyncio
@pytest.mark.parametrize(("language", "topic"), [("Chinese", "Write a tutorial about pip")])
@pytest.mark.usefixtures("llm_mock")
async def test_tutorial_assistant(language: str, topic: str):
    role = TutorialAssistant(language=language)
    msg = await role.run(topic)

@@ -21,6 +21,7 @@ def test_action_serialize():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
    action = Action()
    serialized_data = action.model_dump()

@@ -17,6 +17,7 @@ def test_architect_serialize():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_architect_deserialize():
    role = Architect()
    ser_role_dict = role.model_dump(by_alias=True)

@@ -8,6 +8,7 @@ from metagpt.actions.prepare_interview import PrepareInterview

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
    action = PrepareInterview()
    serialized_data = action.model_dump()

@@ -10,6 +10,7 @@ from metagpt.schema import Message

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_product_manager_deserialize():
    role = ProductManager()
    ser_role_dict = role.model_dump(by_alias=True)

@@ -18,6 +18,7 @@ def test_project_manager_serialize():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_project_manager_deserialize():
    role = ProjectManager()
    ser_role_dict = role.model_dump(by_alias=True)

@@ -69,6 +69,7 @@ def test_engineer_serialize():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_engineer_deserialize():
    role = Engineer(use_code_review=True)
    ser_role_dict = role.model_dump()

@@ -96,6 +97,7 @@ def test_role_serdeser_save():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_role_serdeser_interrupt():
    role_c = RoleC()
    shutil.rmtree(SERDESER_PATH.joinpath("team"), ignore_errors=True)
@@ -109,6 +109,7 @@ async def test_team_recover_save():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_team_recover_multi_roles_save():
    idea = "write a snake game"
    stg_path = SERDESER_PATH.joinpath("team")

@@ -17,6 +17,7 @@ def test_write_design_serialize():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_deserialize():
    context = CodingContext(
        filename="test_code.py", design_doc=Document(content="write add function to calculate two numbers")

@@ -9,6 +9,7 @@ from metagpt.schema import CodingContext, Document

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_review_deserialize():
    code_content = """
def div(a: int, b: int = 0):

@@ -22,6 +22,7 @@ def test_write_task_serialize():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_design_deserialize():
    action = WriteDesign()
    serialized_data = action.model_dump()

@@ -31,6 +32,7 @@ async def test_write_design_deserialize():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_task_deserialize():
    action = WriteTasks()
    serialized_data = action.model_dump()

@@ -29,6 +29,7 @@ class Person:
    ],
    ids=["google", "numpy", "sphinx"],
)
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize(style: str, part: str):
    action = WriteDocstring()
    serialized_data = action.model_dump()

@@ -17,6 +17,7 @@ def test_action_serialize():

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
    action = WritePRD()
    serialized_data = action.model_dump()

@@ -42,6 +42,7 @@ CONTEXT = """

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
    action = WriteReview()
    serialized_data = action.model_dump()

@@ -9,6 +9,7 @@ from metagpt.actions.write_tutorial import WriteContent, WriteDirectory

@pytest.mark.asyncio
@pytest.mark.parametrize(("language", "topic"), [("English", "Write a tutorial about Python")])
@pytest.mark.usefixtures("llm_mock")
async def test_write_directory_deserialize(language: str, topic: str):
    action = WriteDirectory()
    serialized_data = action.model_dump()

@@ -30,6 +31,7 @@ async def test_write_directory_deserialize(language: str, topic: str):
    ("language", "topic", "directory"),
    [("English", "Write a tutorial about Python", {"Introduction": ["What is Python?", "Why learn Python?"]})],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_content_deserialize(language: str, topic: str, directory: Dict):
    action = WriteContent(language=language, directory=directory)
    serialized_data = action.model_dump()

@@ -14,6 +14,7 @@ from metagpt.tools.translator import Translator

@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_api")
@pytest.mark.usefixtures("llm_mock")
async def test_translate(llm_api):
    poetries = [
        ("Let life be beautiful like summer flowers", "花"),