feat: add unit tests

This commit is contained in:
莘权 马 2023-12-28 21:19:38 +08:00
parent f76078dedf
commit a2d8d06664
10 changed files with 114 additions and 75 deletions

View file

@ -30,3 +30,13 @@ class Person:
async def test_write_docstring(style: str, part: str):
    """Generate a docstring for the sample code and check the expected fragment is present."""
    result = await WriteDocstring().run(code, style=style)
    assert part in result
@pytest.mark.asyncio
async def test_write():
    """WriteDocstring.write_docstring should yield a non-empty result for this very file."""
    documented = await WriteDocstring.write_docstring(__file__)
    assert documented
# Allow running this test module directly: `python <file>` invokes pytest on it.
if __name__ == "__main__":
    pytest.main([__file__, "-s"])

View file

@ -23,10 +23,14 @@ async def test_write_prd_review():
Timeline: The feature should be ready for testing in 1.5 months.
"""
write_prd_review = WritePRDReview("write_prd_review")
write_prd_review = WritePRDReview(name="write_prd_review")
prd_review = await write_prd_review.run(prd)
# We cannot exactly predict the generated PRD review, but we can check if it is a string and if it is not empty
assert isinstance(prd_review, str)
assert len(prd_review) > 0
if __name__ == "__main__":
pytest.main([__file__, "-s"])

View file

@ -6,53 +6,21 @@
@File : test_write_teaching_plan.py
"""
import asyncio
from typing import Optional
from langchain.llms.base import LLM
from pydantic import BaseModel
import pytest
from metagpt.actions.write_teaching_plan import WriteTeachingPlanPart
from metagpt.config import Config
from metagpt.schema import Message
class MockWriteTeachingPlanPart(WriteTeachingPlanPart):
    """Test double for WriteTeachingPlanPart that stubs out the LLM call."""

    def __init__(self, options, name: str = "", context=None, llm: LLM = None, topic="", language="Chinese"):
        super().__init__(options, name, context, llm, topic, language)

    async def _aask(self, prompt: str, system_msgs: Optional[list[str]] = None) -> str:
        # Return a canned reply wrapped in the action's data tags instead of querying a real LLM.
        return f"{WriteTeachingPlanPart.DATA_BEGIN_TAG}\nprompt\n{WriteTeachingPlanPart.DATA_END_TAG}"
async def mock_write_teaching_plan_part():
    """Drive MockWriteTeachingPlanPart over sample inputs and verify its state after run()."""

    class Inputs(BaseModel):
        # Validated shape of each test seed below.
        input: str
        name: str
        topic: str
        language: str

    inputs = [
        {"input": "AABBCC", "name": "A", "topic": WriteTeachingPlanPart.COURSE_TITLE, "language": "C"},
        {"input": "DDEEFFF", "name": "A1", "topic": "B1", "language": "C1"},
    ]
    for i in inputs:
        seed = Inputs(**i)
        options = Config().runtime_options
        act = MockWriteTeachingPlanPart(options=options, name=seed.name, topic=seed.topic, language=seed.language)
        await act.run([Message(content="")])
        assert act.topic == seed.topic
        assert str(act) == seed.topic
        assert act.name == seed.name
        # BUG FIX: parenthesize the conditional so the assert compares rsp against
        # the expected string. The original `assert a == x if cond else y` parsed as
        # `assert (a == x) if cond else y`, so when cond was false it merely
        # evaluated the truthy string y and the assertion always passed.
        assert act.rsp == ("# prompt" if seed.topic == WriteTeachingPlanPart.COURSE_TITLE else "prompt")
def test_suite():
    """Synchronous entry point that runs the async mock scenario to completion."""
    # asyncio.run creates and tears down a fresh event loop for the coroutine;
    # the old get_event_loop()/run_until_complete pattern is deprecated since
    # Python 3.10 and can pick up a stale loop under pytest.
    asyncio.run(mock_write_teaching_plan_part())
@pytest.mark.asyncio
@pytest.mark.parametrize(
    ("topic", "context"),
    [("Title", "Lesson 1: Learn to draw an apple."), ("Teaching Content", "Lesson 1: Learn to draw an apple.")],
)
async def test_write_teaching_plan_part(topic, context):
    """Each teaching-plan part should produce a non-empty response for the sample lesson."""
    action = WriteTeachingPlanPart(topic=topic, context=context)
    response = await action.run()
    assert response
# Direct execution: run the legacy loop-driven suite, then the pytest-collected tests.
if __name__ == "__main__":
    test_suite()
    pytest.main([__file__, "-s"])

View file

@ -7,35 +7,26 @@
@Desc : Unit tests.
"""
import base64
import pytest
from pydantic import BaseModel
from metagpt.config import CONFIG
from metagpt.learn.text_to_image import text_to_image
@pytest.mark.asyncio
async def test():
    """Exercise text_to_image and validate the returned base64/URL payload.

    NOTE(review): this body appears to contain both a detailed base64 round-trip
    check and a simpler "base64 or http" check over the same prompt — presumably
    two revisions of the same test rendered together; confirm which assertions
    are intended before relying on this block.
    """

    class Input(BaseModel):
        # input: prompt text; size_type: requested image dimensions, e.g. "512x512"
        input: str
        size_type: str

    # Prerequisites: a model endpoint and an OpenAI key must both be configured.
    assert CONFIG.METAGPT_TEXT_TO_IMAGE_MODEL_URL
    assert CONFIG.OPENAI_API_KEY
    inputs = [{"input": "Panda emoji", "size_type": "512x512"}]
    for i in inputs:
        seed = Input(**i)
        base64_data = await text_to_image(seed.input)
        assert base64_data != ""
        print(f"{seed.input} -> {base64_data}")
        # The data URI must carry a ";base64," declaration before the payload.
        flags = ";base64,"
        assert flags in base64_data
        ix = base64_data.find(flags) + len(flags)
        declaration = base64_data[0:ix]
        assert declaration
        data = base64_data[ix:]
        assert data
        # validate=True makes b64decode raise on malformed payloads.
        assert base64.b64decode(data, validate=True)
    data = await text_to_image("Panda emoji", size_type="512x512")
    assert "base64" in data or "http" in data
    # Temporarily drop the model URL to exercise the fallback provider path,
    # then restore it for subsequent tests.
    key = CONFIG.METAGPT_TEXT_TO_IMAGE_MODEL_URL
    CONFIG.METAGPT_TEXT_TO_IMAGE_MODEL_URL = None
    data = await text_to_image("Panda emoji", size_type="512x512")
    assert "base64" in data or "http" in data
    CONFIG.METAGPT_TEXT_TO_IMAGE_MODEL_URL = key
if __name__ == "__main__":

View file

@ -0,0 +1,20 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/12/28
@Author : mashenquan
@File : test_azure_openai.py
"""
from metagpt.config import CONFIG, LLMProviderEnum
from metagpt.llm import LLM
def test_llm():
    """Constructing an Azure-OpenAI-backed LLM should succeed when config is complete."""
    # Prerequisites: placeholder values must have been replaced with real credentials.
    assert CONFIG.DEPLOYMENT_NAME and CONFIG.DEPLOYMENT_NAME != "YOUR_DEPLOYMENT_NAME"
    assert CONFIG.OPENAI_API_KEY and CONFIG.OPENAI_API_KEY != "YOUR_AZURE_API_KEY"
    assert CONFIG.OPENAI_API_VERSION
    assert CONFIG.OPENAI_BASE_URL

    instance = LLM(provider=LLMProviderEnum.AZURE_OPENAI)
    assert instance

View file

@ -0,0 +1,14 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/12/28
@Author : mashenquan
@File : test_metagpt_api.py
"""
from metagpt.config import LLMProviderEnum
from metagpt.llm import LLM
def test_llm():
    """An LLM wired to the MetaGPT provider should be constructible."""
    instance = LLM(provider=LLMProviderEnum.METAGPT)
    assert instance

View file

@ -0,0 +1,25 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/12/28
@Author : mashenquan
@File : test_open_llm_api.py
"""
from metagpt.config import CONFIG, LLMProviderEnum
from metagpt.llm import LLM
from metagpt.provider.open_llm_api import OpenLLMCostManager
def test_llm():
    """An LLM wired to the open-LLM provider should be constructible."""
    instance = LLM(provider=LLMProviderEnum.OPEN_LLM)
    assert instance
def test_cost():
    """OpenLLMCostManager should record non-zero token totals after one update."""
    # Prerequisite: a positive budget so cost accounting is active.
    CONFIG.max_budget = 10
    manager = OpenLLMCostManager()
    manager.update_cost(prompt_tokens=10, completion_tokens=1, model="gpt-35-turbo")
    for total in (manager.get_total_prompt_tokens(), manager.get_total_completion_tokens()):
        assert total > 0

View file

@ -45,9 +45,11 @@ async def test_s3():
@pytest.mark.asyncio
async def test_s3_no_error():
    """S3.cache should return a falsy result when the secret access key is blank."""
    conn = S3()
    original_key = conn.auth_config["aws_secret_access_key"]
    conn.auth_config["aws_secret_access_key"] = ""
    try:
        res = await conn.cache("ABC", ".bak", "script")
        assert not res
    finally:
        # Restore the credential even if cache() raises or the assert fails,
        # so later tests in the session see a valid S3 configuration. The
        # original restored it unconditionally *after* the call, leaking the
        # blanked key whenever cache() threw.
        conn.auth_config["aws_secret_access_key"] = original_key
if __name__ == "__main__":