merge main

This commit is contained in:
geekan 2024-01-08 15:19:38 +08:00
commit 193fc5535a
107 changed files with 2029 additions and 374 deletions

View file

@ -2,10 +2,17 @@ name: Python application test
on:
workflow_dispatch:
pull_request_target:
push:
branches:
- 'main'
- 'dev'
- '*-release'
jobs:
build:
runs-on: ubuntu-latest
environment: unittest
strategy:
matrix:
# python-version: ['3.9', '3.10', '3.11']
@ -13,6 +20,8 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
@ -22,6 +31,7 @@ jobs:
sh tests/scripts/run_install_deps.sh
- name: Test with pytest
run: |
export ALLOW_OPENAI_API_CALL=0
echo "${{ secrets.METAGPT_KEY_YAML }}" | base64 -d > config/key.yaml
pytest tests/ --doctest-modules --cov=./metagpt/ --cov-report=xml:cov.xml --cov-report=html:htmlcov --durations=20 | tee unittest.txt
- name: Show coverage report
@ -29,7 +39,12 @@ jobs:
coverage report -m
- name: Show failed tests and overall summary
run: |
grep -E "FAILED tests|[0-9]+ passed," unittest.txt
grep -E "FAILED tests|ERROR tests|[0-9]+ passed," unittest.txt
failed_count=$(grep "FAILED|ERROR" unittest.txt | wc -l)
if [[ "$failed_count" -gt 0 ]]; then
echo "$failed_count failed lines found! Task failed."
exit 1
fi
- name: Upload pytest test results
uses: actions/upload-artifact@v3
with:
@ -40,4 +55,3 @@ jobs:
./tests/data/rsp_cache_new.json
retention-days: 3
if: ${{ always() }}

3
.gitignore vendored
View file

@ -172,7 +172,8 @@ tests/metagpt/utils/file_repo_git
*.png
htmlcov
htmlcov.*
*.dot
*.pkl
*-structure.csv
*-structure.json
*.dot
*.dot

View file

@ -1,6 +1,6 @@
The MIT License
Copyright (c) 2023 Chenglin Wu
Copyright (c) 2024 Chenglin Wu
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View file

@ -34,7 +34,10 @@ # MetaGPT: The Multi-Agent Framework
<p align="center">Software Company Multi-Role Schematic (Gradually Implementing)</p>
## News
- Dec 15: [v0.5.0](https://github.com/geekan/MetaGPT/releases/tag/v0.5.0) is released! We introduce **incremental development**, facilitating agents to build up larger projects on top of their previous efforts or existing codebase. We also launch a whole collection of important features, including **multilingual support** (experimental), multiple **programming languages support** (experimental), **incremental development** (experimental), CLI support, pip support, enhanced code review, documentation mechanism, and optimized messaging mechanism!
🚀 Jan 03: Here comes [v0.6.0](https://github.com/geekan/MetaGPT/releases/tag/v0.6.0)! In this version, we added serialization and deserialization of important objects and enabled breakpoint recovery. We upgraded OpenAI package to v1.6.0 and supported Gemini, ZhipuAI, Ollama, OpenLLM, etc. Moreover, we provided extremely simple examples where you need only 7 lines to implement a general election [debate](https://github.com/geekan/MetaGPT/blob/main/examples/debate_simple.py). Check out more details [here](https://github.com/geekan/MetaGPT/releases/tag/v0.6.0)!
🚀 Dec 15: [v0.5.0](https://github.com/geekan/MetaGPT/releases/tag/v0.5.0) is released! We introduced **incremental development**, facilitating agents to build up larger projects on top of their previous efforts or existing codebase. We also launched a whole collection of important features, including **multilingual support** (experimental), multiple **programming languages support** (experimental), **incremental development** (experimental), CLI support, pip support, enhanced code review, documentation mechanism, and optimized messaging mechanism!
## Install

View file

@ -14,8 +14,8 @@ OPENAI_BASE_URL: "https://api.openai.com/v1"
OPENAI_API_MODEL: "gpt-4-1106-preview"
MAX_TOKENS: 4096
RPM: 10
LLM_TYPE: OpenAI # Except for these three major models OpenAI, MetaGPT LLM, and Azure other large models can be distinguished based on the validity of the key.
TIMEOUT: 60 # Timeout for llm invocation
#DEFAULT_PROVIDER: openai
#### if Spark
#SPARK_APPID : "YOUR_APPID"

View file

@ -1,9 +1,9 @@
role:
name: Teacher # Referenced the `Teacher` in `metagpt/roles/teacher.py`.
module: metagpt.roles.teacher # Referenced `metagpt/roles/teacher.py`.
skills: # Refer to the skill `name` of the published skill in `.well-known/skills.yaml`.
- name: text_to_speech
description: Text-to-speech
- name: text_to_image
description: Create a drawing based on the text.
role:
name: Teacher # Referenced the `Teacher` in `metagpt/roles/teacher.py`.
module: metagpt.roles.teacher # Referenced `metagpt/roles/teacher.py`.
skills: # Refer to the skill `name` of the published skill in `docs/.well-known/skills.yaml`.
- name: text_to_speech
description: Text-to-speech
- name: text_to_image
description: Create a drawing based on the text.

View file

@ -11,7 +11,7 @@ paths:
post:
summary: Generate greeting
description: Generates a greeting message.
operationId: hello.post_greeting
operationId: openapi_v3_hello.post_greeting
responses:
200:
description: greeting response

View file

@ -88,6 +88,8 @@ class InvoiceOCR(Action):
async def _ocr(invoice_file_path: Path):
ocr = PaddleOCR(use_angle_cls=True, lang="ch", page_num=1)
ocr_result = ocr.ocr(str(invoice_file_path), cls=True)
for result in ocr_result[0]:
result[1] = (result[1][0], round(result[1][1], 2)) # round long confidence scores to reduce token costs
return ocr_result
async def run(self, file_path: Path, *args, **kwargs) -> list:

View file

@ -9,59 +9,209 @@
import re
from pathlib import Path
import aiofiles
from metagpt.actions import Action
from metagpt.const import CLASS_VIEW_FILE_REPO, GRAPH_REPO_FILE_REPO
from metagpt.config import CONFIG
from metagpt.const import (
AGGREGATION,
COMPOSITION,
DATA_API_DESIGN_FILE_REPO,
GENERALIZATION,
GRAPH_REPO_FILE_REPO,
)
from metagpt.logs import logger
from metagpt.repo_parser import RepoParser
from metagpt.schema import ClassAttribute, ClassMethod, ClassView
from metagpt.utils.common import split_namespace
from metagpt.utils.di_graph_repository import DiGraphRepository
from metagpt.utils.graph_repository import GraphKeyword, GraphRepository
class RebuildClassView(Action):
def __init__(self, name="", context=None, llm=None):
super().__init__(name=name, context=context, llm=llm)
async def run(self, with_messages=None):
graph_repo_pathname = self.git_repo.workdir / GRAPH_REPO_FILE_REPO / self.git_repo.workdir.name
async def run(self, with_messages=None, format=CONFIG.prompt_schema):
graph_repo_pathname = CONFIG.git_repo.workdir / GRAPH_REPO_FILE_REPO / CONFIG.git_repo.workdir.name
graph_db = await DiGraphRepository.load_from(str(graph_repo_pathname.with_suffix(".json")))
repo_parser = RepoParser(base_directory=self.context)
class_views = await repo_parser.rebuild_class_views(path=Path(self.context)) # use pylint
repo_parser = RepoParser(base_directory=Path(self.context))
# use pylint
class_views, relationship_views, package_root = await repo_parser.rebuild_class_views(path=Path(self.context))
await GraphRepository.update_graph_db_with_class_views(graph_db, class_views)
symbols = repo_parser.generate_symbols() # use ast
await GraphRepository.update_graph_db_with_class_relationship_views(graph_db, relationship_views)
# use ast
direction, diff_path = self._diff_path(path_root=Path(self.context).resolve(), package_root=package_root)
symbols = repo_parser.generate_symbols()
for file_info in symbols:
# Align to the same root directory in accordance with `class_views`.
file_info.file = self._align_root(file_info.file, direction, diff_path)
await GraphRepository.update_graph_db_with_file_info(graph_db, file_info)
await self._create_mermaid_class_view(graph_db=graph_db)
await self._save(graph_db=graph_db)
await self._create_mermaid_class_views(graph_db=graph_db)
await graph_db.save()
async def _create_mermaid_class_view(self, graph_db):
pass
# dataset = await graph_db.select(subject=concat_namespace(filename, class_name), predicate=GraphKeyword.HAS_PAGE_INFO)
# if not dataset:
# logger.warning(f"No page info for {concat_namespace(filename, class_name)}")
# return
# code_block_info = CodeBlockInfo.parse_raw(dataset[0].object_)
# src_code = await read_file_block(filename=Path(self.context) / filename, lineno=code_block_info.lineno, end_lineno=code_block_info.end_lineno)
# code_type = ""
# dataset = await graph_db.select(subject=filename, predicate=GraphKeyword.IS)
# for spo in dataset:
# if spo.object_ in ["javascript", "python"]:
# code_type = spo.object_
# break
async def _create_mermaid_class_views(self, graph_db):
path = Path(CONFIG.git_repo.workdir) / DATA_API_DESIGN_FILE_REPO
path.mkdir(parents=True, exist_ok=True)
pathname = path / CONFIG.git_repo.workdir.name
async with aiofiles.open(str(pathname.with_suffix(".mmd")), mode="w", encoding="utf-8") as writer:
content = "classDiagram\n"
logger.debug(content)
await writer.write(content)
# class names
rows = await graph_db.select(predicate=GraphKeyword.IS, object_=GraphKeyword.CLASS)
class_distinct = set()
relationship_distinct = set()
for r in rows:
await RebuildClassView._create_mermaid_class(r.subject, graph_db, writer, class_distinct)
for r in rows:
await RebuildClassView._create_mermaid_relationship(r.subject, graph_db, writer, relationship_distinct)
# try:
# node = await REBUILD_CLASS_VIEW_NODE.fill(context=f"```{code_type}\n{src_code}\n```", llm=self.llm, to=format)
# class_view = node.instruct_content.model_dump()["Class View"]
# except Exception as e:
# class_view = RepoParser.rebuild_class_view(src_code, code_type)
# await graph_db.insert(subject=concat_namespace(filename, class_name), predicate=GraphKeyword.HAS_CLASS_VIEW, object_=class_view)
# logger.info(f"{concat_namespace(filename, class_name)} {GraphKeyword.HAS_CLASS_VIEW} {class_view}")
@staticmethod
async def _create_mermaid_class(ns_class_name, graph_db, file_writer, distinct):
fields = split_namespace(ns_class_name)
if len(fields) > 2:
# Ignore sub-class
return
async def _save(self, graph_db):
class_view_file_repo = self.git_repo.new_file_repository(relative_path=CLASS_VIEW_FILE_REPO)
dataset = await graph_db.select(predicate=GraphKeyword.HAS_CLASS_VIEW)
all_class_view = []
for spo in dataset:
title = f"---\ntitle: {spo.subject}\n---\n"
filename = re.sub(r"[/:]", "_", spo.subject) + ".mmd"
await class_view_file_repo.save(filename=filename, content=title + spo.object_)
all_class_view.append(spo.object_)
await class_view_file_repo.save(filename="all.mmd", content="\n".join(all_class_view))
class_view = ClassView(name=fields[1])
rows = await graph_db.select(subject=ns_class_name)
for r in rows:
name = split_namespace(r.object_)[-1]
name, visibility, abstraction = RebuildClassView._parse_name(name=name, language="python")
if r.predicate == GraphKeyword.HAS_CLASS_PROPERTY:
var_type = await RebuildClassView._parse_variable_type(r.object_, graph_db)
attribute = ClassAttribute(
name=name, visibility=visibility, abstraction=bool(abstraction), value_type=var_type
)
class_view.attributes.append(attribute)
elif r.predicate == GraphKeyword.HAS_CLASS_FUNCTION:
method = ClassMethod(name=name, visibility=visibility, abstraction=bool(abstraction))
await RebuildClassView._parse_function_args(method, r.object_, graph_db)
class_view.methods.append(method)
# update graph db
await graph_db.insert(ns_class_name, GraphKeyword.HAS_CLASS_VIEW, class_view.model_dump_json())
content = class_view.get_mermaid(align=1)
logger.debug(content)
await file_writer.write(content)
distinct.add(ns_class_name)
@staticmethod
async def _create_mermaid_relationship(ns_class_name, graph_db, file_writer, distinct):
s_fields = split_namespace(ns_class_name)
if len(s_fields) > 2:
# Ignore sub-class
return
predicates = {GraphKeyword.IS + v + GraphKeyword.OF: v for v in [GENERALIZATION, COMPOSITION, AGGREGATION]}
mappings = {
GENERALIZATION: " <|-- ",
COMPOSITION: " *-- ",
AGGREGATION: " o-- ",
}
content = ""
for p, v in predicates.items():
rows = await graph_db.select(subject=ns_class_name, predicate=p)
for r in rows:
o_fields = split_namespace(r.object_)
if len(o_fields) > 2:
# Ignore sub-class
continue
relationship = mappings.get(v, " .. ")
link = f"{o_fields[1]}{relationship}{s_fields[1]}"
distinct.add(link)
content += f"\t{link}\n"
if content:
logger.debug(content)
await file_writer.write(content)
@staticmethod
def _parse_name(name: str, language="python"):
pattern = re.compile(r"<I>(.*?)<\/I>")
result = re.search(pattern, name)
abstraction = ""
if result:
name = result.group(1)
abstraction = "*"
if name.startswith("__"):
visibility = "-"
elif name.startswith("_"):
visibility = "#"
else:
visibility = "+"
return name, visibility, abstraction
@staticmethod
async def _parse_variable_type(ns_name, graph_db) -> str:
rows = await graph_db.select(subject=ns_name, predicate=GraphKeyword.HAS_TYPE_DESC)
if not rows:
return ""
vals = rows[0].object_.replace("'", "").split(":")
if len(vals) == 1:
return ""
val = vals[-1].strip()
return "" if val == "NoneType" else val + " "
@staticmethod
async def _parse_function_args(method: ClassMethod, ns_name: str, graph_db: GraphRepository):
rows = await graph_db.select(subject=ns_name, predicate=GraphKeyword.HAS_ARGS_DESC)
if not rows:
return
info = rows[0].object_.replace("'", "")
fs_tag = "("
ix = info.find(fs_tag)
fe_tag = "):"
eix = info.rfind(fe_tag)
if eix < 0:
fe_tag = ")"
eix = info.rfind(fe_tag)
args_info = info[ix + len(fs_tag) : eix].strip()
method.return_type = info[eix + len(fe_tag) :].strip()
if method.return_type == "None":
method.return_type = ""
if "(" in method.return_type:
method.return_type = method.return_type.replace("(", "Tuple[").replace(")", "]")
# parse args
if not args_info:
return
splitter_ixs = []
cost = 0
for i in range(len(args_info)):
if args_info[i] == "[":
cost += 1
elif args_info[i] == "]":
cost -= 1
if args_info[i] == "," and cost == 0:
splitter_ixs.append(i)
splitter_ixs.append(len(args_info))
args = []
ix = 0
for eix in splitter_ixs:
args.append(args_info[ix:eix])
ix = eix + 1
for arg in args:
parts = arg.strip().split(":")
if len(parts) == 1:
method.args.append(ClassAttribute(name=parts[0].strip()))
continue
method.args.append(ClassAttribute(name=parts[0].strip(), value_type=parts[-1].strip()))
@staticmethod
def _diff_path(path_root: Path, package_root: Path) -> (str, str):
if len(str(path_root)) > len(str(package_root)):
return "+", str(path_root.relative_to(package_root))
if len(str(path_root)) < len(str(package_root)):
return "-", str(package_root.relative_to(path_root))
return "=", "."
@staticmethod
def _align_root(path: str, direction: str, diff_path: str):
if direction == "=":
return path
if direction == "+":
return diff_path + "/" + path
else:
return path[len(diff_path) + 1 :]

View file

@ -1,33 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/12/19
@Author : mashenquan
@File : rebuild_class_view_an.py
@Desc : Defines `ActionNode` objects used by rebuild_class_view.py
"""
from metagpt.actions.action_node import ActionNode
CLASS_SOURCE_CODE_BLOCK = ActionNode(
key="Class View",
expected_type=str,
instruction='Generate the mermaid class diagram corresponding to source code in "context."',
example="""
classDiagram
class A {
-int x
+int y
-int speed
-int direction
+__init__(x: int, y: int, speed: int, direction: int)
+change_direction(new_direction: int) None
+move() None
}
""",
)
REBUILD_CLASS_VIEW_NODES = [
CLASS_SOURCE_CODE_BLOCK,
]
REBUILD_CLASS_VIEW_NODE = ActionNode.from_children("RebuildClassView", REBUILD_CLASS_VIEW_NODES)

View file

@ -0,0 +1,60 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2024/1/4
@Author : mashenquan
@File : rebuild_sequence_view.py
@Desc : Rebuild sequence view info
"""
from __future__ import annotations
from pathlib import Path
from typing import List
from metagpt.actions import Action
from metagpt.config import CONFIG
from metagpt.const import GRAPH_REPO_FILE_REPO
from metagpt.logs import logger
from metagpt.utils.common import aread, list_files
from metagpt.utils.di_graph_repository import DiGraphRepository
from metagpt.utils.graph_repository import GraphKeyword
class RebuildSequenceView(Action):
async def run(self, with_messages=None, format=CONFIG.prompt_schema):
graph_repo_pathname = CONFIG.git_repo.workdir / GRAPH_REPO_FILE_REPO / CONFIG.git_repo.workdir.name
graph_db = await DiGraphRepository.load_from(str(graph_repo_pathname.with_suffix(".json")))
entries = await RebuildSequenceView._search_main_entry(graph_db)
for entry in entries:
await self._rebuild_sequence_view(entry, graph_db)
await graph_db.save()
@staticmethod
async def _search_main_entry(graph_db) -> List:
rows = await graph_db.select(predicate=GraphKeyword.HAS_PAGE_INFO)
tag = "__name__:__main__"
entries = []
for r in rows:
if tag in r.subject or tag in r.object_:
entries.append(r)
return entries
async def _rebuild_sequence_view(self, entry, graph_db):
filename = entry.subject.split(":", 1)[0]
src_filename = RebuildSequenceView._get_full_filename(root=self.context, pathname=filename)
content = await aread(filename=src_filename, encoding="utf-8")
content = f"```python\n{content}\n```\n\n---\nTranslate the code above into Mermaid Sequence Diagram."
data = await self.llm.aask(
msg=content, system_msgs=["You are a python code to Mermaid Sequence Diagram translator in function detail"]
)
await graph_db.insert(subject=filename, predicate=GraphKeyword.HAS_SEQUENCE_VIEW, object_=data)
logger.info(data)
@staticmethod
def _get_full_filename(root: str | Path, pathname: str | Path) -> Path | None:
files = list_files(root=root)
postfix = "/" + str(pathname)
for i in files:
if str(i).endswith(postfix):
return i
return None

View file

@ -0,0 +1,16 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2024/1/4
@Author : mashenquan
@File : rebuild_sequence_view_an.py
"""
from metagpt.actions.action_node import ActionNode
from metagpt.utils.mermaid import MMC2
CODE_2_MERMAID_SEQUENCE_DIAGRAM = ActionNode(
key="Program call flow",
expected_type=str,
instruction='Translate the "context" content into "format example" format.',
example=MMC2,
)

View file

@ -133,7 +133,7 @@ class WriteCode(Action):
if not coding_context.code_doc:
# avoid root_path pydantic ValidationError if use WriteCode alone
root_path = self.g_context.src_workspace if self.g_context.src_workspace else ""
coding_context.code_doc = Document(filename=coding_context.filename, root_path=root_path)
coding_context.code_doc = Document(filename=coding_context.filename, root_path=str(root_path))
coding_context.code_doc.content = code
return coding_context

View file

@ -59,11 +59,12 @@ class WriteTest(Action):
self.context.test_doc = Document(
filename="test_" + self.context.code_doc.filename, root_path=TEST_CODES_FILE_REPO
)
fake_root = "/data"
prompt = PROMPT_TEMPLATE.format(
code_to_test=self.context.code_doc.content,
test_file_name=self.context.test_doc.filename,
source_file_path=self.context.code_doc.root_relative_path,
workspace=self.git_repo.workdir,
source_file_path=fake_root + "/" + self.context.code_doc.root_relative_path,
workspace=fake_root,
)
self.context.test_doc.content = await self.write_code(prompt)
return self.context

View file

@ -93,6 +93,11 @@ class Config(metaclass=Singleton):
if v:
provider = k
break
if provider is None:
if self.DEFAULT_PROVIDER:
provider = LLMType(self.DEFAULT_PROVIDER)
else:
raise NotConfiguredException("You should config a LLM configuration first")
if provider is LLMType.GEMINI and not require_python_version(req_version=(3, 10)):
warnings.warn("Use Gemini requires Python >= 3.10")
@ -102,7 +107,6 @@ class Config(metaclass=Singleton):
if provider:
logger.info(f"API: {provider}")
return provider
raise NotConfiguredException("You should config a LLM configuration first")
def get_model_name(self, provider=None) -> str:
provider = provider or self.get_default_llm_provider_enum()

View file

@ -25,6 +25,9 @@ class LLMType(Enum):
AZURE = "azure"
OLLAMA = "ollama"
def __missing__(self, key):
return self.OPENAI
class LLMConfig(YamlModel):
"""Config for LLM

View file

@ -126,3 +126,8 @@ LLM_API_TIMEOUT = 300
# Message id
IGNORED_MESSAGE_ID = "0"
# Class Relationship
GENERALIZATION = "Generalize"
COMPOSITION = "Composite"
AGGREGATION = "Aggregate"

View file

@ -67,7 +67,7 @@ class SkillsDeclaration(BaseModel):
@staticmethod
async def load(skill_yaml_file_name: Path = None) -> "SkillsDeclaration":
if not skill_yaml_file_name:
skill_yaml_file_name = Path(__file__).parent.parent.parent / ".well-known/skills.yaml"
skill_yaml_file_name = Path(__file__).parent.parent.parent / "docs/.well-known/skills.yaml"
async with aiofiles.open(str(skill_yaml_file_name), mode="r") as reader:
data = await reader.read(-1)
skill_data = yaml.safe_load(data)

View file

@ -120,6 +120,7 @@ class GeminiLLM(BaseLLM):
content = chunk.text
log_llm_stream(content)
collected_content.append(content)
log_llm_stream("\n")
full_content = "".join(collected_content)
usage = await self.aget_usage(messages, full_content)

View file

@ -104,6 +104,7 @@ class OllamaLLM(BaseLLM):
else:
# stream finished
usage = self.get_usage(chunk)
log_llm_stream("\n")
self._update_costs(usage)
full_content = "".join(collected_content)

View file

@ -135,6 +135,7 @@ class OpenAILLM(BaseLLM):
async for i in resp:
log_llm_stream(i)
collected_messages.append(i)
log_llm_stream("\n")
full_reply_content = "".join(collected_messages)
usage = self._calc_usage(messages, full_reply_content)

View file

@ -118,6 +118,7 @@ class ZhiPuAILLM(BaseLLM):
usage = meta.get("usage")
else:
print(f"zhipuapi else event: {event.data}", end="")
log_llm_stream("\n")
self._update_costs(usage)
full_content = "".join(collected_content)

View file

@ -12,14 +12,14 @@ import json
import re
import subprocess
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from typing import Dict, List, Optional
import aiofiles
import pandas as pd
from pydantic import BaseModel, Field
from metagpt.const import AGGREGATION, COMPOSITION, GENERALIZATION
from metagpt.logs import logger
from metagpt.utils.common import any_to_str
from metagpt.utils.common import any_to_str, aread
from metagpt.utils.exceptions import handle_exception
@ -46,6 +46,13 @@ class ClassInfo(BaseModel):
methods: Dict[str, str] = Field(default_factory=dict)
class ClassRelationship(BaseModel):
src: str = ""
dest: str = ""
relationship: str = ""
label: Optional[str] = None
class RepoParser(BaseModel):
base_directory: Path = Field(default=None)
@ -60,7 +67,8 @@ class RepoParser(BaseModel):
file_info = RepoFileInfo(file=str(file_path.relative_to(self.base_directory)))
for node in tree:
info = RepoParser.node_to_str(node)
file_info.page_info.append(info)
if info:
file_info.page_info.append(info)
if isinstance(node, ast.ClassDef):
class_methods = [m.name for m in node.body if is_func(m)]
file_info.classes.append({"name": node.name, "methods": class_methods})
@ -110,7 +118,9 @@ class RepoParser(BaseModel):
return output_path
@staticmethod
def node_to_str(node) -> (int, int, str, str | Tuple):
def node_to_str(node) -> CodeBlockInfo | None:
if isinstance(node, ast.Try):
return None
if any_to_str(node) == any_to_str(ast.Expr):
return CodeBlockInfo(
lineno=node.lineno,
@ -129,6 +139,7 @@ class RepoParser(BaseModel):
},
any_to_str(ast.If): RepoParser._parse_if,
any_to_str(ast.AsyncFunctionDef): lambda x: x.name,
any_to_str(ast.AnnAssign): lambda x: RepoParser._parse_variable(x.target),
}
func = mappings.get(any_to_str(node))
if func:
@ -143,7 +154,8 @@ class RepoParser(BaseModel):
else:
raise NotImplementedError(f"Not implement:{val}")
return code_block
raise NotImplementedError(f"Not implement code block:{node.lineno}, {node.end_lineno}, {any_to_str(node)}")
logger.warning(f"Unsupported code block:{node.lineno}, {node.end_lineno}, {any_to_str(node)}")
return None
@staticmethod
def _parse_expr(node) -> List:
@ -164,22 +176,51 @@ class RepoParser(BaseModel):
@staticmethod
def _parse_if(n):
tokens = [RepoParser._parse_variable(n.test.left)]
for item in n.test.comparators:
tokens.append(RepoParser._parse_variable(item))
tokens = []
try:
if isinstance(n.test, ast.BoolOp):
tokens = []
for v in n.test.values:
tokens.extend(RepoParser._parse_if_compare(v))
return tokens
if isinstance(n.test, ast.Compare):
v = RepoParser._parse_variable(n.test.left)
if v:
tokens.append(v)
for item in n.test.comparators:
v = RepoParser._parse_variable(item)
if v:
tokens.append(v)
return tokens
except Exception as e:
logger.warning(f"Unsupported if: {n}, err:{e}")
return tokens
@staticmethod
def _parse_if_compare(n):
if hasattr(n, "left"):
return RepoParser._parse_variable(n.left)
else:
return []
@staticmethod
def _parse_variable(node):
funcs = {
any_to_str(ast.Constant): lambda x: x.value,
any_to_str(ast.Name): lambda x: x.id,
any_to_str(ast.Attribute): lambda x: f"{x.value.id}.{x.attr}",
}
func = funcs.get(any_to_str(node))
if not func:
raise NotImplementedError(f"Not implement:{node}")
return func(node)
try:
funcs = {
any_to_str(ast.Constant): lambda x: x.value,
any_to_str(ast.Name): lambda x: x.id,
any_to_str(ast.Attribute): lambda x: f"{x.value.id}.{x.attr}"
if hasattr(x.value, "id")
else f"{x.attr}",
any_to_str(ast.Call): lambda x: RepoParser._parse_variable(x.func),
any_to_str(ast.Tuple): lambda x: "",
}
func = funcs.get(any_to_str(node))
if not func:
raise NotImplementedError(f"Not implement:{node}")
return func(node)
except Exception as e:
logger.warning(f"Unsupported variable:{node}, err:{e}")
@staticmethod
def _parse_assign(node):
@ -197,18 +238,21 @@ class RepoParser(BaseModel):
raise ValueError(f"{result}")
class_view_pathname = path / "classes.dot"
class_views = await self._parse_classes(class_view_pathname)
relationship_views = await self._parse_class_relationships(class_view_pathname)
packages_pathname = path / "packages.dot"
class_views = RepoParser._repair_namespaces(class_views=class_views, path=path)
class_views, relationship_views, package_root = RepoParser._repair_namespaces(
class_views=class_views, relationship_views=relationship_views, path=path
)
class_view_pathname.unlink(missing_ok=True)
packages_pathname.unlink(missing_ok=True)
return class_views
return class_views, relationship_views, package_root
async def _parse_classes(self, class_view_pathname):
class_views = []
if not class_view_pathname.exists():
return class_views
async with aiofiles.open(str(class_view_pathname), mode="r") as reader:
lines = await reader.readlines()
data = await aread(filename=class_view_pathname, encoding="utf-8")
lines = data.split("\n")
for line in lines:
package_name, info = RepoParser._split_class_line(line)
if not package_name:
@ -229,6 +273,19 @@ class RepoParser(BaseModel):
class_views.append(class_info)
return class_views
async def _parse_class_relationships(self, class_view_pathname) -> List[ClassRelationship]:
relationship_views = []
if not class_view_pathname.exists():
return relationship_views
data = await aread(filename=class_view_pathname, encoding="utf-8")
lines = data.split("\n")
for line in lines:
relationship = RepoParser._split_relationship_line(line)
if not relationship:
continue
relationship_views.append(relationship)
return relationship_views
@staticmethod
def _split_class_line(line):
part_splitor = '" ['
@ -247,6 +304,40 @@ class RepoParser(BaseModel):
info = re.sub(r"<br[^>]*>", "\n", info)
return class_name, info
@staticmethod
def _split_relationship_line(line):
splitters = [" -> ", " [", "];"]
idxs = []
for tag in splitters:
if tag not in line:
return None
idxs.append(line.find(tag))
ret = ClassRelationship()
ret.src = line[0 : idxs[0]].strip('"')
ret.dest = line[idxs[0] + len(splitters[0]) : idxs[1]].strip('"')
properties = line[idxs[1] + len(splitters[1]) : idxs[2]].strip(" ")
mappings = {
'arrowhead="empty"': GENERALIZATION,
'arrowhead="diamond"': COMPOSITION,
'arrowhead="odiamond"': AGGREGATION,
}
for k, v in mappings.items():
if k in properties:
ret.relationship = v
if v != GENERALIZATION:
ret.label = RepoParser._get_label(properties)
break
return ret
@staticmethod
def _get_label(line):
tag = 'label="'
if tag not in line:
return ""
ix = line.find(tag)
eix = line.find('"', ix + len(tag))
return line[ix + len(tag) : eix]
@staticmethod
def _create_path_mapping(path: str | Path) -> Dict[str, str]:
mappings = {
@ -271,9 +362,11 @@ class RepoParser(BaseModel):
return mappings
@staticmethod
def _repair_namespaces(class_views: List[ClassInfo], path: str | Path) -> List[ClassInfo]:
def _repair_namespaces(
class_views: List[ClassInfo], relationship_views: List[ClassRelationship], path: str | Path
) -> (List[ClassInfo], List[ClassRelationship], str):
if not class_views:
return []
return [], [], ""
c = class_views[0]
full_key = str(path).lstrip("/").replace("/", ".")
root_namespace = RepoParser._find_root(full_key, c.package)
@ -290,7 +383,12 @@ class RepoParser(BaseModel):
for c in class_views:
c.package = RepoParser._repair_ns(c.package, new_mappings)
return class_views
for i in range(len(relationship_views)):
v = relationship_views[i]
v.src = RepoParser._repair_ns(v.src, new_mappings)
v.dest = RepoParser._repair_ns(v.dest, new_mappings)
relationship_views[i] = v
return class_views, relationship_views, root_path
@staticmethod
def _repair_ns(package, mappings):

View file

@ -466,7 +466,7 @@ class Role(SerializationMixin, is_polymorphic_base=True):
Use llm to select actions in _think dynamically
"""
actions_taken = 0
rsp = Message(content="No actions taken yet") # will be overwritten after Role _act
rsp = Message(content="No actions taken yet", cause_by=Action) # will be overwritten after Role _act
while actions_taken < self.rc.max_react_loop:
# think
await self._think()

View file

@ -444,3 +444,63 @@ class CodeSummarizeContext(BaseModel):
class BugFixContext(BaseContext):
filename: str = ""
# mermaid class view
class ClassMeta(BaseModel):
name: str = ""
abstraction: bool = False
static: bool = False
visibility: str = ""
class ClassAttribute(ClassMeta):
value_type: str = ""
default_value: str = ""
def get_mermaid(self, align=1) -> str:
content = "".join(["\t" for i in range(align)]) + self.visibility
if self.value_type:
content += self.value_type + " "
content += self.name
if self.default_value:
content += "="
if self.value_type not in ["str", "string", "String"]:
content += self.default_value
else:
content += '"' + self.default_value.replace('"', "") + '"'
if self.abstraction:
content += "*"
if self.static:
content += "$"
return content
class ClassMethod(ClassMeta):
args: List[ClassAttribute] = Field(default_factory=list)
return_type: str = ""
def get_mermaid(self, align=1) -> str:
content = "".join(["\t" for i in range(align)]) + self.visibility
content += self.name + "(" + ",".join([v.get_mermaid(align=0) for v in self.args]) + ")"
if self.return_type:
content += ":" + self.return_type
if self.abstraction:
content += "*"
if self.static:
content += "$"
return content
class ClassView(ClassMeta):
attributes: List[ClassAttribute] = Field(default_factory=list)
methods: List[ClassMethod] = Field(default_factory=list)
def get_mermaid(self, align=1) -> str:
content = "".join(["\t" for i in range(align)]) + "class " + self.name + "{\n"
for v in self.attributes:
content += v.get_mermaid(align=align + 1) + "\n"
for v in self.methods:
content += v.get_mermaid(align=align + 1) + "\n"
content += "".join(["\t" for i in range(align)]) + "}\n"
return content

View file

@ -5,6 +5,12 @@
@Author : mashenquan
@File : metagpt_oas3_api_svc.py
@Desc : MetaGPT OpenAPI Specification 3.0 REST API service
curl -X 'POST' \
'http://localhost:8080/openapi/greeting/dave' \
-H 'accept: text/plain' \
-H 'Content-Type: application/json' \
-d '{}'
"""
from pathlib import Path
@ -15,7 +21,7 @@ import connexion
def oas_http_svc():
"""Start the OAS 3.0 OpenAPI HTTP service"""
print("http://localhost:8080/oas3/ui/")
specification_dir = Path(__file__).parent.parent.parent / ".well-known"
specification_dir = Path(__file__).parent.parent.parent / "docs/.well-known"
app = connexion.AsyncApp(__name__, specification_dir=str(specification_dir))
app.add_api("metagpt_oas3_api.yaml")
app.add_api("openapi.yaml")

View file

@ -23,7 +23,7 @@ async def post_greeting(name: str) -> str:
if __name__ == "__main__":
specification_dir = Path(__file__).parent.parent.parent / ".well-known"
specification_dir = Path(__file__).parent.parent.parent / "docs/.well-known"
app = connexion.AsyncApp(__name__, specification_dir=str(specification_dir))
app.add_api("openapi.yaml", arguments={"title": "Hello World Example"})
app.run(port=8082)

View file

@ -407,6 +407,10 @@ def concat_namespace(*args) -> str:
return ":".join(str(value) for value in args)
def split_namespace(ns_class_name: str) -> List[str]:
return ns_class_name.split(":")
def general_after_log(i: "loguru.Logger", sec_format: str = "%0.3f") -> typing.Callable[["RetryCallState"], None]:
"""
Generates a logging function to be used after a call is retried.
@ -546,3 +550,20 @@ async def read_file_block(filename: str | Path, lineno: int, end_lineno: int):
break
lines.append(line)
return "".join(lines)
def list_files(root: str | Path) -> List[Path]:
files = []
try:
directory_path = Path(root)
if not directory_path.exists():
return []
for file_path in directory_path.iterdir():
if file_path.is_file():
files.append(file_path)
else:
subfolder_files = list_files(root=file_path)
files.extend(subfolder_files)
except Exception as e:
logger.error(f"Error: {e}")
return files

View file

@ -12,9 +12,9 @@ import json
from pathlib import Path
from typing import List
import aiofiles
import networkx
from metagpt.utils.common import aread, awrite
from metagpt.utils.graph_repository import SPO, GraphRepository
@ -55,12 +55,10 @@ class DiGraphRepository(GraphRepository):
if not path.exists():
path.mkdir(parents=True, exist_ok=True)
pathname = Path(path) / self.name
async with aiofiles.open(str(pathname.with_suffix(".json")), mode="w", encoding="utf-8") as writer:
await writer.write(data)
await awrite(filename=pathname.with_suffix(".json"), data=data, encoding="utf-8")
async def load(self, pathname: str | Path):
async with aiofiles.open(str(pathname), mode="r", encoding="utf-8") as reader:
data = await reader.read(-1)
data = await aread(filename=pathname, encoding="utf-8")
m = json.loads(data)
self._repo = networkx.node_link_graph(m)

View file

@ -137,6 +137,8 @@ class FileRepository:
files = self._git_repo.changed_files
relative_files = {}
for p, ct in files.items():
if ct.value == "D": # deleted
continue
try:
rf = Path(p).relative_to(self._relative_path)
except ValueError:

View file

@ -13,19 +13,25 @@ from typing import List
from pydantic import BaseModel
from metagpt.repo_parser import ClassInfo, RepoFileInfo
from metagpt.logs import logger
from metagpt.repo_parser import ClassInfo, ClassRelationship, RepoFileInfo
from metagpt.utils.common import concat_namespace
class GraphKeyword:
IS = "is"
OF = "Of"
ON = "On"
CLASS = "class"
FUNCTION = "function"
HAS_FUNCTION = "has_function"
SOURCE_CODE = "source_code"
NULL = "<null>"
GLOBAL_VARIABLE = "global_variable"
CLASS_FUNCTION = "class_function"
CLASS_PROPERTY = "class_property"
HAS_CLASS_FUNCTION = "has_class_function"
HAS_CLASS_PROPERTY = "has_class_property"
HAS_CLASS = "has_class"
HAS_PAGE_INFO = "has_page_info"
HAS_CLASS_VIEW = "has_class_view"
@ -73,11 +79,13 @@ class GraphRepository(ABC):
await graph_db.insert(subject=file_info.file, predicate=GraphKeyword.IS, object_=file_type)
for c in file_info.classes:
class_name = c.get("name", "")
# file -> class
await graph_db.insert(
subject=file_info.file,
predicate=GraphKeyword.HAS_CLASS,
object_=concat_namespace(file_info.file, class_name),
)
# class detail
await graph_db.insert(
subject=concat_namespace(file_info.file, class_name),
predicate=GraphKeyword.IS,
@ -85,12 +93,22 @@ class GraphRepository(ABC):
)
methods = c.get("methods", [])
for fn in methods:
await graph_db.insert(
subject=concat_namespace(file_info.file, class_name),
predicate=GraphKeyword.HAS_CLASS_FUNCTION,
object_=concat_namespace(file_info.file, class_name, fn),
)
await graph_db.insert(
subject=concat_namespace(file_info.file, class_name, fn),
predicate=GraphKeyword.IS,
object_=GraphKeyword.CLASS_FUNCTION,
)
for f in file_info.functions:
# file -> function
await graph_db.insert(
subject=file_info.file, predicate=GraphKeyword.HAS_FUNCTION, object_=concat_namespace(file_info.file, f)
)
# function detail
await graph_db.insert(
subject=concat_namespace(file_info.file, f), predicate=GraphKeyword.IS, object_=GraphKeyword.FUNCTION
)
@ -105,30 +123,37 @@ class GraphRepository(ABC):
await graph_db.insert(
subject=concat_namespace(file_info.file, *code_block.tokens),
predicate=GraphKeyword.HAS_PAGE_INFO,
object_=code_block.json(ensure_ascii=False),
object_=code_block.model_dump_json(),
)
for k, v in code_block.properties.items():
await graph_db.insert(
subject=concat_namespace(file_info.file, k, v),
predicate=GraphKeyword.HAS_PAGE_INFO,
object_=code_block.json(ensure_ascii=False),
object_=code_block.model_dump_json(),
)
@staticmethod
async def update_graph_db_with_class_views(graph_db: "GraphRepository", class_views: List[ClassInfo]):
for c in class_views:
filename, class_name = c.package.split(":", 1)
filename, _ = c.package.split(":", 1)
await graph_db.insert(subject=filename, predicate=GraphKeyword.IS, object_=GraphKeyword.SOURCE_CODE)
file_types = {".py": "python", ".js": "javascript"}
file_type = file_types.get(Path(filename).suffix, GraphKeyword.NULL)
await graph_db.insert(subject=filename, predicate=GraphKeyword.IS, object_=file_type)
await graph_db.insert(subject=filename, predicate=GraphKeyword.HAS_CLASS, object_=class_name)
await graph_db.insert(subject=filename, predicate=GraphKeyword.HAS_CLASS, object_=c.package)
await graph_db.insert(
subject=c.package,
predicate=GraphKeyword.IS,
object_=GraphKeyword.CLASS,
)
for vn, vt in c.attributes.items():
# class -> property
await graph_db.insert(
subject=c.package,
predicate=GraphKeyword.HAS_CLASS_PROPERTY,
object_=concat_namespace(c.package, vn),
)
# property detail
await graph_db.insert(
subject=concat_namespace(c.package, vn),
predicate=GraphKeyword.IS,
@ -138,6 +163,15 @@ class GraphRepository(ABC):
subject=concat_namespace(c.package, vn), predicate=GraphKeyword.HAS_TYPE_DESC, object_=vt
)
for fn, desc in c.methods.items():
if "</I>" in desc and "<I>" not in desc:
logger.error(desc)
# class -> function
await graph_db.insert(
subject=c.package,
predicate=GraphKeyword.HAS_CLASS_FUNCTION,
object_=concat_namespace(c.package, fn),
)
# function detail
await graph_db.insert(
subject=concat_namespace(c.package, fn),
predicate=GraphKeyword.IS,
@ -148,3 +182,19 @@ class GraphRepository(ABC):
predicate=GraphKeyword.HAS_ARGS_DESC,
object_=desc,
)
@staticmethod
async def update_graph_db_with_class_relationship_views(
graph_db: "GraphRepository", relationship_views: List[ClassRelationship]
):
for r in relationship_views:
await graph_db.insert(
subject=r.src, predicate=GraphKeyword.IS + r.relationship + GraphKeyword.OF, object_=r.dest
)
if not r.label:
continue
await graph_db.insert(
subject=r.src,
predicate=GraphKeyword.IS + r.relationship + GraphKeyword.ON,
object_=concat_namespace(r.dest, r.label),
)

View file

@ -22,7 +22,7 @@ pandas==2.0.3
pydantic==2.5.3
#pygame==2.1.3
#pymilvus==2.2.8
pytest==7.2.2
# pytest==7.2.2 # test extras require
python_docx==0.8.11
PyYAML==6.0.1
# sentence_transformers==2.2.2
@ -38,7 +38,7 @@ typing-inspect==0.8.0
typing_extensions==4.9.0
libcst==1.0.1
qdrant-client==1.7.0
pytest-mock==3.11.1
# pytest-mock==3.11.1 # test extras require
# open-interpreter==0.1.7; python_version>"3.9" # Conflict with openai 1.x
ta==0.10.2
semantic-kernel==0.4.3.dev0
@ -57,5 +57,5 @@ gitignore-parser==0.1.9
websockets~=12.0
networkx~=3.2.1
google-generativeai==0.3.2
playwright==1.40.0
# playwright==1.40.0 # playwright extras require
anytree

View file

@ -46,6 +46,8 @@ extras_require["test"] = [
"chromadb==0.4.14",
"gradio==3.0.0",
"grpcio-status==1.48.2",
"mock==5.1.0",
"pylint==3.0.3",
]
extras_require["pyppeteer"] = [
@ -56,7 +58,7 @@ extras_require["dev"] = (["pylint~=3.0.3", "black~=23.3.0", "isort~=5.12.0", "pr
setup(
name="metagpt",
version="0.5.2",
version="0.6.0",
description="The Multi-Agent Framework",
long_description=long_description,
long_description_content_type="text/markdown",

View file

@ -11,59 +11,21 @@ import json
import logging
import os
import re
from typing import Optional
import uuid
import pytest
from metagpt.config2 import config
from metagpt.const import DEFAULT_WORKSPACE_ROOT, TEST_DATA_PATH
from metagpt.context import context
from metagpt.llm import LLM
from metagpt.logs import logger
from metagpt.provider.openai_api import OpenAILLM
from metagpt.utils.git_repository import GitRepository
from tests.mock.mock_llm import MockLLM
class MockLLM(OpenAILLM):
rsp_cache: dict = {}
async def original_aask(
self,
msg: str,
system_msgs: Optional[list[str]] = None,
format_msgs: Optional[list[dict[str, str]]] = None,
timeout=3,
stream=True,
):
"""A copy of metagpt.provider.base_llm.BaseLLM.aask, we can't use super().aask because it will be mocked"""
if system_msgs:
message = self._system_msgs(system_msgs)
else:
message = [self._default_system_msg()] if self.use_system_prompt else []
if format_msgs:
message.extend(format_msgs)
message.append(self._user_msg(msg))
rsp = await self.acompletion_text(message, stream=stream, timeout=timeout)
return rsp
async def aask(
self,
msg: str,
system_msgs: Optional[list[str]] = None,
format_msgs: Optional[list[dict[str, str]]] = None,
timeout=3,
stream=True,
) -> str:
logger.debug(f"MockLLM.aask: {msg}")
if msg not in self.rsp_cache:
# Call the original unmocked method
rsp = await self.original_aask(msg, system_msgs, format_msgs, timeout, stream)
logger.info(f"Added '{rsp[:20]}' ... to response cache")
self.rsp_cache[msg] = rsp
return rsp
else:
logger.info("Use response cache")
return self.rsp_cache[msg]
RSP_CACHE_NEW = {} # used globally for producing new and useful only response cache
ALLOW_OPENAI_API_CALL = int(
os.environ.get("ALLOW_OPENAI_API_CALL", 1)
) # NOTE: should change to default 0 (False) once mock is complete
@pytest.fixture(scope="session")
@ -77,17 +39,37 @@ def rsp_cache():
else:
rsp_cache_json = {}
yield rsp_cache_json
with open(new_rsp_cache_file_path, "w") as f2:
with open(rsp_cache_file_path, "w") as f2:
json.dump(rsp_cache_json, f2, indent=4, ensure_ascii=False)
with open(new_rsp_cache_file_path, "w") as f2:
json.dump(RSP_CACHE_NEW, f2, indent=4, ensure_ascii=False)
@pytest.fixture(scope="function")
def llm_mock(rsp_cache, mocker):
llm = MockLLM(config.get_llm_config())
llm.cost_manager = context.cost_manager
# Hook to capture the test result
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """Pytest hookwrapper that stashes the test-body report on the item.

    Stores the "call"-phase report as ``item.test_outcome`` so that
    teardown-time fixtures can check whether the test body itself passed
    (setup/teardown phases are deliberately ignored).
    """
    outcome = yield  # let pytest build the report first, then inspect it
    rep = outcome.get_result()
    if rep.when == "call":  # only the test body, not "setup"/"teardown"
        item.test_outcome = rep
@pytest.fixture(scope="function", autouse=True)
def llm_mock(rsp_cache, mocker, request):
    """Autouse fixture: replace real LLM calls with a cache-backed mock.

    Patches ``BaseLLM.aask``/``aask_batch`` to serve responses from
    ``rsp_cache``. After a test that PASSED (as recorded by the
    ``pytest_runtest_makereport`` hook on ``request.node``), any responses
    the mock produced are folded back into the session cache.
    """
    llm = MockLLM(allow_open_api_call=ALLOW_OPENAI_API_CALL)
    llm.rsp_cache = rsp_cache
    mocker.patch("metagpt.provider.base_llm.BaseLLM.aask", llm.aask)
    mocker.patch("metagpt.provider.base_llm.BaseLLM.aask_batch", llm.aask_batch)
    yield mocker
    # Teardown: persist newly seen prompt->response pairs from passing tests.
    if hasattr(request.node, "test_outcome") and request.node.test_outcome.passed:
        if llm.rsp_candidates:
            for rsp_candidate in llm.rsp_candidates:
                # Each candidate is presumably a single-entry {prompt: response}
                # dict — TODO confirm against MockLLM.rsp_candidates.
                cand_key = list(rsp_candidate.keys())[0]
                cand_value = list(rsp_candidate.values())[0]
                if cand_key not in llm.rsp_cache:
                    logger.info(f"Added '{cand_key[:100]} ... -> {cand_value[:20]} ...' to response cache")
                # NOTE(review): source indentation was mangled; updates are
                # placed at loop level (log only when new) — verify upstream.
                llm.rsp_cache.update(rsp_candidate)
                RSP_CACHE_NEW.update(rsp_candidate)
class Context:
@ -114,7 +96,7 @@ def llm_api():
logger.info("Tearing down the test")
@pytest.fixture(scope="session")
@pytest.fixture
def proxy():
pattern = re.compile(
rb"(?P<method>[a-zA-Z]+) (?P<uri>(\w+://)?(?P<host>[^\s\'\"<>\[\]{}|/:]+)(:(?P<port>\d+))?[^\s\'\"<>\[\]{}|]*) "
@ -138,8 +120,11 @@ def proxy():
remote_writer.write(data)
await asyncio.gather(pipe(reader, remote_writer), pipe(remote_reader, writer))
server = asyncio.get_event_loop().run_until_complete(asyncio.start_server(handle_client, "127.0.0.1", 0))
return "http://{}:{}".format(*server.sockets[0].getsockname())
async def proxy_func():
server = await asyncio.start_server(handle_client, "127.0.0.1", 0)
return server, "http://{}:{}".format(*server.sockets[0].getsockname())
return proxy_func()
# see https://github.com/Delgan/loguru/issues/59#issuecomment-466591978
@ -154,9 +139,9 @@ def loguru_caplog(caplog):
# init & dispose git repo
@pytest.fixture(scope="session", autouse=True)
@pytest.fixture(scope="function", autouse=True)
def setup_and_teardown_git_repo(request):
context.git_repo = GitRepository(local_path=DEFAULT_WORKSPACE_ROOT / "unittest")
context.git_repo = GitRepository(local_path=DEFAULT_WORKSPACE_ROOT / f"unittest/{uuid.uuid4().hex}")
context.config.git_reinit = True
# Destroy git repo at the end of the test session.
@ -170,3 +155,48 @@ def setup_and_teardown_git_repo(request):
@pytest.fixture(scope="session", autouse=True)
def init_config():
pass
@pytest.fixture(scope="function")
def new_filename(mocker):
    """Pin ``FileRepository.new_filename`` to a fixed date string.

    Makes LLM prompts (which embed generated filenames) reproducible so the
    response cache keys stay stable across runs.
    """
    # NOTE: Mock new filename to make reproducible llm aask, should consider changing after implementing requirement segmentation
    mocker.patch("metagpt.utils.file_repository.FileRepository.new_filename", lambda: "20240101")
    yield mocker
@pytest.fixture
def aiohttp_mocker(mocker):
    """Patch ``aiohttp.ClientSession`` so tests never hit the network.

    Yields a single shared mock-response object; tests call
    ``response.set_json(...)`` to choose what any subsequent
    ``session.get/post/delete/patch/request`` call will return from ``.json()``.
    """

    class MockAioResponse:
        # Stand-in for aiohttp's response: only .json() is supported.
        async def json(self, *args, **kwargs):
            return self._json

        def set_json(self, json):
            self._json = json

    response = MockAioResponse()

    class MockCTXMng:
        # Mimics aiohttp's request object, which is usable both as an
        # async context manager and as a plain awaitable.
        async def __aenter__(self):
            return response

        async def __aexit__(self, *args, **kwargs):
            pass

        def __await__(self):
            yield
            return response

    def mock_request(self, method, url, **kwargs):
        # Replacement for ClientSession.request; args are ignored — every
        # call yields the same shared mock response.
        return mock_request_result(self) if False else MockCTXMng()

    def wrap(method):
        # Build a verb-specific wrapper (get/post/...) that forwards to
        # mock_request with the verb pre-bound.
        def run(self, url, **kwargs):
            return mock_request(self, method, url, **kwargs)

        return run

    mocker.patch("aiohttp.ClientSession.request", mock_request)
    for i in ["get", "post", "delete", "patch"]:
        mocker.patch(f"aiohttp.ClientSession.{i}", wrap(i))
    yield response

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,258 @@
{
"search_metadata": {
"id": "65952b400ead410fae1f548f",
"status": "Success",
"json_endpoint": "https://serpapi.com/searches/f3454e001dacdae1/65952b400ead410fae1f548f.json",
"created_at": "2024-01-03 09:39:12 UTC",
"processed_at": "2024-01-03 09:39:12 UTC",
"google_url": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&sourceid=chrome&ie=UTF-8",
"raw_html_file": "https://serpapi.com/searches/f3454e001dacdae1/65952b400ead410fae1f548f.html",
"total_time_taken": 1.78
},
"search_parameters": {
"engine": "google",
"q": "metagpt",
"google_domain": "google.com",
"hl": "en",
"gl": "us",
"num": "8",
"device": "desktop"
},
"search_information": {
"query_displayed": "metagpt",
"total_results": 110000,
"time_taken_displayed": 0.3,
"menu_items": [
{
"position": 1,
"title": "News",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=metagpt&tbm=nws&source=lnms&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ0pQJegQIDRAB",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&tbm=nws"
},
{
"position": 2,
"title": "Images",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=metagpt&tbm=isch&source=lnms&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ0pQJegQIDBAB",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google_images&gl=us&google_domain=google.com&hl=en&q=metagpt"
},
{
"position": 3,
"title": "Perspectives",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=metagpt&uds=AMIYvT-LYN0C-KgfpAf4hDGmHUqYzPt2YD2Sjup6GzZxffnKpRHzrkDtH-YMw_l16Rw3319fYKZIWOgxIizOkCn4WaiWmK--Gd_KWgcdk2AGw9K3og-5w2Q&udm=4&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQs6gLegQICxAB",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt"
},
{
"position": 4,
"title": "Download",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+download&uds=AMIYvT-5zq-IxPfUvCGLrNgPl7Seu8ODWYIoXhisgEvQZV3Y8pl5TzJLGfCHEIw7og1p8xJsV4GDoO9mlugZYdQpedp8elSjLy5ABJfq6NUCY0MAtXsFqu8&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQxKsJegQIChAB&ictx=0",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+download"
}
],
"organic_results_state": "Results for exact spelling"
},
"inline_videos": [
{
"position": 1,
"title": "How To Install MetaGPT - Build A Startup With One Prompt!!",
"link": "https://www.youtube.com/watch?v=uT75J_KG_aY",
"thumbnail": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/a0db2f9f70f02dd11e3d3d4154df9fd65b46b2fbf4804f7038c9ce99c8efea1c.jpeg",
"channel": "Matthew Berman",
"duration": "6:36",
"platform": "YouTube",
"date": "Aug 14, 2023"
},
{
"position": 2,
"title": "MetaGPT HUGE Update: Autonomous AI Agents with ...",
"link": "https://www.youtube.com/watch?v=Xyws6iI-eH8",
"thumbnail": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/a0db2f9f70f02dd1d578e6031265d66299cf6aecd327454cdf67b92808f3dd86.jpeg",
"channel": "WorldofAI",
"duration": "11:38",
"platform": "YouTube",
"date": "1 week ago"
},
{
"position": 3,
"title": "\ud83d\ude80 MetaGPT Setup: Launch a Startup with One \u270d\ufe0f Prompt!",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao",
"thumbnail": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/a0db2f9f70f02dd1c5666bd22292fdc357357dac89294aabb55ebea0a40ce322.jpeg",
"channel": "Prompt Engineering",
"duration": "14:15",
"platform": "YouTube",
"date": "Sep 4, 2023",
"key_moments": [
{
"time": "00:00",
"title": "Intro",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=0",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQW-YKGXQDHplRpEDgL5Q-HlJ8HggTw_ghp_KWPh8xUcQ&s"
},
{
"time": "00:12",
"title": "What is MetaGPT",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=12",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcRJ4RRAXOG6yvGPYqkuj5cMoiyYdAN6g7E3VU04SA3P7w&s"
},
{
"time": "01:06",
"title": "Setup",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=66",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTDlJBrAtfBkC8zI9wY4dOqVIaNFbjcYSZr4M1ZnD7RSw&s"
},
{
"time": "05:23",
"title": "Changing configuration",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=323",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcT8MbsIRVXJy__UE4ba0FoCTMGfrykasHm3UGvSzMQAtQ&s"
}
]
}
],
"organic_results": [
{
"position": 1,
"title": "geekan/MetaGPT: \ud83c\udf1f The Multi-Agent Framework",
"link": "https://github.com/geekan/MetaGPT",
"redirect_link": "https://www.google.comhttps://github.com/geekan/MetaGPT",
"displayed_link": "https://github.com \u203a geekan \u203a MetaGPT",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e7690f9b18357b8e5feb75a30ffbaaabfb1.png",
"snippet": "MetaGPT takes a one line requirement as input and outputs user stories / competitive analysis / requirements / data structures / APIs / documents, etc.",
"snippet_highlighted_words": [
"MetaGPT"
],
"sitelinks": {
"inline": [
{
"title": "Roadmap",
"link": "https://github.com/geekan/MetaGPT/blob/main/docs/ROADMAP.md"
},
{
"title": "README.md",
"link": "https://github.com/geekan/MetaGPT/blob/main/README.md"
},
{
"title": "Issues",
"link": "https://github.com/geekan/MetaGPT/issues"
},
{
"title": "Actions",
"link": "https://github.com/geekan/MetaGPT/actions"
}
]
},
"source": "GitHub"
},
{
"position": 2,
"title": "MetaGPT: Meta Programming for A Multi-Agent ...",
"link": "https://arxiv.org/abs/2308.00352",
"redirect_link": "https://www.google.comhttps://arxiv.org/abs/2308.00352",
"displayed_link": "https://arxiv.org \u203a cs",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e76592372342f3f5dd76573e051b50f1bce.png",
"author": "by S Hong",
"cited_by": "Cited by 53",
"extracted_cited_by": 53,
"date": "2023",
"snippet": "Abstract:Remarkable progress has been made on automated problem solving through societies of agents based on large language models (LLMs).",
"source": "arXiv"
},
{
"position": 3,
"title": "MetaGPT: a Multi-Agent Framework to Automate Your ...",
"link": "https://medium.datadriveninvestor.com/metagpt-a-multi-agent-framework-to-automate-your-software-company-4b6ae747cc36",
"redirect_link": "https://www.google.comhttps://medium.datadriveninvestor.com/metagpt-a-multi-agent-framework-to-automate-your-software-company-4b6ae747cc36",
"displayed_link": "https://medium.datadriveninvestor.com \u203a metagpt-a-...",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e76e8319069677ee18a99026fb1e05709cf.png",
"snippet": "MetaGPT is about to reach 10000 stars on Github. It's a Multi-Agent Framework that can behave as an engineer, product manager, architect, project managers.",
"snippet_highlighted_words": [
"MetaGPT"
],
"source": "DataDrivenInvestor"
},
{
"position": 4,
"title": "MetaGPT: Complete Guide to the Best AI Agent Available ...",
"link": "https://www.unite.ai/metagpt-complete-guide-to-the-best-ai-agent-available-right-now/",
"redirect_link": "https://www.google.comhttps://www.unite.ai/metagpt-complete-guide-to-the-best-ai-agent-available-right-now/",
"displayed_link": "https://www.unite.ai \u203a metagpt-complete-guide-to-the-...",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e76334a7b2eeab09f16973a82a209ee6339.png",
"date": "Sep 11, 2023",
"snippet": "Discover why MetaGPT outperforms AutoGPT, BabyAgi, and other AI agents in complex coding tasks. Our in-depth article guides you through the ...",
"snippet_highlighted_words": [
"MetaGPT"
],
"source": "Unite.AI"
}
],
"related_searches": [
{
"block_position": 1,
"query": "metagpt online",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+online&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAglEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+online"
},
{
"block_position": 1,
"query": "metagpt paper",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+paper&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgoEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+paper"
},
{
"block_position": 1,
"query": "Metagpt review",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=Metagpt+review&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgrEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=Metagpt+review"
},
{
"block_position": 1,
"query": "Metagpt download",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=Metagpt+download&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgpEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=Metagpt+download"
},
{
"block_position": 1,
"query": "metagpt ai",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+AI&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgeEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+AI"
},
{
"block_position": 1,
"query": "metagpt github",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=Metagpt+github&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgfEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=Metagpt+github"
},
{
"block_position": 1,
"query": "metagpt reddit",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+Reddit&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgnEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+Reddit"
},
{
"block_position": 1,
"query": "how to use metagpt",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=How+to+use+MetaGPT&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgqEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=How+to+use+MetaGPT"
}
],
"pagination": {
"current": 1,
"next": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&start=8&sourceid=chrome&ie=UTF-8",
"other_pages": {
"2": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&start=8&sourceid=chrome&ie=UTF-8",
"3": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&start=16&sourceid=chrome&ie=UTF-8",
"4": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&start=24&sourceid=chrome&ie=UTF-8",
"5": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&start=32&sourceid=chrome&ie=UTF-8"
}
},
"serpapi_pagination": {
"current": 1,
"next_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=8",
"next": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=8",
"other_pages": {
"2": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=8",
"3": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=16",
"4": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=24",
"5": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=32"
}
}
}

View file

@ -0,0 +1,350 @@
{
"search_metadata": {
"id": "65952b400ead410fae1f548f",
"status": "Success",
"json_endpoint": "https://serpapi.com/searches/f3454e001dacdae1/65952b400ead410fae1f548f.json",
"created_at": "2024-01-03 09:39:12 UTC",
"processed_at": "2024-01-03 09:39:12 UTC",
"google_url": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&sourceid=chrome&ie=UTF-8",
"raw_html_file": "https://serpapi.com/searches/f3454e001dacdae1/65952b400ead410fae1f548f.html",
"total_time_taken": 1.78
},
"search_parameters": {
"engine": "google",
"q": "metagpt",
"google_domain": "google.com",
"hl": "en",
"gl": "us",
"num": "8",
"device": "desktop"
},
"search_information": {
"query_displayed": "metagpt",
"total_results": 110000,
"time_taken_displayed": 0.3,
"menu_items": [
{
"position": 1,
"title": "News",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=metagpt&tbm=nws&source=lnms&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ0pQJegQIDRAB",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&tbm=nws"
},
{
"position": 2,
"title": "Images",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=metagpt&tbm=isch&source=lnms&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ0pQJegQIDBAB",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google_images&gl=us&google_domain=google.com&hl=en&q=metagpt"
},
{
"position": 3,
"title": "Perspectives",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=metagpt&uds=AMIYvT-LYN0C-KgfpAf4hDGmHUqYzPt2YD2Sjup6GzZxffnKpRHzrkDtH-YMw_l16Rw3319fYKZIWOgxIizOkCn4WaiWmK--Gd_KWgcdk2AGw9K3og-5w2Q&udm=4&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQs6gLegQICxAB",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt"
},
{
"position": 4,
"title": "Download",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+download&uds=AMIYvT-5zq-IxPfUvCGLrNgPl7Seu8ODWYIoXhisgEvQZV3Y8pl5TzJLGfCHEIw7og1p8xJsV4GDoO9mlugZYdQpedp8elSjLy5ABJfq6NUCY0MAtXsFqu8&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQxKsJegQIChAB&ictx=0",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+download"
},
{
"position": 5,
"title": "Videos",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=metagpt&tbm=vid&source=lnms&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ0pQJegQINxAB",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google_videos&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt"
},
{
"position": 6,
"title": "Shopping",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=metagpt&tbm=shop&source=lnms",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google_shopping&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt"
},
{
"position": 7,
"title": "Review",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+review&uds=AMIYvT9VP83904q4-J94lPXwCEnwL3j5QAtL1fmmW1S1R5RgwRLmxvuFVQ7OcN0dFbrjXQkUwlZlHOt9GNXyfomxI6gDvZxA6gokeHbKUq_anMgIkmFv3IY&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQxKsJegQIOhAB&ictx=0",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+review"
},
{
"position": 8,
"title": "Online",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+online&uds=AMIYvT8Ap1YYLsvgKVUJMi_v4l0FNZz9UYjvpQyVx07CgVk-hay-mNemgcUIz5ipc8mmv44wplpB3umGIvKSQMEgsHCY8aTWe6FLDtUjGT9hv-pihBT6dYw&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQxKsJegQIOxAB&ictx=0",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+online"
},
{
"position": 9,
"title": "App",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=Metagpt+app&uds=AMIYvT_YL6Iqd-0G_f_v9e2v-JybHFZesGv-WkSjqZQUhGvjb7qTf3NoIkE_8qY5quBbzv_GSlurBfqWahyxbnyVMX5mlfpqn-U3E-KHZ3PAJcM8mO6MflU&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQxKsJegQIORAB&ictx=0",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=Metagpt+app"
}
],
"organic_results_state": "Results for exact spelling"
},
"inline_videos": [
{
"position": 1,
"title": "How To Install MetaGPT - Build A Startup With One Prompt!!",
"link": "https://www.youtube.com/watch?v=uT75J_KG_aY",
"thumbnail": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/a0db2f9f70f02dd11e3d3d4154df9fd65b46b2fbf4804f7038c9ce99c8efea1c.jpeg",
"channel": "Matthew Berman",
"duration": "6:36",
"platform": "YouTube",
"date": "Aug 14, 2023"
},
{
"position": 2,
"title": "MetaGPT HUGE Update: Autonomous AI Agents with ...",
"link": "https://www.youtube.com/watch?v=Xyws6iI-eH8",
"thumbnail": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/a0db2f9f70f02dd1d578e6031265d66299cf6aecd327454cdf67b92808f3dd86.jpeg",
"channel": "WorldofAI",
"duration": "11:38",
"platform": "YouTube",
"date": "1 week ago"
},
{
"position": 3,
"title": "\ud83d\ude80 MetaGPT Setup: Launch a Startup with One \u270d\ufe0f Prompt!",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao",
"thumbnail": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/a0db2f9f70f02dd1c5666bd22292fdc357357dac89294aabb55ebea0a40ce322.jpeg",
"channel": "Prompt Engineering",
"duration": "14:15",
"platform": "YouTube",
"date": "Sep 4, 2023",
"key_moments": [
{
"time": "00:00",
"title": "Intro",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=0",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQW-YKGXQDHplRpEDgL5Q-HlJ8HggTw_ghp_KWPh8xUcQ&s"
},
{
"time": "00:12",
"title": "What is MetaGPT",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=12",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcRJ4RRAXOG6yvGPYqkuj5cMoiyYdAN6g7E3VU04SA3P7w&s"
},
{
"time": "01:06",
"title": "Setup",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=66",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTDlJBrAtfBkC8zI9wY4dOqVIaNFbjcYSZr4M1ZnD7RSw&s"
},
{
"time": "05:23",
"title": "Changing configuration",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=323",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcT8MbsIRVXJy__UE4ba0FoCTMGfrykasHm3UGvSzMQAtQ&s"
},
{
"time": "06:35",
"title": "How to Run",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=395",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcRuX6mOUVQVRzvnkOPYNcDpcazRC1QGeHhZh-Az9btUNA&s"
},
{
"time": "09:02",
"title": "What outputs to expect",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=542",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTFnNqvPfGrPnKJTJ1iOHGSNp6sVR5jn0Zy5N2JSGfeEQ&s"
},
{
"time": "10:45",
"title": "Generated Design Documents",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=645",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSN3I0gxudI4Mew93w_tw34HmWREz5XX8ArebReM3Y2_g&s"
},
{
"time": "12:25",
"title": "Run the created code base",
"link": "https://www.youtube.com/watch?v=nqZlTV_L6Ao&t=745",
"thumbnail": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQLBx5bgKZ2Gqsu-PsIXuvtM0SBmHvBCndmKtresgqFCg&s"
}
]
}
],
"organic_results": [
{
"position": 1,
"title": "geekan/MetaGPT: \ud83c\udf1f The Multi-Agent Framework",
"link": "https://github.com/geekan/MetaGPT",
"redirect_link": "https://www.google.comhttps://github.com/geekan/MetaGPT",
"displayed_link": "https://github.com \u203a geekan \u203a MetaGPT",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e7690f9b18357b8e5feb75a30ffbaaabfb1.png",
"snippet": "MetaGPT takes a one line requirement as input and outputs user stories / competitive analysis / requirements / data structures / APIs / documents, etc.",
"snippet_highlighted_words": [
"MetaGPT"
],
"sitelinks": {
"inline": [
{
"title": "Roadmap",
"link": "https://github.com/geekan/MetaGPT/blob/main/docs/ROADMAP.md"
},
{
"title": "README.md",
"link": "https://github.com/geekan/MetaGPT/blob/main/README.md"
},
{
"title": "Issues",
"link": "https://github.com/geekan/MetaGPT/issues"
},
{
"title": "Actions",
"link": "https://github.com/geekan/MetaGPT/actions"
}
]
},
"source": "GitHub"
},
{
"position": 2,
"title": "MetaGPT: Meta Programming for A Multi-Agent ...",
"link": "https://arxiv.org/abs/2308.00352",
"redirect_link": "https://www.google.comhttps://arxiv.org/abs/2308.00352",
"displayed_link": "https://arxiv.org \u203a cs",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e76592372342f3f5dd76573e051b50f1bce.png",
"author": "by S Hong",
"cited_by": "Cited by 53",
"extracted_cited_by": 53,
"date": "2023",
"snippet": "Abstract:Remarkable progress has been made on automated problem solving through societies of agents based on large language models (LLMs).",
"source": "arXiv"
},
{
"position": 3,
"title": "MetaGPT: a Multi-Agent Framework to Automate Your ...",
"link": "https://medium.datadriveninvestor.com/metagpt-a-multi-agent-framework-to-automate-your-software-company-4b6ae747cc36",
"redirect_link": "https://www.google.comhttps://medium.datadriveninvestor.com/metagpt-a-multi-agent-framework-to-automate-your-software-company-4b6ae747cc36",
"displayed_link": "https://medium.datadriveninvestor.com \u203a metagpt-a-...",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e76e8319069677ee18a99026fb1e05709cf.png",
"snippet": "MetaGPT is about to reach 10000 stars on Github. It's a Multi-Agent Framework that can behave as an engineer, product manager, architect, project managers.",
"snippet_highlighted_words": [
"MetaGPT"
],
"source": "DataDrivenInvestor"
},
{
"position": 4,
"title": "MetaGPT: Complete Guide to the Best AI Agent Available ...",
"link": "https://www.unite.ai/metagpt-complete-guide-to-the-best-ai-agent-available-right-now/",
"redirect_link": "https://www.google.comhttps://www.unite.ai/metagpt-complete-guide-to-the-best-ai-agent-available-right-now/",
"displayed_link": "https://www.unite.ai \u203a metagpt-complete-guide-to-the-...",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e76334a7b2eeab09f16973a82a209ee6339.png",
"date": "Sep 11, 2023",
"snippet": "Discover why MetaGPT outperforms AutoGPT, BabyAgi, and other AI agents in complex coding tasks. Our in-depth article guides you through the ...",
"snippet_highlighted_words": [
"MetaGPT"
],
"source": "Unite.AI"
},
{
"position": 5,
"title": "MetaGPT AI technology page - Lablab.ai",
"link": "https://lablab.ai/tech/metagpt",
"redirect_link": "https://www.google.comhttps://lablab.ai/tech/metagpt",
"displayed_link": "https://lablab.ai \u203a tech \u203a metagpt",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e766a141f2bf05b1ab902f83ed00f4148a4.png",
"snippet": "MetaGPT: Collaborative AI for Complex Tasks. MetaGPT is a groundbreaking AI technology, designed to transform the landscape of software development.",
"snippet_highlighted_words": [
"MetaGPT",
"MetaGPT"
],
"source": "lablab.ai"
},
{
"position": 6,
"title": "MetaGPT | Discover AI use cases",
"link": "https://gpt3demo.com/apps/metagpt",
"redirect_link": "https://www.google.comhttps://gpt3demo.com/apps/metagpt",
"displayed_link": "https://gpt3demo.com \u203a apps \u203a metagpt",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e76142721493557b5d95328dafb62b6b43a.jpeg",
"snippet": "Assign different roles to GPTs to form a collaborative software entity for complex tasks. MetaGPT takes a one-line requirement as input and outputs user ...",
"snippet_highlighted_words": [
"MetaGPT"
],
"source": "GPT-3 Demo"
},
{
"position": 7,
"title": "Meet MetaGPT: The ChatGPT-Powered AI Assistant That ...",
"link": "https://www.kdnuggets.com/meet-metagpt-the-chatgptpowered-ai-assistant-that-turns-text-into-web-apps",
"redirect_link": "https://www.google.comhttps://www.kdnuggets.com/meet-metagpt-the-chatgptpowered-ai-assistant-that-turns-text-into-web-apps",
"displayed_link": "https://www.kdnuggets.com \u203a meet-metagpt-the-chatg...",
"favicon": "https://serpapi.com/searches/65952b400ead410fae1f548f/images/f37f87ccfb08b6fc2fe7e2076c022e767b0d4a705b7ad21b521b16648b390fe8.png",
"date": "Sep 8, 2023",
"snippet": "This revolutionary AI tool lets you create no-code web applications in just seconds!",
"source": "KDnuggets"
}
],
"related_searches": [
{
"block_position": 1,
"query": "metagpt online",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+online&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAglEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+online"
},
{
"block_position": 1,
"query": "metagpt paper",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+paper&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgoEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+paper"
},
{
"block_position": 1,
"query": "Metagpt review",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=Metagpt+review&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgrEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=Metagpt+review"
},
{
"block_position": 1,
"query": "Metagpt download",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=Metagpt+download&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgpEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=Metagpt+download"
},
{
"block_position": 1,
"query": "metagpt ai",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+AI&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgeEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+AI"
},
{
"block_position": 1,
"query": "metagpt github",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=Metagpt+github&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgfEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=Metagpt+github"
},
{
"block_position": 1,
"query": "metagpt reddit",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=MetaGPT+Reddit&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgnEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=MetaGPT+Reddit"
},
{
"block_position": 1,
"query": "how to use metagpt",
"link": "https://www.google.com/search?num=8&sca_esv=4bcb71572bca9257&sca_upv=1&hl=en&gl=us&q=How+to+use+MetaGPT&sa=X&ved=2ahUKEwjh-qqa9sCDAxV4fTABHZ8gClUQ1QJ6BAgqEAE",
"serpapi_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=How+to+use+MetaGPT"
}
],
"pagination": {
"current": 1,
"next": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&start=8&sourceid=chrome&ie=UTF-8",
"other_pages": {
"2": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&start=8&sourceid=chrome&ie=UTF-8",
"3": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&start=16&sourceid=chrome&ie=UTF-8",
"4": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&start=24&sourceid=chrome&ie=UTF-8",
"5": "https://www.google.com/search?q=metagpt&oq=metagpt&hl=en&gl=us&num=8&start=32&sourceid=chrome&ie=UTF-8"
}
},
"serpapi_pagination": {
"current": 1,
"next_link": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=8",
"next": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=8",
"other_pages": {
"2": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=8",
"3": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=16",
"4": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=24",
"5": "https://serpapi.com/search.json?device=desktop&engine=google&gl=us&google_domain=google.com&hl=en&num=8&q=metagpt&start=32"
}
}
}

View file

@ -0,0 +1,102 @@
[
{
"searchParameters": {
"q": "metagpt",
"num": 8,
"page": 1,
"type": "search",
"engine": "google"
},
"organic": [
{
"title": "geekan/MetaGPT: The Multi-Agent Framework: Given one line Requirement, return PRD, Design, Tasks, Repo - GitHub",
"link": "https://github.com/geekan/MetaGPT",
"snippet": "MetaGPT takes a one line requirement as input and outputs user stories / competitive analysis / requirements / data structures / APIs / documents, etc.",
"sitelinks": [
{
"title": "README.md",
"link": "https://github.com/geekan/MetaGPT/blob/main/README.md"
},
{
"title": "Roadmap",
"link": "https://github.com/geekan/MetaGPT/blob/main/docs/ROADMAP.md"
},
{
"title": "Issues",
"link": "https://github.com/geekan/MetaGPT/issues"
},
{
"title": "Actions",
"link": "https://github.com/geekan/MetaGPT/actions"
}
],
"position": 1
},
{
"title": "MetaGPT: Meta Programming for A Multi-Agent Collaborative Framework - arXiv",
"link": "https://arxiv.org/abs/2308.00352",
"snippet": "Abstract:Remarkable progress has been made on automated problem solving through societies of agents based on large language models (LLMs).",
"date": "Aug 1, 2023",
"position": 2
},
{
"title": "How To Install MetaGPT - Build A Startup With One Prompt!! - YouTube",
"link": "https://youtube.com/watch?v=uT75J_KG_aY",
"snippet": "In this video, we review MetaGPT, a new project that aims ...",
"date": "Aug 14, 2023",
"attributes": {
"Duration": "6:36",
"Posted": "Aug 14, 2023"
},
"imageUrl": "https://i.ytimg.com/vi/uT75J_KG_aY/default.jpg?sqp=-oaymwEECHgQQw&rs=AMzJL3lfWRsXgckPQztWhHaRKYqxffksoA",
"position": 3
},
{
"title": "Meet MetaGPT: The ChatGPT-Powered AI Assistant That Turns Text Into Web Apps",
"link": "https://www.kdnuggets.com/meet-metagpt-the-chatgptpowered-ai-assistant-that-turns-text-into-web-apps",
"snippet": "This revolutionary AI tool lets you create no-code web applications in just seconds!",
"date": "Sep 8, 2023",
"position": 4
},
{
"title": "MetaGPT: Complete Guide to the Best AI Agent Available Right Now - Unite.AI",
"link": "https://www.unite.ai/metagpt-complete-guide-to-the-best-ai-agent-available-right-now/",
"snippet": "Discover why MetaGPT outperforms AutoGPT, BabyAgi, and other AI agents in complex coding tasks. Our in-depth article guides you through the ...",
"date": "Sep 11, 2023",
"position": 5
},
{
"title": "MetaGPT | Discover AI use cases - GPT-3 Demo",
"link": "https://gpt3demo.com/apps/metagpt",
"snippet": "Assign different roles to GPTs to form a collaborative software entity for complex tasks. MetaGPT takes a one-line requirement as input and outputs user ...",
"position": 6
}
],
"relatedSearches": [
{
"query": "How to use MetaGPT"
},
{
"query": "MetaGPT Reddit"
},
{
"query": "Metagpt arXiv"
},
{
"query": "MetaGPT youtube"
},
{
"query": "MetaGPT example"
},
{
"query": "Metagpt huggingface"
},
{
"query": "metagpt: meta programming for multi-agent collaborative framework"
},
{
"query": "MetaGPT alternative"
}
]
}
]

View file

@ -0,0 +1,115 @@
[
{
"searchParameters": {
"q": "metagpt",
"num": 8,
"page": 1,
"type": "search",
"engine": "google"
},
"organic": [
{
"title": "geekan/MetaGPT: The Multi-Agent Framework: Given one line Requirement, return PRD, Design, Tasks, Repo - GitHub",
"link": "https://github.com/geekan/MetaGPT",
"snippet": "MetaGPT takes a one line requirement as input and outputs user stories / competitive analysis / requirements / data structures / APIs / documents, etc.",
"sitelinks": [
{
"title": "README.md",
"link": "https://github.com/geekan/MetaGPT/blob/main/README.md"
},
{
"title": "Roadmap",
"link": "https://github.com/geekan/MetaGPT/blob/main/docs/ROADMAP.md"
},
{
"title": "Issues",
"link": "https://github.com/geekan/MetaGPT/issues"
},
{
"title": "Actions",
"link": "https://github.com/geekan/MetaGPT/actions"
}
],
"position": 1
},
{
"title": "MetaGPT: Meta Programming for A Multi-Agent Collaborative Framework - arXiv",
"link": "https://arxiv.org/abs/2308.00352",
"snippet": "Abstract:Remarkable progress has been made on automated problem solving through societies of agents based on large language models (LLMs).",
"date": "Aug 1, 2023",
"position": 2
},
{
"title": "How To Install MetaGPT - Build A Startup With One Prompt!! - YouTube",
"link": "https://youtube.com/watch?v=uT75J_KG_aY",
"snippet": "In this video, we review MetaGPT, a new project that aims ...",
"date": "Aug 14, 2023",
"attributes": {
"Duration": "6:36",
"Posted": "Aug 14, 2023"
},
"imageUrl": "https://i.ytimg.com/vi/uT75J_KG_aY/default.jpg?sqp=-oaymwEECHgQQw&rs=AMzJL3lfWRsXgckPQztWhHaRKYqxffksoA",
"position": 3
},
{
"title": "Meet MetaGPT: The ChatGPT-Powered AI Assistant That Turns Text Into Web Apps",
"link": "https://www.kdnuggets.com/meet-metagpt-the-chatgptpowered-ai-assistant-that-turns-text-into-web-apps",
"snippet": "This revolutionary AI tool lets you create no-code web applications in just seconds!",
"date": "Sep 8, 2023",
"position": 4
},
{
"title": "MetaGPT: Complete Guide to the Best AI Agent Available Right Now - Unite.AI",
"link": "https://www.unite.ai/metagpt-complete-guide-to-the-best-ai-agent-available-right-now/",
"snippet": "Discover why MetaGPT outperforms AutoGPT, BabyAgi, and other AI agents in complex coding tasks. Our in-depth article guides you through the ...",
"date": "Sep 11, 2023",
"position": 5
},
{
"title": "MetaGPT | Discover AI use cases - GPT-3 Demo",
"link": "https://gpt3demo.com/apps/metagpt",
"snippet": "Assign different roles to GPTs to form a collaborative software entity for complex tasks. MetaGPT takes a one-line requirement as input and outputs user ...",
"position": 6
},
{
"title": "MetaGPT AI technology page - Lablab.ai",
"link": "https://lablab.ai/tech/metagpt",
"snippet": "MetaGPT: Collaborative AI for Complex Tasks. MetaGPT is a groundbreaking AI technology, designed to transform the landscape of software development.",
"position": 7
},
{
"title": "MetaGPT: Meta Programming for Multi-Agent Collaborative Framework | OpenReview",
"link": "https://openreview.net/forum?id=VtmBAGCN7o",
"snippet": "This paper introduces MetaGPT, an innovative meta-programming framework for multi-agent collaborations based on LLM, which encodes Standardized ...",
"date": "Sep 22, 2023",
"position": 8
}
],
"relatedSearches": [
{
"query": "How to use MetaGPT"
},
{
"query": "MetaGPT Reddit"
},
{
"query": "Metagpt arXiv"
},
{
"query": "MetaGPT youtube"
},
{
"query": "MetaGPT example"
},
{
"query": "Metagpt huggingface"
},
{
"query": "metagpt: meta programming for multi-agent collaborative framework"
},
{
"query": "MetaGPT alternative"
}
]
}
]

View file

@ -116,7 +116,6 @@ if __name__ == '__main__':
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_debug_error():
context.src_workspace = context.git_repo.workdir / uuid.uuid4().hex
ctx = RunCodeContext(

View file

@ -16,7 +16,6 @@ from metagpt.schema import Message
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_design_api():
inputs = ["我们需要一个音乐播放器,它应该有播放、暂停、上一曲、下一曲等功能。"] # PRD_SAMPLE
repo = context.file_repo

View file

@ -11,7 +11,6 @@ from metagpt.actions.design_api_review import DesignReview
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_design_api_review():
prd = "我们需要一个音乐播放器,它应该有播放、暂停、上一曲、下一曲等功能。"
api_design = """

View file

@ -20,7 +20,6 @@ context = """
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_generate_questions():
action = GenerateQuestions()
rsp = await action.run(context)

View file

@ -54,7 +54,6 @@ async def test_generate_table(invoice_path: Path, expected_result: dict):
("invoice_path", "query", "expected_result"),
[(Path("invoices/invoice-1.pdf"), "Invoicing date", "2023年02月03日")],
)
@pytest.mark.usefixtures("llm_mock")
async def test_reply_question(invoice_path: Path, query: dict, expected_result: str):
invoice_path = TEST_DATA_PATH / invoice_path
ocr_result = await InvoiceOCR().run(file_path=Path(invoice_path))

View file

@ -12,7 +12,6 @@ from metagpt.logs import logger
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_prepare_interview():
action = PrepareInterview()
rsp = await action.run("I just graduated and hope to find a job as a Python engineer")

View file

@ -17,7 +17,6 @@ from tests.metagpt.actions.mock_json import DESIGN, PRD
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_design_api():
await context.file_repo.save_file("1.txt", content=str(PRD), relative_path=PRDS_FILE_REPO)
await context.file_repo.save_file("1.txt", content=str(DESIGN), relative_path=SYSTEM_DESIGN_FILE_REPO)

View file

@ -11,13 +11,46 @@ from pathlib import Path
import pytest
from metagpt.actions.rebuild_class_view import RebuildClassView
from metagpt.config import CONFIG
from metagpt.const import GRAPH_REPO_FILE_REPO
from metagpt.llm import LLM
@pytest.mark.asyncio
async def test_rebuild():
action = RebuildClassView(name="RedBean", context=Path(__file__).parent.parent, llm=LLM())
action = RebuildClassView(
name="RedBean", context=str(Path(__file__).parent.parent.parent.parent / "metagpt"), llm=LLM()
)
await action.run()
graph_file_repo = CONFIG.git_repo.new_file_repository(relative_path=GRAPH_REPO_FILE_REPO)
assert graph_file_repo.changed_files
@pytest.mark.parametrize(
("path", "direction", "diff", "want"),
[
("metagpt/startup.py", "=", ".", "metagpt/startup.py"),
("metagpt/startup.py", "+", "MetaGPT", "MetaGPT/metagpt/startup.py"),
("metagpt/startup.py", "-", "metagpt", "startup.py"),
],
)
def test_align_path(path, direction, diff, want):
res = RebuildClassView._align_root(path=path, direction=direction, diff_path=diff)
assert res == want
@pytest.mark.parametrize(
("path_root", "package_root", "want_direction", "want_diff"),
[
("/Users/x/github/MetaGPT/metagpt", "/Users/x/github/MetaGPT/metagpt", "=", "."),
("/Users/x/github/MetaGPT", "/Users/x/github/MetaGPT/metagpt", "-", "metagpt"),
("/Users/x/github/MetaGPT/metagpt", "/Users/x/github/MetaGPT", "+", "metagpt"),
],
)
def test_diff_path(path_root, package_root, want_direction, want_diff):
direction, diff = RebuildClassView._diff_path(path_root=Path(path_root), package_root=Path(package_root))
assert direction == want_direction
assert diff == want_diff
if __name__ == "__main__":

View file

@ -0,0 +1,55 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2024/1/4
@Author : mashenquan
@File : test_rebuild_sequence_view.py
"""
from pathlib import Path
import pytest
from metagpt.actions.rebuild_sequence_view import RebuildSequenceView
from metagpt.config import CONFIG
from metagpt.const import GRAPH_REPO_FILE_REPO
from metagpt.llm import LLM
from metagpt.utils.common import aread
from metagpt.utils.file_repository import FileRepository
from metagpt.utils.git_repository import ChangeType
@pytest.mark.asyncio
async def test_rebuild():
# Mock
data = await aread(filename=Path(__file__).parent / "../../data/graph_db/networkx.json")
graph_db_filename = Path(CONFIG.git_repo.workdir.name).with_suffix(".json")
await FileRepository.save_file(
filename=str(graph_db_filename),
relative_path=GRAPH_REPO_FILE_REPO,
content=data,
)
CONFIG.git_repo.add_change({f"{GRAPH_REPO_FILE_REPO}/{graph_db_filename}": ChangeType.UNTRACTED})
CONFIG.git_repo.commit("commit1")
action = RebuildSequenceView(
name="RedBean", context=str(Path(__file__).parent.parent.parent.parent / "metagpt"), llm=LLM()
)
await action.run()
graph_file_repo = CONFIG.git_repo.new_file_repository(relative_path=GRAPH_REPO_FILE_REPO)
assert graph_file_repo.changed_files
@pytest.mark.parametrize(
("root", "pathname", "want"),
[
(Path(__file__).parent.parent.parent, "/".join(__file__.split("/")[-2:]), Path(__file__)),
(Path(__file__).parent.parent.parent, "f/g.txt", None),
],
)
def test_get_full_filename(root, pathname, want):
res = RebuildSequenceView._get_full_filename(root=root, pathname=pathname)
assert res == want
if __name__ == "__main__":
pytest.main([__file__, "-s"])

View file

@ -8,14 +8,7 @@
import pytest
from metagpt.actions import CollectLinks, research
@pytest.mark.asyncio
async def test_action():
action = CollectLinks()
result = await action.run(topic="baidu")
assert result
from metagpt.actions import research
@pytest.mark.asyncio

View file

@ -177,7 +177,6 @@ class Snake:
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_summarize_code():
context.src_workspace = context.git_repo.workdir / "src"
await context.file_repo.save_file(filename="1.json", relative_path=SYSTEM_DESIGN_FILE_REPO, content=DESIGN_CONTENT)

View file

@ -33,7 +33,6 @@ from metagpt.schema import Message
),
],
)
@pytest.mark.usefixtures("llm_mock")
async def test_prompt(agent_description, language, context, knowledge, history_summary):
# Prerequisites
g_context = Context()

View file

@ -27,7 +27,6 @@ from tests.metagpt.actions.mock_markdown import TASKS_2, WRITE_CODE_PROMPT_SAMPL
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code():
ccontext = CodingContext(
filename="task_filename.py", design_doc=Document(content="设计一个名为'add'的函数,该函数接受两个整数作为输入,并返回它们的和。")
@ -44,7 +43,6 @@ async def test_write_code():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_directly():
prompt = WRITE_CODE_PROMPT_SAMPLE + "\n" + TASKS_2[0]
llm = LLM()
@ -53,7 +51,6 @@ async def test_write_code_directly():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_deps():
# Prerequisites
context.src_workspace = context.git_repo.workdir / "snake1/snake1"

View file

@ -12,7 +12,6 @@ from metagpt.schema import CodingContext, Document
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_review(capfd):
code = """
def add(a, b):

View file

@ -27,14 +27,12 @@ class Person:
],
ids=["google", "numpy", "sphinx"],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_docstring(style: str, part: str):
ret = await WriteDocstring().run(code, style=style)
assert part in ret
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write():
code = await WriteDocstring.write_docstring(__file__)
assert code

View file

@ -8,21 +8,25 @@
"""
import pytest
from metagpt.actions import UserRequirement
from metagpt.actions import UserRequirement, WritePRD
from metagpt.const import DOCS_FILE_REPO, PRDS_FILE_REPO, REQUIREMENT_FILENAME
from metagpt.context import context
from metagpt.logs import logger
from metagpt.roles.product_manager import ProductManager
from metagpt.roles.role import RoleReactMode
from metagpt.schema import Message
from metagpt.utils.common import any_to_str
from metagpt.utils.file_repository import FileRepository
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_prd():
async def test_write_prd(new_filename):
product_manager = ProductManager()
requirements = "开发一个基于大语言模型与私有知识库的搜索引擎,希望可以基于大语言模型进行搜索总结"
await context.file_repo.save_file(filename=REQUIREMENT_FILENAME, content=requirements, relative_path=DOCS_FILE_REPO)
await FileRepository.save_file(filename=REQUIREMENT_FILENAME, content=requirements, relative_path=DOCS_FILE_REPO)
product_manager.rc.react_mode = RoleReactMode.BY_ORDER
prd = await product_manager.run(Message(content=requirements, cause_by=UserRequirement))
assert prd.cause_by == any_to_str(WritePRD)
logger.info(requirements)
logger.info(prd)
@ -30,3 +34,7 @@ async def test_write_prd():
assert prd is not None
assert prd.content != ""
assert context.git_repo.new_file_repository(relative_path=PRDS_FILE_REPO).changed_files
if __name__ == "__main__":
pytest.main([__file__, "-s"])

View file

@ -11,7 +11,6 @@ from metagpt.actions.write_prd_review import WritePRDReview
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_prd_review():
prd = """
Introduction: This is a new feature for our product.

View file

@ -46,7 +46,6 @@ CONTEXT = """
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_review():
write_review = WriteReview()
review = await write_review.run(CONTEXT)

View file

@ -16,7 +16,6 @@ from metagpt.actions.write_teaching_plan import WriteTeachingPlanPart
("topic", "context"),
[("Title", "Lesson 1: Learn to draw an apple."), ("Teaching Content", "Lesson 1: Learn to draw an apple.")],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_teaching_plan_part(topic, context):
action = WriteTeachingPlanPart(topic=topic, context=context)
rsp = await action.run()

View file

@ -13,7 +13,6 @@ from metagpt.schema import Document, TestingContext
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_test():
code = """
import random
@ -40,7 +39,6 @@ async def test_write_test():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_invalid_code(mocker):
# Mock the _aask method to return an invalid code string
mocker.patch.object(WriteTest, "_aask", return_value="Invalid Code String")

View file

@ -14,7 +14,6 @@ from metagpt.actions.write_tutorial import WriteContent, WriteDirectory
@pytest.mark.asyncio
@pytest.mark.parametrize(("language", "topic"), [("English", "Write a tutorial about Python")])
@pytest.mark.usefixtures("llm_mock")
async def test_write_directory(language: str, topic: str):
ret = await WriteDirectory(language=language).run(topic=topic)
assert isinstance(ret, dict)
@ -30,7 +29,6 @@ async def test_write_directory(language: str, topic: str):
("language", "topic", "directory"),
[("English", "Write a tutorial about Python", {"Introduction": ["What is Python?", "Why learn Python?"]})],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_content(language: str, topic: str, directory: Dict):
ret = await WriteContent(language=language, directory=directory).run(topic=topic)
assert isinstance(ret, str)

View file

@ -29,6 +29,16 @@ points = [
]
def assert_almost_equal(actual, expected):
delta = 1e-10
if isinstance(expected, list):
assert len(actual) == len(expected)
for ac, exp in zip(actual, expected):
assert abs(ac - exp) <= delta, f"{ac} is not within {delta} of {exp}"
else:
assert abs(actual - expected) <= delta, f"{actual} is not within {delta} of {expected}"
def test_qdrant_store():
qdrant_connection = QdrantConnection(memory=True)
vectors_config = VectorParams(size=2, distance=Distance.COSINE)
@ -42,30 +52,30 @@ def test_qdrant_store():
qdrant_store.add("Book", points)
results = qdrant_store.search("Book", query=[1.0, 1.0])
assert results[0]["id"] == 2
assert results[0]["score"] == 0.999106722578389
assert_almost_equal(results[0]["score"], 0.999106722578389)
assert results[1]["id"] == 7
assert results[1]["score"] == 0.9961650411397226
assert_almost_equal(results[1]["score"], 0.9961650411397226)
results = qdrant_store.search("Book", query=[1.0, 1.0], return_vector=True)
assert results[0]["id"] == 2
assert results[0]["score"] == 0.999106722578389
assert results[0]["vector"] == [0.7363563179969788, 0.6765939593315125]
assert_almost_equal(results[0]["score"], 0.999106722578389)
assert_almost_equal(results[0]["vector"], [0.7363563179969788, 0.6765939593315125])
assert results[1]["id"] == 7
assert results[1]["score"] == 0.9961650411397226
assert results[1]["vector"] == [0.7662628889083862, 0.6425272226333618]
assert_almost_equal(results[1]["score"], 0.9961650411397226)
assert_almost_equal(results[1]["vector"], [0.7662628889083862, 0.6425272226333618])
results = qdrant_store.search(
"Book",
query=[1.0, 1.0],
query_filter=Filter(must=[FieldCondition(key="rand_number", range=Range(gte=8))]),
)
assert results[0]["id"] == 8
assert results[0]["score"] == 0.9100373450784073
assert_almost_equal(results[0]["score"], 0.9100373450784073)
assert results[1]["id"] == 9
assert results[1]["score"] == 0.7127610621127889
assert_almost_equal(results[1]["score"], 0.7127610621127889)
results = qdrant_store.search(
"Book",
query=[1.0, 1.0],
query_filter=Filter(must=[FieldCondition(key="rand_number", range=Range(gte=8))]),
return_vector=True,
)
assert results[0]["vector"] == [0.35037919878959656, 0.9366079568862915]
assert results[1]["vector"] == [0.9999677538871765, 0.00802854634821415]
assert_almost_equal(results[0]["vector"], [0.35037919878959656, 0.9366079568862915])
assert_almost_equal(results[1]["vector"], [0.9999677538871765, 0.00802854634821415])

View file

@ -6,6 +6,8 @@
@File : test_skill_loader.py
@Desc : Unit tests.
"""
from pathlib import Path
import pytest
from metagpt.config import CONFIG
@ -23,7 +25,8 @@ async def test_suite():
{"id": 6, "name": "knowledge", "type": "builtin", "config": {}, "enabled": True},
{"id": 6, "name": "web_search", "type": "builtin", "config": {}, "enabled": True},
]
loader = await SkillsDeclaration.load()
pathname = Path(__file__).parent / "../../../docs/.well-known/skills.yaml"
loader = await SkillsDeclaration.load(skill_yaml_file_name=pathname)
skills = loader.get_skill_list()
assert skills
assert len(skills) >= 3

View file

@ -0,0 +1,8 @@
import pytest
@pytest.fixture(autouse=True)
def llm_mock(rsp_cache, mocker, request):
# An empty fixture to overwrite the global llm_mock fixture
# because in provider folder, we want to test the aask and aask functions for the specific models
pass

View file

@ -22,7 +22,6 @@ from tests.metagpt.roles.mock import MockMessages
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_architect():
# Prerequisites
filename = uuid.uuid4().hex + ".json"

View file

@ -13,7 +13,6 @@ from pydantic import BaseModel
from metagpt.actions.skill_action import SkillAction
from metagpt.actions.talk_action import TalkAction
from metagpt.config import CONFIG
from metagpt.logs import logger
from metagpt.memory.brain_memory import BrainMemory
from metagpt.roles.assistant import Assistant
from metagpt.schema import Message
@ -21,7 +20,6 @@ from metagpt.utils.common import any_to_str
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_run():
CONFIG.language = "Chinese"
@ -88,7 +86,7 @@ async def test_run():
if not has_action:
break
msg: Message = await role.act()
logger.info(msg)
# logger.info(msg)
assert msg
assert msg.cause_by == seed.cause_by
assert msg.content

View file

@ -30,7 +30,6 @@ from tests.metagpt.roles.mock import STRS_FOR_PARSING, TASKS, MockMessages
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_engineer():
# Prerequisites
rqno = "20231221155954.json"
@ -114,7 +113,6 @@ def test_todo():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_new_coding_context():
# Prerequisites
demo_path = Path(__file__).parent / "../../data/demo_project"

View file

@ -41,7 +41,6 @@ from metagpt.schema import Message
),
],
)
@pytest.mark.usefixtures("llm_mock")
async def test_invoice_ocr_assistant(query: str, invoice_path: Path, invoice_table_path: Path, expected_result: dict):
invoice_path = TEST_DATA_PATH / invoice_path
role = InvoiceOCRAssistant()

View file

@ -13,8 +13,7 @@ from tests.metagpt.roles.mock import MockMessages
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_product_manager():
async def test_product_manager(new_filename):
product_manager = ProductManager()
rsp = await product_manager.run(MockMessages.req)
logger.info(rsp)

View file

@ -13,7 +13,6 @@ from tests.metagpt.roles.mock import MockMessages
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_project_manager():
project_manager = ProjectManager()
rsp = await project_manager.run(MockMessages.system_design)

View file

@ -17,6 +17,7 @@ from metagpt.schema import Message
@pytest.mark.asyncio
@pytest.mark.skip
async def test_init():
class Inputs(BaseModel):
name: str
@ -103,7 +104,6 @@ async def test_new_file_name():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_run():
CONFIG.set_context({"language": "Chinese", "teaching_language": "English"})
lesson = """

View file

@ -15,7 +15,6 @@ from metagpt.roles.tutorial_assistant import TutorialAssistant
@pytest.mark.asyncio
@pytest.mark.parametrize(("language", "topic"), [("Chinese", "Write a tutorial about pip")])
@pytest.mark.usefixtures("llm_mock")
async def test_tutorial_assistant(language: str, topic: str):
role = TutorialAssistant(language=language)
msg = await role.run(topic)

View file

@ -21,7 +21,6 @@ def test_action_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
action = Action()
serialized_data = action.model_dump()

View file

@ -17,7 +17,6 @@ def test_architect_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_architect_deserialize():
role = Architect()
ser_role_dict = role.model_dump(by_alias=True)

View file

@ -8,7 +8,6 @@ from metagpt.actions.prepare_interview import PrepareInterview
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
action = PrepareInterview()
serialized_data = action.model_dump()

View file

@ -10,8 +10,7 @@ from metagpt.schema import Message
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_product_manager_deserialize():
async def test_product_manager_deserialize(new_filename):
role = ProductManager()
ser_role_dict = role.model_dump(by_alias=True)
new_role = ProductManager(**ser_role_dict)

View file

@ -18,7 +18,6 @@ def test_project_manager_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_project_manager_deserialize():
role = ProjectManager()
ser_role_dict = role.model_dump(by_alias=True)

View file

@ -69,7 +69,6 @@ def test_engineer_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_engineer_deserialize():
role = Engineer(use_code_review=True)
ser_role_dict = role.model_dump()
@ -97,7 +96,6 @@ def test_role_serdeser_save():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_role_serdeser_interrupt():
role_c = RoleC()
shutil.rmtree(SERDESER_PATH.joinpath("team"), ignore_errors=True)

View file

@ -109,7 +109,6 @@ async def test_team_recover_save():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_team_recover_multi_roles_save():
idea = "write a snake game"
stg_path = SERDESER_PATH.joinpath("team")

View file

@ -17,7 +17,6 @@ def test_write_design_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_deserialize():
context = CodingContext(
filename="test_code.py", design_doc=Document(content="write add function to calculate two numbers")

View file

@ -9,7 +9,6 @@ from metagpt.schema import CodingContext, Document
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_code_review_deserialize():
code_content = """
def div(a: int, b: int = 0):

View file

@ -22,7 +22,6 @@ def test_write_task_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_design_deserialize():
action = WriteDesign()
serialized_data = action.model_dump()
@ -32,7 +31,6 @@ async def test_write_design_deserialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_write_task_deserialize():
action = WriteTasks()
serialized_data = action.model_dump()

View file

@ -29,7 +29,6 @@ class Person:
],
ids=["google", "numpy", "sphinx"],
)
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize(style: str, part: str):
action = WriteDocstring()
serialized_data = action.model_dump()

View file

@ -9,7 +9,7 @@ from metagpt.actions import WritePRD
from metagpt.schema import Message
def test_action_serialize():
def test_action_serialize(new_filename):
action = WritePRD()
ser_action_dict = action.model_dump()
assert "name" in ser_action_dict
@ -17,8 +17,7 @@ def test_action_serialize():
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
async def test_action_deserialize(new_filename):
action = WritePRD()
serialized_data = action.model_dump()
new_action = WritePRD(**serialized_data)

View file

@ -42,7 +42,6 @@ CONTEXT = """
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_mock")
async def test_action_deserialize():
action = WriteReview()
serialized_data = action.model_dump()

View file

@ -9,7 +9,6 @@ from metagpt.actions.write_tutorial import WriteContent, WriteDirectory
@pytest.mark.asyncio
@pytest.mark.parametrize(("language", "topic"), [("English", "Write a tutorial about Python")])
@pytest.mark.usefixtures("llm_mock")
async def test_write_directory_deserialize(language: str, topic: str):
action = WriteDirectory()
serialized_data = action.model_dump()
@ -31,7 +30,6 @@ async def test_write_directory_deserialize(language: str, topic: str):
("language", "topic", "directory"),
[("English", "Write a tutorial about Python", {"Introduction": ["What is Python?", "Why learn Python?"]})],
)
@pytest.mark.usefixtures("llm_mock")
async def test_write_content_deserialize(language: str, topic: str, directory: Dict):
action = WriteContent(language=language, directory=directory)
serialized_data = action.model_dump()

View file

@ -18,6 +18,9 @@ from metagpt.actions.write_code import WriteCode
from metagpt.const import SYSTEM_DESIGN_FILE_REPO, TASK_FILE_REPO
from metagpt.schema import (
AIMessage,
ClassAttribute,
ClassMethod,
ClassView,
CodeSummarizeContext,
Document,
Message,
@ -153,5 +156,30 @@ def test_CodeSummarizeContext(file_list, want):
assert want in m
def test_class_view():
attr_a = ClassAttribute(name="a", value_type="int", default_value="0", visibility="+", abstraction=True)
assert attr_a.get_mermaid(align=1) == "\t+int a=0*"
attr_b = ClassAttribute(name="b", value_type="str", default_value="0", visibility="#", static=True)
assert attr_b.get_mermaid(align=0) == '#str b="0"$'
class_view = ClassView(name="A")
class_view.attributes = [attr_a, attr_b]
method_a = ClassMethod(name="run", visibility="+", abstraction=True)
assert method_a.get_mermaid(align=1) == "\t+run()*"
method_b = ClassMethod(
name="_test",
visibility="#",
static=True,
args=[ClassAttribute(name="a", value_type="str"), ClassAttribute(name="b", value_type="int")],
return_type="str",
)
assert method_b.get_mermaid(align=0) == "#_test(str a,int b):str$"
class_view.methods = [method_a, method_b]
assert (
class_view.get_mermaid(align=0)
== 'class A{\n\t+int a=0*\n\t#str b="0"$\n\t+run()*\n\t#_test(str a,int b):str$\n}\n'
)
if __name__ == "__main__":
pytest.main([__file__, "-s"])

View file

@ -16,14 +16,14 @@ runner = CliRunner()
@pytest.mark.asyncio
async def test_empty_team():
async def test_empty_team(new_filename):
# FIXME: we're now using "metagpt" cli, so the entrance should be replaced instead.
company = Team()
history = await company.run(idea="Build a simple search system. I will upload my files later.")
logger.info(history)
def test_startup():
def test_startup(new_filename):
args = ["Make a cli snake game"]
result = runner.invoke(app, args)
logger.info(result)

View file

@ -24,13 +24,14 @@ async def test_oas2_svc():
process = subprocess.Popen(["python", str(script_pathname)], cwd=str(workdir), env=env)
await asyncio.sleep(5)
url = "http://localhost:8080/openapi/greeting/dave"
headers = {"accept": "text/plain", "Content-Type": "application/json"}
data = {}
response = requests.post(url, headers=headers, json=data)
assert response.text == "Hello dave\n"
process.terminate()
try:
url = "http://localhost:8080/openapi/greeting/dave"
headers = {"accept": "text/plain", "Content-Type": "application/json"}
data = {}
response = requests.post(url, headers=headers, json=data)
assert response.text == "Hello dave\n"
finally:
process.terminate()
if __name__ == "__main__":

View file

@ -5,6 +5,8 @@
@Author : mashenquan
@File : test_metagpt_text_to_image.py
"""
import base64
from unittest.mock import AsyncMock
import pytest
@ -13,7 +15,14 @@ from metagpt.tools.metagpt_text_to_image import oas3_metagpt_text_to_image
@pytest.mark.asyncio
async def test_draw():
async def test_draw(mocker):
# mock
mock_post = mocker.patch("aiohttp.ClientSession.post")
mock_response = AsyncMock()
mock_response.status = 200
mock_response.json.return_value = {"images": [base64.b64encode(b"success")], "parameters": {"size": 1110}}
mock_post.return_value.__aenter__.return_value = mock_response
# Prerequisites
assert CONFIG.METAGPT_TEXT_TO_IMAGE_MODEL_URL

View file

@ -3,7 +3,7 @@
"""
@Time : 2023/12/26
@Author : mashenquan
@File : test_hello.py
@File : test_openapi_v3_hello.py
"""
import asyncio
import subprocess
@ -24,13 +24,14 @@ async def test_hello():
process = subprocess.Popen(["python", str(script_pathname)], cwd=workdir, env=env)
await asyncio.sleep(5)
url = "http://localhost:8082/openapi/greeting/dave"
headers = {"accept": "text/plain", "Content-Type": "application/json"}
data = {}
response = requests.post(url, headers=headers, json=data)
assert response.text == "Hello dave\n"
process.terminate()
try:
url = "http://localhost:8082/openapi/greeting/dave"
headers = {"accept": "text/plain", "Content-Type": "application/json"}
data = {}
response = requests.post(url, headers=headers, json=data)
assert response.text == "Hello dave\n"
finally:
process.terminate()
if __name__ == "__main__":

View file

@ -7,15 +7,20 @@
"""
from __future__ import annotations
import json
from pathlib import Path
from typing import Callable
import pytest
import tests.data.search
from metagpt.config import CONFIG
from metagpt.logs import logger
from metagpt.tools import SearchEngineType
from metagpt.tools.search_engine import SearchEngine
search_cache_path = Path(tests.data.search.__path__[0])
class MockSearchEnine:
async def run(self, query: str, max_results: int = 8, as_string: bool = True) -> str | list[dict[str, str]]:
@ -41,16 +46,23 @@ class MockSearchEnine:
(SearchEngineType.CUSTOM_ENGINE, MockSearchEnine().run, 6, False),
],
)
async def test_search_engine(search_engine_type, run_func: Callable, max_results: int, as_string: bool):
async def test_search_engine(search_engine_type, run_func: Callable, max_results: int, as_string: bool, aiohttp_mocker):
# Prerequisites
cache_json_path = None
if search_engine_type is SearchEngineType.SERPAPI_GOOGLE:
assert CONFIG.SERPAPI_API_KEY and CONFIG.SERPAPI_API_KEY != "YOUR_API_KEY"
cache_json_path = search_cache_path / f"serpapi-metagpt-{max_results}.json"
elif search_engine_type is SearchEngineType.DIRECT_GOOGLE:
assert CONFIG.GOOGLE_API_KEY and CONFIG.GOOGLE_API_KEY != "YOUR_API_KEY"
assert CONFIG.GOOGLE_CSE_ID and CONFIG.GOOGLE_CSE_ID != "YOUR_CSE_ID"
elif search_engine_type is SearchEngineType.SERPER_GOOGLE:
assert CONFIG.SERPER_API_KEY and CONFIG.SERPER_API_KEY != "YOUR_API_KEY"
cache_json_path = search_cache_path / f"serper-metagpt-{max_results}.json"
if cache_json_path:
with open(cache_json_path) as f:
data = json.load(f)
aiohttp_mocker.set_json(data)
search_engine = SearchEngine(search_engine_type, run_func)
rsp = await search_engine.run("metagpt", max_results, as_string)
logger.info(rsp)

View file

@ -14,7 +14,6 @@ from metagpt.tools.translator import Translator
@pytest.mark.asyncio
@pytest.mark.usefixtures("llm_api")
@pytest.mark.usefixtures("llm_mock")
async def test_translate(llm_api):
poetries = [
("Let life be beautiful like summer flowers", ""),

View file

@ -13,9 +13,9 @@ from metagpt.utils.parse_html import WebPage
@pytest.mark.parametrize(
"browser_type, use_proxy, kwagrs, url, urls",
[
("chromium", {"proxy": True}, {}, "https://deepwisdom.ai", ("https://deepwisdom.ai",)),
("firefox", {}, {"ignore_https_errors": True}, "https://deepwisdom.ai", ("https://deepwisdom.ai",)),
("webkit", {}, {"ignore_https_errors": True}, "https://deepwisdom.ai", ("https://deepwisdom.ai",)),
("chromium", {"proxy": True}, {}, "https://www.deepwisdom.ai", ("https://www.deepwisdom.ai",)),
("firefox", {}, {"ignore_https_errors": True}, "https://www.deepwisdom.ai", ("https://www.deepwisdom.ai",)),
("webkit", {}, {"ignore_https_errors": True}, "https://www.deepwisdom.ai", ("https://www.deepwisdom.ai",)),
],
ids=["chromium-normal", "firefox-normal", "webkit-normal"],
)
@ -23,6 +23,7 @@ async def test_scrape_web_page(browser_type, use_proxy, kwagrs, url, urls, proxy
global_proxy = CONFIG.global_proxy
try:
if use_proxy:
server, proxy = await proxy
CONFIG.global_proxy = proxy
browser = web_browser_engine_playwright.PlaywrightWrapper(browser_type=browser_type, **kwagrs)
result = await browser.run(url)
@ -35,6 +36,7 @@ async def test_scrape_web_page(browser_type, use_proxy, kwagrs, url, urls, proxy
assert len(results) == len(urls) + 1
assert all(("MetaGPT" in i.inner_text) for i in results)
if use_proxy:
server.close()
assert "Proxy:" in capfd.readouterr().out
finally:
CONFIG.global_proxy = global_proxy

View file

@ -26,6 +26,7 @@ async def test_scrape_web_page(browser_type, use_proxy, url, urls, proxy, capfd)
global_proxy = CONFIG.global_proxy
try:
if use_proxy:
server, proxy = await proxy
CONFIG.global_proxy = proxy
browser = web_browser_engine_selenium.SeleniumWrapper(browser_type=browser_type)
result = await browser.run(url)
@ -38,6 +39,7 @@ async def test_scrape_web_page(browser_type, use_proxy, url, urls, proxy, capfd)
assert len(results) == len(urls) + 1
assert all(("MetaGPT" in i.inner_text) for i in results)
if use_proxy:
server.close()
assert "Proxy:" in capfd.readouterr().out
finally:
CONFIG.global_proxy = global_proxy

View file

@ -36,6 +36,7 @@ from metagpt.utils.common import (
read_file_block,
read_json_file,
require_python_version,
split_namespace,
)
@ -163,6 +164,23 @@ class TestGetProjectRoot:
assert concat_namespace("a", "b", "c", "e") == "a:b:c:e"
assert concat_namespace("a", "b", "c", "e", "f") == "a:b:c:e:f"
@pytest.mark.parametrize(
("val", "want"),
[
(
"tests/metagpt/test_role.py:test_react:Input:subscription",
["tests/metagpt/test_role.py", "test_react", "Input", "subscription"],
),
(
"tests/metagpt/test_role.py:test_react:Input:goal",
["tests/metagpt/test_role.py", "test_react", "Input", "goal"],
),
],
)
def test_split_namespace(self, val, want):
res = split_namespace(val)
assert res == want
def test_read_json_file(self):
assert read_json_file(str(Path(__file__).parent / "../../data/ut_writer/yft_swaggerApi.json"), encoding="utf-8")
with pytest.raises(FileNotFoundError):

View file

@ -56,7 +56,7 @@ async def test_js_parser():
repo_parser = RepoParser(base_directory=data.path)
symbols = repo_parser.generate_symbols()
for s in symbols:
await GraphRepository.update_graph_db(graph_db=graph, file_info=s)
await GraphRepository.update_graph_db_with_file_info(graph_db=graph, file_info=s)
data = graph.json()
assert data
@ -71,11 +71,11 @@ async def test_codes():
for file_info in symbols:
for code_block in file_info.page_info:
try:
val = code_block.json(ensure_ascii=False)
val = code_block.model_dump_json()
assert val
except TypeError as e:
assert not e
await GraphRepository.update_graph_db(graph_db=graph, file_info=file_info)
await GraphRepository.update_graph_db_with_file_info(graph_db=graph, file_info=file_info)
data = graph.json()
assert data
print(data)

Some files were not shown because too many files have changed in this diff Show more