feat: merge geekan/dev

This commit is contained in:
莘权 马 2023-12-19 17:03:49 +08:00
commit 4ffac9c391
25 changed files with 242 additions and 213 deletions

View file

@ -20,8 +20,7 @@ from metagpt.llm import LLM
from metagpt.logs import logger
from metagpt.provider.base_gpt_api import BaseGPTAPI
from metagpt.provider.postprecess.llm_output_postprecess import llm_output_postprecess
from metagpt.utils.common import OutputParser
from metagpt.utils.utils import general_after_log
from metagpt.utils.common import OutputParser, general_after_log
class Action(ABC):

View file

@ -43,7 +43,7 @@ Fill in the above nodes based on the format example.
"""
def dict_to_markdown(d, prefix="-", postfix="\n"):
def dict_to_markdown(d, prefix="###", postfix="\n"):
markdown_str = ""
for key, value in d.items():
markdown_str += f"{prefix} {key}: {value}{postfix}"
@ -52,6 +52,7 @@ def dict_to_markdown(d, prefix="-", postfix="\n"):
class ActionNode:
"""ActionNode is a tree of nodes."""
mode: str
# Action Context
@ -70,8 +71,15 @@ class ActionNode:
content: str
instruct_content: BaseModel
def __init__(self, key: str, expected_type: Type, instruction: str, example: str, content: str = "",
children: dict[str, "ActionNode"] = None):
def __init__(
self,
key: str,
expected_type: Type,
instruction: str,
example: str,
content: str = "",
children: dict[str, "ActionNode"] = None,
):
self.key = key
self.expected_type = expected_type
self.instruction = instruction

View file

@ -44,7 +44,7 @@ FULL_API_SPEC = ActionNode(
key="Full API spec",
expected_type=str,
instruction="Describe all APIs using OpenAPI 3.0 spec that may be used by both frontend and backend. If front-end "
"and back-end communication is not required, leave it blank.",
"and back-end communication is not required, leave it blank.",
example="openapi: 3.0.0 ...",
)

View file

@ -16,13 +16,13 @@
class.
"""
import subprocess
import traceback
from typing import Tuple
from metagpt.actions.action import Action
from metagpt.config import CONFIG
from metagpt.logs import logger
from metagpt.schema import RunCodeResult
from metagpt.utils.exceptions import handle_exception
PROMPT_TEMPLATE = """
Role: You are a senior development and qa engineer, your role is summarize the code running result.
@ -78,15 +78,12 @@ class RunCode(Action):
super().__init__(name, context, llm)
@classmethod
@handle_exception
async def run_text(cls, code) -> Tuple[str, str]:
try:
# We will document_store the result in this dictionary
namespace = {}
exec(code, namespace)
return namespace.get("result", ""), ""
except Exception:
# If there is an error in the code, return the error message
return "", traceback.format_exc()
# We will document_store the result in this dictionary
namespace = {}
exec(code, namespace)
return namespace.get("result", ""), ""
@classmethod
async def run_script(cls, working_directory, additional_python_paths=[], command=[]) -> Tuple[str, str]:
@ -145,18 +142,17 @@ class RunCode(Action):
rsp = await self._aask(prompt)
return RunCodeResult(summary=rsp, stdout=outs, stderr=errs)
@staticmethod
@handle_exception(exception_type=subprocess.CalledProcessError)
def _install_via_subprocess(cmd, check, cwd, env):
    """Run an install command as a subprocess.

    A ``subprocess.CalledProcessError`` is caught, logged, and converted into the
    decorator's default return (``None``) by ``handle_exception``.

    :param cmd: command and arguments as a list (``shell=False`` semantics).
    :param check: forwarded to ``subprocess.run``; ``True`` raises on non-zero exit.
    :param cwd: working directory for the child process.
    :param env: environment mapping for the child process.
    :return: the ``CompletedProcess`` on success, or ``None`` on failure.
    """
    return subprocess.run(cmd, check=check, cwd=cwd, env=env)
@staticmethod
def _install_dependencies(working_directory, env):
install_command = ["python", "-m", "pip", "install", "-r", "requirements.txt"]
logger.info(" ".join(install_command))
try:
subprocess.run(install_command, check=True, cwd=working_directory, env=env)
except subprocess.CalledProcessError as e:
logger.warning(f"{e}")
RunCode._install_via_subprocess(install_command, check=True, cwd=working_directory, env=env)
install_pytest_command = ["python", "-m", "pip", "install", "pytest"]
logger.info(" ".join(install_pytest_command))
try:
subprocess.run(install_pytest_command, check=True, cwd=working_directory, env=env)
except subprocess.CalledProcessError as e:
logger.warning(f"{e}")
RunCode._install_via_subprocess(install_pytest_command, check=True, cwd=working_directory, env=env)

View file

@ -154,11 +154,15 @@ class WriteCodeReview(Action):
code=iterative_code,
filename=self.context.code_doc.filename,
)
cr_prompt = EXAMPLE_AND_INSTRUCTION.format(format_example=format_example, )
cr_prompt = EXAMPLE_AND_INSTRUCTION.format(
format_example=format_example,
)
logger.info(
f"Code review and rewrite {self.context.code_doc.filename}: {i+1}/{k} | {len(iterative_code)=}, {len(self.context.code_doc.content)=}"
)
result, rewrited_code = await self.write_code_review_and_rewrite(context_prompt, cr_prompt, self.context.code_doc.filename)
result, rewrited_code = await self.write_code_review_and_rewrite(
context_prompt, cr_prompt, self.context.code_doc.filename
)
if "LBTM" in result:
iterative_code = rewrited_code
elif "LGTM" in result:

View file

@ -54,6 +54,7 @@ class Config(metaclass=Singleton):
self.cost_manager = CostManager(**json.loads(cost_data)) if cost_data else CostManager()
logger.info("Config loading done.")
self._update()
logger.info(f"OpenAI API Model: {self.openai_api_model}")
def _update(self):
self.global_proxy = self._get("GLOBAL_PROXY")
@ -85,7 +86,7 @@ class Config(metaclass=Singleton):
self.openai_api_type = self._get("OPENAI_API_TYPE")
self.openai_api_version = self._get("OPENAI_API_VERSION")
self.openai_api_rpm = self._get("RPM", 3)
self.openai_api_model = self._get("OPENAI_API_MODEL", "gpt-4")
self.openai_api_model = self._get("OPENAI_API_MODEL", "gpt-4-1106-preview")
self.max_tokens_rsp = self._get("MAX_TOKENS", 2048)
self.deployment_name = self._get("DEPLOYMENT_NAME")
self.deployment_id = self._get("DEPLOYMENT_ID")

View file

@ -1,28 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/5/28 14:54
@Author : alexanderwu
@File : inspect_module.py
"""
import inspect
import metagpt # replace with your module
def print_classes_and_functions(module):
    """Print every member of *module*, labelling classes and functions, then dump dir().

    FIXME: NOT WORK..
    """
    for member_name, member in inspect.getmembers(module):
        if inspect.isclass(member):
            print(f"Class: {member_name}")
        elif inspect.isfunction(member):
            print(f"Function: {member_name}")
        else:
            print(member_name)
    print(dir(module))
if __name__ == "__main__":
print_classes_and_functions(metagpt)

View file

@ -266,7 +266,7 @@ class OpenAIGPTAPI(BaseGPTAPI, RateLimiter):
)
return usage
except Exception as e:
logger.error("usage calculation failed!", e)
logger.error(f"{self.model} usage calculation failed!", e)
return CompletionUsage(prompt_tokens=0, completion_tokens=0, total_tokens=0)
async def acompletion_batch(self, batch: list[list[dict]], timeout=3) -> list[dict]:

View file

@ -15,17 +15,17 @@ from pydantic import BaseModel, Field
from metagpt.config import CONFIG
from metagpt.logs import logger
from metagpt.utils.exceptions import handle_exception
class RepoParser(BaseModel):
base_directory: Path = Field(default=None)
def parse_file(self, file_path):
@classmethod
@handle_exception(exception_type=Exception, default_return=[])
def _parse_file(cls, file_path: Path) -> list:
"""Parse a Python file in the repository."""
try:
return ast.parse(file_path.read_text()).body
except:
return []
return ast.parse(file_path.read_text()).body
def extract_class_and_function_info(self, tree, file_path):
"""Extract class, function, and global variable information from the AST."""
@ -51,6 +51,7 @@ class RepoParser(BaseModel):
def generate_symbols(self):
files_classes = []
directory = self.base_directory
matching_files = []
extensions = ["*.py", "*.js"]
for ext in extensions:
@ -94,5 +95,10 @@ def main():
logger.info(pformat(symbols))
def error():
    """Deliberately exercise the exception-logging path of RepoParser._parse_file.

    The ``handle_exception`` decorator on ``_parse_file`` is expected to log the
    raised exception and return its default (``[]``) instead of propagating.
    """
    # NOTE(review): assumes "test.py" does not exist in the CWD so parsing fails — confirm.
    RepoParser._parse_file(Path("test.py"))
if __name__ == "__main__":
main()
error()

View file

@ -28,7 +28,7 @@ class Architect(Role):
profile: str = "Architect",
goal: str = "design a concise, usable, complete software system",
constraints: str = "make sure the architecture is simple enough and use appropriate open source libraries."
"Use same language as user requirement"
"Use same language as user requirement",
) -> None:
"""Initializes the Architect with given attributes."""
super().__init__(name, profile, goal, constraints)

View file

@ -26,7 +26,7 @@ class ProjectManager(Role):
name: str = "Eve",
profile: str = "Project Manager",
goal: str = "break down tasks according to PRD/technical design, generate a task list, and analyze task "
"dependencies to start with the prerequisite modules",
"dependencies to start with the prerequisite modules",
constraints: str = "use same language as user requirement",
) -> None:
"""

View file

@ -21,7 +21,7 @@ import uuid
from asyncio import Queue, QueueEmpty, wait_for
from json import JSONDecodeError
from pathlib import Path
from typing import Dict, List, Optional, Set, TypedDict
from typing import Dict, List, Optional, Set, Type, TypedDict, TypeVar
from pydantic import BaseModel, Field
@ -36,6 +36,7 @@ from metagpt.const import (
)
from metagpt.logs import logger
from metagpt.utils.common import any_to_str, any_to_str_set
from metagpt.utils.exceptions import handle_exception
class RawMessage(TypedDict):
@ -160,6 +161,7 @@ class Message(BaseModel):
return self.json(exclude_none=True)
@staticmethod
@handle_exception(exception_type=JSONDecodeError, default_return=None)
def load(val):
"""Convert the json string to object."""
try:
@ -255,50 +257,46 @@ class MessageQueue:
return json.dumps(lst)
@staticmethod
def load(self, v) -> "MessageQueue":
def load(i) -> "MessageQueue":
"""Convert the json string to the `MessageQueue` object."""
q = MessageQueue()
queue = MessageQueue()
try:
lst = json.loads(v)
lst = json.loads(i)
for i in lst:
msg = Message(**i)
q.push(msg)
queue.push(msg)
except JSONDecodeError as e:
logger.warning(f"JSON load failed: {v}, error:{e}")
logger.warning(f"JSON load failed: {i}, error:{e}")
return q
return queue
class CodingContext(BaseModel):
# Define a generic type variable (bound to pydantic BaseModel) used by BaseContext.loads.
T = TypeVar("T", bound="BaseModel")
class BaseContext(BaseModel):
    """Base class for context models that can be rebuilt from a JSON string."""

    @classmethod
    @handle_exception
    def loads(cls: Type[T], val: str) -> Optional[T]:
        """Deserialize *val* (JSON text) into an instance of *cls*.

        The ``handle_exception`` decorator converts any parse or validation
        failure into a ``None`` return instead of raising.
        """
        i = json.loads(val)
        return cls(**i)
class CodingContext(BaseContext):
filename: str
design_doc: Optional[Document]
task_doc: Optional[Document]
code_doc: Optional[Document]
@staticmethod
def loads(val: str) -> CodingContext | None:
try:
m = json.loads(val)
return CodingContext(**m)
except Exception:
return None
class TestingContext(BaseModel):
class TestingContext(BaseContext):
filename: str
code_doc: Document
test_doc: Optional[Document]
@staticmethod
def loads(val: str) -> TestingContext | None:
try:
m = json.loads(val)
return TestingContext(**m)
except Exception:
return None
class RunCodeContext(BaseModel):
class RunCodeContext(BaseContext):
mode: str = "script"
code: Optional[str]
code_filename: str = ""
@ -310,28 +308,12 @@ class RunCodeContext(BaseModel):
output_filename: Optional[str]
output: Optional[str]
@staticmethod
def loads(val: str) -> RunCodeContext | None:
try:
m = json.loads(val)
return RunCodeContext(**m)
except Exception:
return None
class RunCodeResult(BaseModel):
class RunCodeResult(BaseContext):
summary: str
stdout: str
stderr: str
@staticmethod
def loads(val: str) -> RunCodeResult | None:
try:
m = json.loads(val)
return RunCodeResult(**m)
except Exception:
return None
class CodeSummarizeContext(BaseModel):
design_filename: str = ""
@ -355,5 +337,5 @@ class CodeSummarizeContext(BaseModel):
return hash((self.design_filename, self.task_filename))
class BugFixContext(BaseModel):
class BugFixContext(BaseContext):
filename: str = ""

View file

@ -11,6 +11,8 @@ from typing import List
import meilisearch
from meilisearch.index import Index
from metagpt.utils.exceptions import handle_exception
class DataSource:
def __init__(self, name: str, url: str):
@ -34,11 +36,7 @@ class MeilisearchEngine:
index.add_documents(documents)
self.set_index(index)
@handle_exception(exception_type=Exception, default_return=[])
def search(self, query):
try:
search_results = self._index.search(query)
return search_results["hits"]
except Exception as e:
# Handle MeiliSearch API errors
print(f"MeiliSearch API error: {e}")
return []
search_results = self._index.search(query)
return search_results["hits"]

View file

@ -17,11 +17,17 @@ import inspect
import os
import platform
import re
import typing
from typing import List, Tuple, Union
import aiofiles
import loguru
from tenacity import RetryCallState, _utils
from metagpt.config import CONFIG
from metagpt.const import MESSAGE_ROUTE_TO_ALL
from metagpt.logs import logger
from metagpt.utils.exceptions import handle_exception
def check_cmd_exists(command) -> int:
@ -292,9 +298,6 @@ class NoMoneyException(Exception):
def print_members(module, indent=0):
"""
https://stackoverflow.com/questions/1796180/how-can-i-get-a-list-of-all-classes-within-current-module-in-python
:param module:
:param indent:
:return:
"""
prefix = " " * indent
for name, obj in inspect.getmembers(module):
@ -312,6 +315,7 @@ def print_members(module, indent=0):
def parse_recipient(text):
# FIXME: use ActionNode instead.
pattern = r"## Send To:\s*([A-Za-z]+)\s*?" # hard code for now
recipient = re.search(pattern, text)
if recipient:
@ -328,18 +332,12 @@ def get_class_name(cls) -> str:
return f"{cls.__module__}.{cls.__name__}"
def get_object_name(obj) -> str:
"""Return class name of the object"""
cls = type(obj)
return f"{cls.__module__}.{cls.__name__}"
def any_to_str(val) -> str:
def any_to_str(val: str | typing.Callable) -> str:
"""Return the class name or the class name of the object, or 'val' if it's a string type."""
if isinstance(val, str):
return val
if not callable(val):
return get_object_name(val)
return get_class_name(type(val))
return get_class_name(val)
@ -347,21 +345,28 @@ def any_to_str(val) -> str:
def any_to_str_set(val) -> set:
"""Convert any type to string set."""
res = set()
if isinstance(val, dict) or isinstance(val, list) or isinstance(val, set) or isinstance(val, tuple):
# Check if the value is iterable, but not a string (since strings are technically iterable)
if isinstance(val, (dict, list, set, tuple)):
# Special handling for dictionaries to iterate over values
if isinstance(val, dict):
val = val.values()
for i in val:
res.add(any_to_str(i))
else:
res.add(any_to_str(val))
return res
def is_subscribed(message, tags):
def is_subscribed(message: "Message", tags: set):
"""Return whether it's consumer"""
if MESSAGE_ROUTE_TO_ALL in message.send_to:
return True
for t in tags:
if t in message.send_to:
for i in tags:
if i in message.send_to:
return True
return False
@ -397,3 +402,44 @@ def format_value(value):
def concat_namespace(*args) -> str:
return ":".join(str(value) for value in args)
def general_after_log(i: "loguru.Logger", sec_format: str = "%0.3f") -> typing.Callable[["RetryCallState"], None]:
    """Build a tenacity ``after`` hook that logs each failed, retried call.

    The generated callable logs one error line containing the wrapped function's
    name, the elapsed time of the call (formatted with *sec_format*), the attempt
    ordinal, and the exception that was raised.

    :param i: loguru Logger instance used to emit the error message.
    :param sec_format: printf-style format for the seconds since the call started;
                       defaults to three decimal places.
    :return: a callable accepting a RetryCallState, suitable for tenacity's ``after=``.
    """

    def _log(retry_state: "RetryCallState") -> None:
        # tenacity may not have captured the callable; fall back to a placeholder name.
        fn_name = "<unknown>" if retry_state.fn is None else _utils.get_callback_name(retry_state.fn)
        i.error(
            f"Finished call to '{fn_name}' after {sec_format % retry_state.seconds_since_start}(s), "
            f"this was the {_utils.to_ordinal(retry_state.attempt_number)} time calling it. "
            f"exp: {retry_state.outcome.exception()}"
        )

    return _log
@handle_exception
async def aread(file_path: str) -> str:
    """Asynchronously read a text file and return its full contents.

    Errors are logged and converted to the decorator's default return by
    ``handle_exception``.
    """
    async with aiofiles.open(str(file_path), mode="r") as reader:
        return await reader.read()

View file

@ -25,7 +25,7 @@ def py_make_scanner(context):
except IndexError:
raise StopIteration(idx) from None
if nextchar == '"' or nextchar == "'":
if nextchar in ("'", '"'):
if idx + 2 < len(string) and string[idx + 1] == nextchar and string[idx + 2] == nextchar:
# Handle the case where the next two characters are the same as nextchar
return parse_string(string, idx + 3, strict, delimiter=nextchar * 3) # triple quote

View file

@ -15,7 +15,8 @@ from typing import Set
import aiofiles
from metagpt.config import CONFIG
from metagpt.logs import logger
from metagpt.utils.common import aread
from metagpt.utils.exceptions import handle_exception
class DependencyFile:
@ -36,21 +37,14 @@ class DependencyFile:
"""Load dependencies from the file asynchronously."""
if not self._filename.exists():
return
try:
async with aiofiles.open(str(self._filename), mode="r") as reader:
data = await reader.read()
self._dependencies = json.loads(data)
except Exception as e:
logger.error(f"Failed to load {str(self._filename)}, error:{e}")
self._dependencies = json.loads(await aread(self._filename))
@handle_exception
async def save(self):
"""Save dependencies to the file asynchronously."""
try:
data = json.dumps(self._dependencies)
async with aiofiles.open(str(self._filename), mode="w") as writer:
await writer.write(data)
except Exception as e:
logger.error(f"Failed to save {str(self._filename)}, error:{e}")
data = json.dumps(self._dependencies)
async with aiofiles.open(str(self._filename), mode="w") as writer:
await writer.write(data)
async def update(self, filename: Path | str, dependencies: Set[Path | str], persist=True):
"""Update dependencies for a file asynchronously.

View file

@ -0,0 +1,59 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/12/19 14:46
@Author : alexanderwu
@File : exceptions.py
"""
import asyncio
import functools
import traceback
from typing import Any, Callable, Tuple, Type, TypeVar, Union
from metagpt.logs import logger
ReturnType = TypeVar("ReturnType")


def handle_exception(
    _func: Callable[..., ReturnType] = None,
    *,
    exception_type: Union[Type[Exception], Tuple[Type[Exception], ...]] = Exception,
    default_return: Any = None,
) -> Callable[..., ReturnType]:
    """Decorator that traps *exception_type* and returns *default_return* instead.

    Works both bare (``@handle_exception``) and parameterized
    (``@handle_exception(exception_type=..., default_return=...)``), and supports
    sync as well as async callables. Failures are logged with the caller's frame
    (``depth=1``) plus the full traceback.
    """

    def decorator(func: Callable[..., ReturnType]) -> Callable[..., ReturnType]:
        is_async = asyncio.iscoroutinefunction(func)

        @functools.wraps(func)
        def sync_wrapper(*args: Any, **kwargs: Any) -> ReturnType:
            try:
                return func(*args, **kwargs)
            except exception_type as e:
                logger.opt(depth=1).error(
                    f"Calling {func.__name__} with args: {args}, kwargs: {kwargs} failed: {e}, "
                    f"stack: {traceback.format_exc()}"
                )
                return default_return

        @functools.wraps(func)
        async def async_wrapper(*args: Any, **kwargs: Any) -> ReturnType:
            try:
                return await func(*args, **kwargs)
            except exception_type as e:
                logger.opt(depth=1).error(
                    f"Calling {func.__name__} with args: {args}, kwargs: {kwargs} failed: {e}, "
                    f"stack: {traceback.format_exc()}"
                )
                return default_return

        return async_wrapper if is_async else sync_wrapper

    # Bare usage (@handle_exception) passes the function directly; parameterized
    # usage gets the decorator factory back.
    return decorator if _func is None else decorator(_func)

View file

@ -11,6 +11,7 @@ from pathlib import Path
import aiofiles
from metagpt.logs import logger
from metagpt.utils.exceptions import handle_exception
class File:
@ -19,6 +20,7 @@ class File:
CHUNK_SIZE = 64 * 1024
@classmethod
@handle_exception
async def write(cls, root_path: Path, filename: str, content: bytes) -> Path:
"""Write the file content to the local specified path.
@ -33,18 +35,15 @@ class File:
Raises:
Exception: If an unexpected error occurs during the file writing process.
"""
try:
root_path.mkdir(parents=True, exist_ok=True)
full_path = root_path / filename
async with aiofiles.open(full_path, mode="wb") as writer:
await writer.write(content)
logger.debug(f"Successfully write file: {full_path}")
return full_path
except Exception as e:
logger.error(f"Error writing file: {e}")
raise e
root_path.mkdir(parents=True, exist_ok=True)
full_path = root_path / filename
async with aiofiles.open(full_path, mode="wb") as writer:
await writer.write(content)
logger.debug(f"Successfully write file: {full_path}")
return full_path
@classmethod
@handle_exception
async def read(cls, file_path: Path, chunk_size: int = None) -> bytes:
"""Partitioning read the file content from the local specified path.
@ -58,18 +57,14 @@ class File:
Raises:
Exception: If an unexpected error occurs during the file reading process.
"""
try:
chunk_size = chunk_size or cls.CHUNK_SIZE
async with aiofiles.open(file_path, mode="rb") as reader:
chunks = list()
while True:
chunk = await reader.read(chunk_size)
if not chunk:
break
chunks.append(chunk)
content = b"".join(chunks)
logger.debug(f"Successfully read file, the path of file: {file_path}")
return content
except Exception as e:
logger.error(f"Error reading file: {e}")
raise e
chunk_size = chunk_size or cls.CHUNK_SIZE
async with aiofiles.open(file_path, mode="rb") as reader:
chunks = list()
while True:
chunk = await reader.read(chunk_size)
if not chunk:
break
chunks.append(chunk)
content = b"".join(chunks)
logger.debug(f"Successfully read file, the path of file: {file_path}")
return content

View file

@ -19,6 +19,7 @@ import aiofiles
from metagpt.config import CONFIG
from metagpt.logs import logger
from metagpt.schema import Document
from metagpt.utils.common import aread
from metagpt.utils.json_to_markdown import json_to_markdown
@ -97,15 +98,7 @@ class FileRepository:
path_name = self.workdir / filename
if not path_name.exists():
return None
try:
async with aiofiles.open(str(path_name), mode="r") as reader:
doc.content = await reader.read()
except FileNotFoundError as e:
logger.info(f"open {str(path_name)} failed:{e}")
return None
except Exception as e:
logger.info(f"open {str(path_name)} failed:{e}")
return None
doc.content = await aread(path_name)
return doc
async def get_all(self) -> List[Document]:

View file

@ -56,6 +56,7 @@ def count_message_tokens(messages, model="gpt-3.5-turbo-0613"):
if model in {
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-1106",
"gpt-4-0314",
"gpt-4-32k-0314",
@ -63,7 +64,7 @@ def count_message_tokens(messages, model="gpt-3.5-turbo-0613"):
"gpt-4-32k-0613",
"gpt-4-1106-preview",
}:
tokens_per_message = 3
tokens_per_message = 3 # # every reply is primed with <|start|>assistant<|message|>
tokens_per_name = 1
elif model == "gpt-3.5-turbo-0301":
tokens_per_message = 4 # every message follows <|start|>{role/name}\n{content}<|end|>\n

View file

@ -1,22 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Desc :
import typing
from tenacity import _utils
def general_after_log(logger: "loguru.Logger", sec_format: str = "%0.3f") -> typing.Callable[["RetryCallState"], None]:
    """Build a tenacity ``after`` hook that logs details of each retried call.

    :param logger: loguru Logger instance used to emit the error message.
    :param sec_format: printf-style format for elapsed seconds (defaults to three decimals).
    :return: a callable accepting a RetryCallState, suitable for tenacity's ``after=``.
    """

    def log_it(retry_state: "RetryCallState") -> None:
        # tenacity may not have captured the callable; fall back to a placeholder name.
        if retry_state.fn is None:
            fn_name = "<unknown>"
        else:
            fn_name = _utils.get_callback_name(retry_state.fn)
        # One error line: function name, elapsed time, attempt ordinal, raised exception.
        logger.error(
            f"Finished call to '{fn_name}' after {sec_format % retry_state.seconds_since_start}(s), "
            f"this was the {_utils.to_ordinal(retry_state.attempt_number)} time calling it. "
            f"exp: {retry_state.outcome.exception()}"
        )

    return log_it