Merge branch 'main' into feature/json_write_prd

This commit is contained in:
femto 2023-09-21 12:00:10 +08:00 committed by GitHub
commit 7b34c433cd
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
69 changed files with 1911 additions and 56 deletions

View file

@@ -11,7 +11,7 @@ import inspect
import os
import platform
import re
from typing import List, Tuple
from typing import List, Tuple, Union
from metagpt.logs import logger
@@ -150,6 +150,53 @@ class OutputParser:
parsed_data[block] = content
return parsed_data
@classmethod
def extract_struct(cls, text: str, data_type: Union[type(list), type(dict)]) -> Union[list, dict]:
    """Extract and parse a list or dict structure embedded in free-form text.

    The text is expected to contain exactly one top-level structure of the
    requested type (which may itself contain nested lists/dicts); anything
    before the first opening bracket and after the last closing bracket is
    ignored.

    Args:
        text: The text containing the structure (dictionary or list).
        data_type: The type to extract — pass the ``list`` or ``dict`` type
            object itself (not the string ``"list"``/``"dict"``).

    Returns:
        The parsed ``list`` or ``dict``.

    Raises:
        Exception: If no structure is found, if parsing fails, or if the
            parsed value is not an instance of ``data_type``.

    Examples:
        >>> text = 'xxx [1, 2, ["a", "b", [3, 4]], {"x": 5, "y": [6, 7]}] xxx'
        >>> OutputParser.extract_struct(text, list)
        [1, 2, ['a', 'b', [3, 4]], {'x': 5, 'y': [6, 7]}]
        >>> text = 'xxx {"x": 1, "y": {"a": 2, "b": {"c": 3}}} xxx'
        >>> OutputParser.extract_struct(text, dict)
        {'x': 1, 'y': {'a': 2, 'b': {'c': 3}}}
    """
    # Find the first "[" or "{" and the last "]" or "}" to bound the structure.
    start_index = text.find("[" if data_type is list else "{")
    end_index = text.rfind("]" if data_type is list else "}")
    if start_index == -1 or end_index == -1:
        raise Exception(f"No {data_type} found in the text.")
    structure_text = text[start_index:end_index + 1]
    try:
        # literal_eval safely evaluates Python literals (no arbitrary code).
        result = ast.literal_eval(structure_text)
        # Ensure the parsed value is actually of the requested type
        # (the previous check accepted any list OR dict regardless of data_type).
        if isinstance(result, data_type):
            return result
        raise ValueError(f"The extracted structure is not a {data_type}.")
    except (ValueError, SyntaxError) as e:
        raise Exception(f"Error while extracting and parsing the {data_type}: {e}")
class CodeParser:
@classmethod

View file

@@ -15,6 +15,8 @@ from metagpt.logs import logger
class File:
"""A general util for file operations."""
CHUNK_SIZE = 64 * 1024
@classmethod
async def write(cls, root_path: Path, filename: str, content: bytes) -> Path:
"""Write the file content to the local specified path.
@@ -35,8 +37,39 @@ class File:
full_path = root_path / filename
async with aiofiles.open(full_path, mode="wb") as writer:
await writer.write(content)
logger.info(f"Successfully write file: {full_path}")
logger.debug(f"Successfully write file: {full_path}")
return full_path
except Exception as e:
logger.error(f"Error writing file: {e}")
raise e
raise e
@classmethod
async def read(cls, file_path: Path, chunk_size: int = None) -> bytes:
    """Read a file's binary content from the local filesystem in chunks.

    Args:
        file_path: The full path of the file, such as "/data/test.txt".
        chunk_size: The size of each read chunk in bytes; defaults to
            ``cls.CHUNK_SIZE`` (64 KiB) when ``None`` or 0.

    Returns:
        The complete binary content of the file.

    Raises:
        Exception: Re-raises any error that occurs while reading, after
            logging it.
    """
    chunk_size = chunk_size or cls.CHUNK_SIZE
    try:
        async with aiofiles.open(file_path, mode="rb") as reader:
            chunks = []
            while True:
                chunk = await reader.read(chunk_size)
                if not chunk:
                    break
                chunks.append(chunk)
        content = b"".join(chunks)
        logger.debug(f"Successfully read file, the path of file: {file_path}")
        return content
    except Exception as e:
        logger.error(f"Error reading file: {e}")
        # Bare `raise` preserves the original traceback (`raise e` restarts it here).
        raise

View file

@@ -0,0 +1,25 @@
# Add syntax highlighting for code snippets
from pygments import highlight as highlight_
from pygments.lexers import PythonLexer, SqlLexer
from pygments.formatters import TerminalFormatter, HtmlFormatter
def highlight(code: str, language: str = 'python', formatter: str = 'terminal'):
    """Highlight a code snippet with Pygments.

    Args:
        code: The source code to highlight.
        language: Either 'python' or 'sql' (case-insensitive).
        formatter: Output format, either 'terminal' or 'html' (case-insensitive).

    Returns:
        The highlighted code as a string in the chosen output format.

    Raises:
        ValueError: If the language or formatter is not supported.
    """
    # Dispatch tables keep the supported options in one obvious place.
    lexer_by_language = {'python': PythonLexer, 'sql': SqlLexer}
    formatter_by_name = {'terminal': TerminalFormatter, 'html': HtmlFormatter}

    lexer_cls = lexer_by_language.get(language.lower())
    if lexer_cls is None:
        raise ValueError(f"Unsupported language: {language}")

    formatter_cls = formatter_by_name.get(formatter.lower())
    if formatter_cls is None:
        raise ValueError(f"Unsupported formatter: {formatter}")

    # Delegate the actual highlighting to Pygments.
    return highlight_(code, lexer_cls(), formatter_cls())

View file

@@ -0,0 +1,34 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/9/13 12:29
@Author : femto Zheng
@File : make_sk_kernel.py
"""
import semantic_kernel as sk
from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import (
AzureChatCompletion,
)
from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import (
OpenAIChatCompletion,
)
from metagpt.config import CONFIG
def make_sk_kernel():
    """Build a semantic-kernel Kernel wired to the configured chat service.

    Selects Azure OpenAI when ``CONFIG.openai_api_type == "azure"``, otherwise
    plain OpenAI, and registers the service under the name "chat_completion".

    Returns:
        A ``semantic_kernel.Kernel`` with one chat-completion service attached.
    """
    kernel = sk.Kernel()
    # Build the appropriate chat-completion backend from the global config.
    if CONFIG.openai_api_type == "azure":
        service = AzureChatCompletion(CONFIG.deployment_name, CONFIG.openai_api_base, CONFIG.openai_api_key)
    else:
        service = OpenAIChatCompletion(
            CONFIG.openai_api_model, CONFIG.openai_api_key, org_id=None, endpoint=CONFIG.openai_api_base
        )
    kernel.add_chat_service("chat_completion", service)
    return kernel