update config init

This commit is contained in:
张建生 2025-02-28 16:36:29 +08:00
parent c4f169462f
commit 632e14d415
20 changed files with 21 additions and 41 deletions

View file

@@ -6,13 +6,12 @@ Author: garylin2099
import re
from metagpt.actions import Action
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.const import METAGPT_ROOT
from metagpt.logs import logger
from metagpt.roles import Role
from metagpt.schema import Message
config = Config.default()
EXAMPLE_CODE_FILE = METAGPT_ROOT / "examples/build_customized_agent.py"
MULTI_ACTION_AGENT_CODE_EXAMPLE = EXAMPLE_CODE_FILE.read_text()

View file

@@ -16,7 +16,7 @@ from pydantic import BaseModel
from metagpt.actions.requirement_analysis.framework.evaluate_framework import EvaluateFramework
from metagpt.actions.requirement_analysis.framework.write_framework import WriteFramework
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.utils.common import awrite
@@ -54,7 +54,7 @@ async def save_framework(
output_dir = (
Path(output_dir)
if output_dir
else Config.default().workspace.path / (datetime.now().strftime("%Y%m%d%H%M%ST") + uuid.uuid4().hex[0:8])
else config.workspace.path / (datetime.now().strftime("%Y%m%d%H%M%ST") + uuid.uuid4().hex[0:8])
)
output_dir.mkdir(parents=True, exist_ok=True)

View file

@@ -178,3 +178,4 @@ def merge_dict(dicts: Iterable[Dict]) -> Dict:
_CONFIG_CACHE = {}
config = Config.default()

View file

@@ -6,7 +6,7 @@ from typing import Any, Callable, Optional, TypeVar
from pydantic import BaseModel, ConfigDict, model_validator
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.exp_pool.context_builders import BaseContextBuilder, SimpleContextBuilder
from metagpt.exp_pool.manager import ExperienceManager, get_exp_manager
from metagpt.exp_pool.perfect_judges import BasePerfectJudge, SimplePerfectJudge
@@ -60,8 +60,6 @@ def exp_cache(
def decorator(func: Callable[..., ReturnType]) -> Callable[..., ReturnType]:
@functools.wraps(func)
async def get_or_create(args: Any, kwargs: Any) -> ReturnType:
config = Config.default()
if not config.exp_pool.enabled:
rsp = func(*args, **kwargs)
return await rsp if asyncio.iscoroutine(rsp) else rsp

View file

@@ -7,7 +7,7 @@ from pathlib import Path
import cv2
from metagpt.actions.action import Action
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.environment.android.android_env import AndroidEnv
from metagpt.environment.android.const import ADB_EXEC_FAIL
from metagpt.environment.android.env_space import (
@@ -55,7 +55,6 @@ class ManualRecord(Action):
self.task_desc_path.write_text(task_desc)
step = 0
config = Config.default()
extra_config = config.extra
while True:
step += 1

View file

@@ -8,7 +8,7 @@ import re
from pathlib import Path
from metagpt.actions.action import Action
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.ext.android_assistant.actions.parse_record_an import RECORD_PARSE_NODE
from metagpt.ext.android_assistant.prompts.operation_prompt import (
long_press_doc_template,
@@ -45,7 +45,6 @@ class ParseRecord(Action):
path.mkdir(parents=True, exist_ok=True)
task_desc = self.task_desc_path.read_text()
config = Config.default()
extra_config = config.extra
with open(self.record_path, "r") as record_file:

View file

@@ -6,7 +6,7 @@ import ast
from pathlib import Path
from metagpt.actions.action import Action
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.environment.android.android_env import AndroidEnv
from metagpt.environment.android.const import ADB_EXEC_FAIL
from metagpt.environment.android.env_space import (
@@ -101,7 +101,6 @@ next action. You should always prioritize these documented elements for interact
grid_on: bool,
env: AndroidEnv,
):
config = Config.default()
extra_config = config.extra
for path in [task_dir, docs_dir]:
path.mkdir(parents=True, exist_ok=True)

View file

@@ -6,7 +6,7 @@ import ast
from pathlib import Path
from metagpt.actions.action import Action
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.environment.android.android_env import AndroidEnv
from metagpt.environment.android.const import ADB_EXEC_FAIL
from metagpt.environment.android.env_space import (
@@ -80,7 +80,6 @@ class SelfLearnAndReflect(Action):
async def run_self_learn(
self, round_count: int, task_desc: str, last_act: str, task_dir: Path, env: AndroidEnv
) -> AndroidActionOutput:
config = Config.default()
extra_config = config.extra
screenshot_path: Path = env.observe(
EnvObsParams(obs_type=EnvObsType.GET_SCREENSHOT, ss_name=f"{round_count}_before", local_save_dir=task_dir)

View file

@@ -9,7 +9,7 @@ from typing import Optional
from pydantic import Field
from metagpt.actions.add_requirement import UserRequirement
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.const import EXAMPLE_PATH
from metagpt.ext.android_assistant.actions.manual_record import ManualRecord
from metagpt.ext.android_assistant.actions.parse_record import ParseRecord
@@ -38,7 +38,6 @@ class AndroidAssistant(Role):
def __init__(self, **data):
super().__init__(**data)
config = Config.default()
self._watch([UserRequirement, AndroidActionOutput])
extra_config = config.extra
self.task_desc = extra_config.get("task_desc", "Just explore any app in this phone!")

View file

@@ -10,7 +10,7 @@ from xml.etree.ElementTree import Element, iterparse
import cv2
import pyshine as ps
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.ext.android_assistant.utils.schema import (
ActionOp,
AndroidElement,
@@ -48,7 +48,6 @@ def get_id_from_element(elem: Element) -> str:
def traverse_xml_tree(xml_path: Path, elem_list: list[AndroidElement], attrib: str, add_index=False):
path = []
config = Config.default()
extra_config = config.extra
for event, elem in iterparse(str(xml_path), ["start", "end"]):
if event == "start":

View file

@@ -13,7 +13,7 @@ from typing import Union
from openai import OpenAI
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.logs import logger
@@ -48,7 +48,6 @@ def read_csv_to_list(curr_file: str, header=False, strip_trail=True):
def get_embedding(text, model: str = "text-embedding-ada-002"):
config = Config.default()
text = text.replace("\n", " ")
embedding = None
if not text:

View file

@@ -31,7 +31,7 @@ from llama_index.core.schema import (
TransformComponent,
)
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.rag.factories import (
get_index,
get_rag_embedding,
@@ -400,7 +400,6 @@ class SimpleEngine(RetrieverQueryEngine):
dict[file_type: BaseReader]
"""
file_extractor: dict[str:BaseReader] = {}
config = Config.default()
if config.omniparse.base_url:
pdf_parser = OmniParse(
api_key=config.omniparse.api_key,

View file

@@ -12,7 +12,7 @@ from llama_index.core.llms import (
from llama_index.core.llms.callbacks import llm_completion_callback
from pydantic import Field
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.provider.base_llm import BaseLLM
from metagpt.utils.async_helper import NestAsyncio
from metagpt.utils.token_counter import TOKEN_MAX
@@ -41,7 +41,6 @@ class RAGLLM(CustomLLM):
**kwargs
):
super().__init__(*args, **kwargs)
config = Config.default()
if context_window < 0:
context_window = TOKEN_MAX.get(config.llm.model, DEFAULT_CONTEXT_WINDOW)

View file

@@ -11,7 +11,7 @@ from llama_index.core.schema import TextNode
from llama_index.core.vector_stores.types import VectorStoreQueryMode
from pydantic import BaseModel, ConfigDict, Field, PrivateAttr, model_validator
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.configs.embedding_config import EmbeddingType
from metagpt.logs import logger
from metagpt.rag.interface import RAGObject
@@ -47,7 +47,6 @@ class FAISSRetrieverConfig(IndexRetrieverConfig):
@model_validator(mode="after")
def check_dimensions(self):
if self.dimensions == 0:
config = Config.default()
self.dimensions = config.embedding.dimensions or self._embedding_type_to_dimensions.get(
config.embedding.api_type, 1536
)
@@ -89,7 +88,6 @@ class MilvusRetrieverConfig(IndexRetrieverConfig):
@model_validator(mode="after")
def check_dimensions(self):
if self.dimensions == 0:
config = Config.default()
self.dimensions = config.embedding.dimensions or self._embedding_type_to_dimensions.get(
config.embedding.api_type, 1536
)

View file

@@ -27,7 +27,7 @@ def generate_repo(
recover_path=None,
):
"""Run the startup logic. Can be called from CLI or other Python scripts."""
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.context import Context
from metagpt.roles import (
Architect,
@@ -38,8 +38,6 @@ def generate_repo(
)
from metagpt.team import Team
config = Config.default()
config.update_via_cli(project_path, project_name, inc, reqa_file, max_auto_summarize_code)
ctx = Context(config=config)

View file

@@ -11,7 +11,7 @@ from llama_index.core.base.embeddings.base import BaseEmbedding
from llama_index.core.schema import NodeWithScore
from pydantic import BaseModel, Field, model_validator
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.context import Context
from metagpt.logs import logger
from metagpt.rag.engines import SimpleEngine
@@ -142,7 +142,6 @@ class IndexRepo(BaseModel):
return flat_nodes
if not self.embedding:
config = Config.default()
if self.model:
config.embedding.model = self.model
factory = RAGEmbeddingFactory(config)

View file

@@ -4,7 +4,7 @@
import json
from pathlib import Path
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.provider.openai_api import OpenAILLM as GPTAPI
from metagpt.utils.common import awrite
@@ -282,7 +282,6 @@ class UTGenerator:
"""Choose based on different calling methods"""
result = ""
if self.chatgpt_method == "API":
config = Config.default()
result = await GPTAPI(config.get_openai_llm()).aask_code(messages=messages)
return result

View file

@@ -7,11 +7,10 @@
"""
from llama_index.embeddings.openai import OpenAIEmbedding
from metagpt.config2 import Config
from metagpt.config2 import config
def get_embedding() -> OpenAIEmbedding:
config = Config.default()
llm = config.get_openai_llm()
if llm is None:
raise ValueError("To use OpenAIEmbedding, please ensure that config.llm.api_type is correctly set to 'openai'.")

View file

@@ -13,7 +13,7 @@ from typing import Optional, Tuple, Union
import aiofiles
from fsspec.implementations.memory import MemoryFileSystem as _MemoryFileSystem
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.logs import logger
from metagpt.utils import read_docx
from metagpt.utils.common import aread, aread_bin, awrite_bin, check_http_endpoint
@@ -190,7 +190,6 @@ class File:
@staticmethod
async def _read_omniparse_config() -> Tuple[str, int]:
config = Config.default()
if config.omniparse and config.omniparse.base_url:
return config.omniparse.base_url, config.omniparse.timeout
return "", 0

View file

@@ -13,11 +13,10 @@ from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion impo
OpenAIChatCompletion,
)
from metagpt.config2 import Config
from metagpt.config2 import config
def make_sk_kernel():
config = Config.default()
kernel = sk.Kernel()
if llm := config.get_azure_llm():
kernel.add_chat_service(