2024-01-04 21:16:23 +08:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
"""
|
|
|
|
|
@Time : 2024/1/4 16:32
|
|
|
|
|
@Author : alexanderwu
|
|
|
|
|
@File : context.py
|
|
|
|
|
"""
|
|
|
|
|
import os
|
|
|
|
|
from pathlib import Path
|
2024-01-12 15:27:07 +08:00
|
|
|
from typing import Any, Optional
|
2024-01-04 21:16:23 +08:00
|
|
|
|
2024-01-09 17:39:09 +08:00
|
|
|
from pydantic import BaseModel, ConfigDict
|
2024-01-09 14:16:32 +08:00
|
|
|
|
2024-01-04 21:16:23 +08:00
|
|
|
from metagpt.config2 import Config
|
2024-01-11 15:10:07 +08:00
|
|
|
from metagpt.configs.llm_config import LLMConfig
|
2024-01-04 21:16:23 +08:00
|
|
|
from metagpt.provider.base_llm import BaseLLM
|
2024-01-09 15:56:40 +08:00
|
|
|
from metagpt.provider.llm_provider_registry import create_llm_instance
|
2024-01-04 21:16:23 +08:00
|
|
|
from metagpt.utils.cost_manager import CostManager
|
|
|
|
|
from metagpt.utils.git_repository import GitRepository
|
2024-01-15 16:37:42 +08:00
|
|
|
from metagpt.utils.project_repo import ProjectRepo
|
2024-01-04 21:16:23 +08:00
|
|
|
|
|
|
|
|
|
2024-01-09 14:16:32 +08:00
|
|
|
class AttrDict(BaseModel):
    """Namespace-style container backed by the instance ``__dict__``.

    Entries are reachable both as attributes and through ``set``/``get``/
    ``remove``; a missing attribute reads as ``None`` instead of raising,
    which keeps call sites free of ``hasattr`` guards while remaining a
    valid Pydantic model.
    """

    model_config = ConfigDict(extra="allow")

    def __init__(self, **kwargs):
        """Feed the keyword arguments to Pydantic, then mirror them in __dict__."""
        super().__init__(**kwargs)
        self.__dict__.update(kwargs)

    def __getattr__(self, name):
        """Resolve unknown attributes to None rather than raising AttributeError."""
        return self.__dict__.get(name)

    def __setattr__(self, name, value):
        """Write straight into __dict__, bypassing Pydantic validation on purpose."""
        self.__dict__[name] = value

    def __delattr__(self, name):
        """Remove *name*; raise AttributeError if it was never stored."""
        try:
            del self.__dict__[name]
        except KeyError:
            raise AttributeError(f"No such attribute: {name}") from None

    def set(self, key, val: Any):
        """Store *val* under *key* (method form of attribute assignment)."""
        self.__dict__[key] = val

    def get(self, key, default: Any = None):
        """Return the value stored under *key*, or *default* when absent."""
        return self.__dict__.get(key, default)

    def remove(self, key):
        """Delete *key* when present; unknown keys are silently ignored."""
        if key in self.__dict__:
            self.__delattr__(key)
2024-01-08 16:26:52 +08:00
|
|
|
|
2024-01-09 17:13:22 +08:00
|
|
|
class Context(BaseModel):
    """Env context for MetaGPT.

    Bundles the global configuration, the active project/git repositories,
    a shared cost manager, and a lazily created LLM instance so that roles
    and actions can share one runtime environment.
    """

    model_config = ConfigDict(arbitrary_types_allowed=True)

    # Free-form extra values shared through the context.
    # NOTE: Pydantic v2 deep-copies field defaults per instance, so these
    # mutable defaults are not shared between Context objects.
    kwargs: AttrDict = AttrDict()
    # Global configuration; Config.default() is evaluated once at class
    # definition time and copied into each new Context.
    config: Config = Config.default()

    repo: Optional[ProjectRepo] = None
    git_repo: Optional[GitRepository] = None
    src_workspace: Optional[Path] = None
    # Accumulates token/usage cost across every LLM attached to this context.
    cost_manager: CostManager = CostManager()

    # Cached LLM built from self.config.llm (leading underscore makes this a
    # Pydantic private attribute, excluded from the model schema).
    _llm: Optional[BaseLLM] = None

    def new_environ(self):
        """Return a fresh copy of os.environ, safe for the caller to mutate."""
        return os.environ.copy()

    def llm(self) -> BaseLLM:
        """Return the LLM for ``self.config.llm``, creating and caching it on first use.

        Previously the cache check was commented out, so every call built a
        brand-new provider instance despite ``_llm`` being declared as a cache.
        The cost-manager attachment stays outside the guard so the shared
        CostManager is (re)bound even for a cached instance.
        """
        if self._llm is None:
            self._llm = create_llm_instance(self.config.llm)
        if self._llm.cost_manager is None:
            self._llm.cost_manager = self.cost_manager
        return self._llm

    def llm_with_cost_manager_from_llm_config(self, llm_config: LLMConfig) -> BaseLLM:
        """Build a new LLM from an explicit *llm_config* and attach the shared cost manager.

        Unlike :meth:`llm`, the result is intentionally NOT cached: each call
        may use a different config, so each call returns a fresh instance.
        """
        llm = create_llm_instance(llm_config)
        if llm.cost_manager is None:
            llm.cost_manager = self.cost_manager
        return llm
|