2024-01-04 21:16:23 +08:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
"""
|
|
|
|
|
@Time : 2024/1/4 16:32
|
|
|
|
|
@Author : alexanderwu
|
|
|
|
|
@File : context.py
|
|
|
|
|
"""
|
|
|
|
|
import os
|
|
|
|
|
from pathlib import Path
|
2024-01-08 16:26:52 +08:00
|
|
|
from typing import Optional
|
2024-01-04 21:16:23 +08:00
|
|
|
|
2024-01-09 17:39:09 +08:00
|
|
|
from pydantic import BaseModel, ConfigDict
|
2024-01-09 14:16:32 +08:00
|
|
|
|
2024-01-04 21:16:23 +08:00
|
|
|
from metagpt.config2 import Config
|
2024-01-09 15:56:40 +08:00
|
|
|
from metagpt.configs.llm_config import LLMConfig, LLMType
|
2024-01-04 21:16:23 +08:00
|
|
|
from metagpt.const import OPTIONS
|
|
|
|
|
from metagpt.provider.base_llm import BaseLLM
|
2024-01-09 15:56:40 +08:00
|
|
|
from metagpt.provider.llm_provider_registry import create_llm_instance
|
2024-01-04 21:16:23 +08:00
|
|
|
from metagpt.utils.cost_manager import CostManager
|
|
|
|
|
from metagpt.utils.git_repository import GitRepository
|
|
|
|
|
|
|
|
|
|
|
2024-01-09 14:16:32 +08:00
|
|
|
class AttrDict(BaseModel):
    """A dict-like object that allows access to keys as attributes, compatible with Pydantic."""

    model_config = ConfigDict(extra="allow")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.__dict__.update(kwargs)

    def __getattr__(self, key):
        # Missing keys resolve to None rather than raising AttributeError.
        return self.__dict__.get(key)

    def __setattr__(self, key, value):
        # Write straight into __dict__, sidestepping pydantic field validation.
        self.__dict__[key] = value

    def __delattr__(self, key):
        # Guard clause: only attributes that actually exist may be deleted.
        if key not in self.__dict__:
            raise AttributeError(f"No such attribute: {key}")
        del self.__dict__[key]
|
|
|
|
|
|
|
|
|
|
|
2024-01-09 17:13:22 +08:00
|
|
|
class Context(BaseModel):
    """Env context for MetaGPT.

    Bundles the shared state an Env/Role/Action operates against:
    configuration, git repository, cost accounting, and an LLM handle.
    """

    model_config = ConfigDict(arbitrary_types_allowed=True)

    # Free-form runtime key-values with attribute-style access.
    kwargs: AttrDict = AttrDict()
    # Effective configuration; falls back to the global default config.
    config: Config = Config.default()
    # Active git repository, when the workspace is under version control.
    git_repo: Optional[GitRepository] = None
    # Source workspace root inside the repo, if one has been selected.
    src_workspace: Optional[Path] = None
    # Accumulates token usage/cost across all LLM calls made via this context.
    cost_manager: CostManager = CostManager()

    # Cached LLM instance (leading underscore -> pydantic private attribute).
    _llm: Optional[BaseLLM] = None

    @property
    def file_repo(self):
        """Return a new file repository bound to the git repo.

        NOTE(review): raises AttributeError when ``git_repo`` is None —
        callers are expected to initialize the repo first.
        """
        return self.git_repo.new_file_repository()

    @property
    def options(self):
        """Return all key-values"""
        return OPTIONS.get()

    def new_environ(self):
        """Return a new os.environ object"""
        env = os.environ.copy()
        i = self.options
        # Only string values are valid os.environ entries; skip everything else.
        env.update({k: v for k, v in i.items() if isinstance(v, str)})
        return env

    def llm(self, name: Optional[str] = None, provider: Optional[LLMType] = None) -> BaseLLM:
        """Return a LLM instance, fixme: support cache

        Args:
            name: Optional named LLM config to look up; defaults to the main config.
            provider: Optional provider type used to select the LLM config.

        Returns:
            A freshly created BaseLLM wired to this context's cost manager.
        """
        # fixme: caching is disabled — a new instance is created on every call.
        self._llm = create_llm_instance(self.config.get_llm_config(name, provider))
        if self._llm.cost_manager is None:
            self._llm.cost_manager = self.cost_manager
        return self._llm

    def llm_with_cost_manager_from_llm_config(self, llm_config: LLMConfig) -> BaseLLM:
        """Return a LLM instance, fixme: support cache

        Unlike :meth:`llm`, this builds from an explicit ``llm_config`` and does
        NOT cache the result on ``self._llm``.
        """
        llm = create_llm_instance(llm_config)
        if llm.cost_manager is None:
            llm.cost_manager = self.cost_manager
        return llm
|
|
|
|
|
|
2024-01-04 21:16:23 +08:00
|
|
|
|
2024-01-09 22:04:49 +08:00
|
|
|
class ContextMixin(BaseModel):
    """Mixin class for context and config.

    Resolution order everywhere is: private value on this object first,
    then the public value from the (possibly global) context.
    """

    # Env/Role/Action will use this context as private context, or use self.context as public context
    _context: Optional[Context] = None
    # Env/Role/Action will use this config as private config, or use self.context.config as public config
    _config: Optional[Config] = None

    # Env/Role/Action will use this llm as private llm, or use self.context._llm instance
    _llm_config: Optional[LLMConfig] = None
    _llm: Optional[BaseLLM] = None

    def __init__(
        self,
        context: Optional[Context] = None,
        config: Optional[Config] = None,
        llm: Optional[BaseLLM] = None,
        **kwargs,
    ):
        """Initialize with config"""
        # Remaining kwargs go through normal pydantic validation; the three
        # private values are installed afterwards via the set_* helpers.
        super().__init__(**kwargs)
        self.set_context(context)
        self.set_config(config)
        self.set_llm(llm)

    def set(self, k, v, override=False):
        """Set attribute

        Writes directly into ``__dict__`` (bypassing pydantic validation).
        The value is only stored when ``override`` is True or the current
        value is falsy/absent.
        """
        if override or not self.__dict__.get(k):
            self.__dict__[k] = v

    def set_context(self, context: Context, override=True):
        """Set context"""
        # NOTE: override defaults to True here, unlike the other setters.
        self.set("_context", context, override)

    def set_config(self, config: Config, override=False):
        """Set config"""
        self.set("_config", config, override)

    def set_llm_config(self, llm_config: LLMConfig, override=False):
        """Set llm config"""
        self.set("_llm_config", llm_config, override)

    def set_llm(self, llm: BaseLLM, override=False):
        """Set llm"""
        self.set("_llm", llm, override)

    def use_llm(self, name: Optional[str] = None, provider: Optional[LLMType] = None) -> BaseLLM:
        """Use a LLM instance"""
        # Record the chosen llm config and drop any cached instance so the
        # `llm` property rebuilds from the new config on next access.
        self._llm_config = self.config.get_llm_config(name, provider)
        self._llm = None
        return self.llm

    @property
    def config(self) -> Config:
        """Role config: role config > context config"""
        if self._config:
            return self._config
        return self.context.config

    @config.setter
    def config(self, config: Config) -> None:
        """Set config"""
        self.set_config(config)

    @property
    def context(self) -> Context:
        """Role context: role context > context"""
        if self._context:
            return self._context
        # Fall back to the module-level global context.
        return CONTEXT

    @context.setter
    def context(self, context: Context) -> None:
        """Set context"""
        self.set_context(context)

    @property
    def llm(self) -> BaseLLM:
        """Role llm: role llm > context llm"""
        # A pending llm config (set via use_llm) lazily materializes an instance.
        if self._llm_config and not self._llm:
            self._llm = self.context.llm_with_cost_manager_from_llm_config(self._llm_config)
        # Otherwise fall back to a context-built LLM (no caching on the context side).
        return self._llm or self.context.llm()

    @llm.setter
    def llm(self, llm: BaseLLM) -> None:
        """Set llm"""
        self._llm = llm
|
|
|
|
|
|
2024-01-09 22:04:49 +08:00
|
|
|
|
2024-01-09 14:16:32 +08:00
|
|
|
# Global context, not in Env. Used as the fallback by ContextMixin.context
# when no private context has been set on the object.
CONTEXT = Context()
|