Merge branch 'dev' into v0.5-release

This commit is contained in:
geekan 2023-12-19 18:00:56 +08:00 committed by GitHub
commit 1073ffd60d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
36 changed files with 368 additions and 282 deletions

View file

@ -8,6 +8,7 @@ Provide configuration, singleton
"""
import os
from copy import deepcopy
from enum import Enum
from pathlib import Path
from typing import Any
@ -31,6 +32,15 @@ class NotConfiguredException(Exception):
super().__init__(self.message)
class LLMProviderEnum(Enum):
    """Closed set of supported LLM backends; each value is the provider id string."""

    OPENAI = "openai"
    ANTHROPIC = "anthropic"
    SPARK = "spark"
    ZHIPUAI = "zhipuai"
    FIREWORKS = "fireworks"
    OPEN_LLM = "open_llm"
class Config(metaclass=Singleton):
"""
Regular usage method:
@ -45,38 +55,58 @@ class Config(metaclass=Singleton):
default_yaml_file = METAGPT_ROOT / "config/config.yaml"
def __init__(self, yaml_file=default_yaml_file):
    """Load configuration from YAML files and the environment.

    Args:
        yaml_file: Path to the main YAML config file; defaults to
            ``config/config.yaml`` under METAGPT_ROOT.
    """
    # Snapshot the options already present in the context so that values
    # set before Config was constructed survive the reload below.
    global_options = OPTIONS.get()
    # CLI parameters; populated later via update_via_cli().
    self.project_path = ""
    self.project_name = ""
    self.inc = False
    self.reqa_file = ""
    self.max_auto_summarize_code = 0
    self._init_with_config_files_and_env(yaml_file)
    self._update()
    global_options.update(OPTIONS.get())
    # Log exactly once, after every option source has been merged
    # (the original emitted this debug line twice).
    logger.debug("Config loading done.")
    logger.info(f"OpenAI API Model: {self.openai_api_model}")
def get_default_llm_provider_enum(self):
    """Pick the first configured LLM provider, in fixed priority order.

    Returns:
        LLMProviderEnum: The highest-priority provider with a usable key.

    Raises:
        NotConfiguredException: If no provider credential is configured.
    """
    # Priority order matters: OpenAI wins over every alternative.
    candidates = (
        (self.openai_api_key, LLMProviderEnum.OPENAI),
        (self.anthropic_api_key, LLMProviderEnum.ANTHROPIC),
        (self.zhipuai_api_key, LLMProviderEnum.ZHIPUAI),
        (self.fireworks_api_key, LLMProviderEnum.FIREWORKS),
    )
    for api_key, provider in candidates:
        if self._is_valid_llm_key(api_key):
            return provider
    # An open LLM endpoint needs only a base URL, not an API key.
    if self.open_llm_api_base:
        return LLMProviderEnum.OPEN_LLM
    raise NotConfiguredException("You should config a LLM configuration first")
@staticmethod
def _is_valid_llm_key(k) -> bool:
    """Check whether *k* looks like a genuinely configured API key.

    Args:
        k: Candidate key string; may be None or empty.

    Returns:
        bool: True iff *k* is non-empty and not the "YOUR_API_KEY" placeholder.
    """
    # bool() guarantees the annotated return type: the bare ``k and ...``
    # form could leak falsy non-bool values (None, "") to callers.
    return bool(k) and k != "YOUR_API_KEY"
def _update(self):
    """Pull every known option from OPTIONS into instance attributes.

    Raises:
        NotConfiguredException: Via get_default_llm_provider_enum() when no
            LLM provider credential is configured.
    """
    self.global_proxy = self._get("GLOBAL_PROXY")
    # LLM provider credentials (key names normalized to UPPER_SNAKE_CASE).
    self.openai_api_key = self._get("OPENAI_API_KEY")
    self.anthropic_api_key = self._get("ANTHROPIC_API_KEY")
    self.zhipuai_api_key = self._get("ZHIPUAI_API_KEY")
    self.open_llm_api_base = self._get("OPEN_LLM_API_BASE")
    self.open_llm_api_model = self._get("OPEN_LLM_API_MODEL")
    self.fireworks_api_key = self._get("FIREWORKS_API_KEY")
    # Fail fast: raises NotConfiguredException when nothing is configured.
    _ = self.get_default_llm_provider_enum()
    # OpenAI / Azure OpenAI settings.
    self.openai_api_base = self._get("OPENAI_API_BASE")
    self.openai_proxy = self._get("OPENAI_PROXY") or self.global_proxy
    self.openai_api_type = self._get("OPENAI_API_TYPE")
    self.openai_api_version = self._get("OPENAI_API_VERSION")
    self.openai_api_rpm = self._get("RPM", 3)
    self.openai_api_model = self._get("OPENAI_API_MODEL", "gpt-4-1106-preview")
    self.max_tokens_rsp = self._get("MAX_TOKENS", 2048)
    self.deployment_name = self._get("DEPLOYMENT_NAME")
    self.deployment_id = self._get("DEPLOYMENT_ID")
    # Fireworks settings.
    self.fireworks_api_base = self._get("FIREWORKS_API_BASE")
    self.fireworks_api_model = self._get("FIREWORKS_API_MODEL")
    self.claude_api_key = self._get("ANTHROPIC_API_KEY")
    # Search-tool credentials.
    self.serpapi_api_key = self._get("SERPAPI_API_KEY")
    self.serper_api_key = self._get("SERPER_API_KEY")
    self.google_api_key = self._get("GOOGLE_API_KEY")
    # NOTE(review): the diff hunks elide further option assignments between
    # these sections — verify against the full file before relying on this
    # being the complete list.
    self.workspace_path = Path(self._get("WORKSPACE_PATH", DEFAULT_WORKSPACE_ROOT))
    self._ensure_workspace_exists()
def update_via_cli(self, project_path, project_name, inc, reqa_file, max_auto_summarize_code):
    """Update config via cli"""
    # Per the PrepareDocuments action (RFC 135, section 2.2.3.5.1): an
    # explicit project path implies incremental mode on an existing project,
    # and the directory name serves as a fallback project name.
    if project_path:
        inc = True
        if not project_name:
            project_name = Path(project_path).name
    self.project_path = project_path
    self.project_name = project_name
    self.inc = inc
    self.reqa_file = reqa_file
    self.max_auto_summarize_code = max_auto_summarize_code
def _ensure_workspace_exists(self):
    """Create the workspace directory (and any missing parents), idempotently."""
    workspace = self.workspace_path
    workspace.mkdir(parents=True, exist_ok=True)
    logger.debug(f"WORKSPACE_PATH set to {self.workspace_path}")
@ -142,8 +185,8 @@ class Config(metaclass=Singleton):
@staticmethod
def _get(*args, **kwargs):
    """Look up a value in the current OPTIONS mapping.

    Accepts the same ``(key, default)`` signature as ``dict.get``.
    """
    # The original contained a second, unreachable lookup pair after the
    # return (a merge-diff artifact); it is removed here.
    options = OPTIONS.get()
    return options.get(*args, **kwargs)
def get(self, key, *args, **kwargs):
"""Search for a value in config/key.yaml, config/config.yaml, and env; raise an error if not found"""
@ -156,8 +199,8 @@ class Config(metaclass=Singleton):
OPTIONS.get()[name] = value
def __getattr__(self, name: str) -> Any:
    """Fall back to the OPTIONS mapping for attributes not set on the instance.

    Returns None (dict.get semantics) rather than raising AttributeError
    when *name* is absent.
    """
    # The original had an unreachable duplicate lookup after the return
    # (a merge-diff artifact); it is removed here.
    return OPTIONS.get().get(name)
def set_context(self, options: dict):
"""Update current config"""
@ -176,8 +219,8 @@ class Config(metaclass=Singleton):
def new_environ(self):
    """Return a new os.environ object

    A copy of the process environment extended with every string-valued
    config option; non-string values are skipped since environ requires str.
    """
    env = os.environ.copy()
    # The original performed this identical update twice (a merge-diff
    # artifact); once is sufficient.
    env.update({k: v for k, v in self.options.items() if isinstance(v, str)})
    return env