From d05193332418f0a0c2ab06e7c6f8d20496f29de6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E8=8E=98=E6=9D=83=20=E9=A9=AC?=
Date: Thu, 4 Jan 2024 12:46:41 +0800
Subject: [PATCH] fixbug: recursive search for provider

---
 config/config.yaml | 1 +
 metagpt/config.py  | 10 +++++++++-
 2 files changed, 10 insertions(+), 1 deletion(-)

diff --git a/config/config.yaml b/config/config.yaml
index 5025a4977..e5f8f4573 100644
--- a/config/config.yaml
+++ b/config/config.yaml
@@ -16,6 +16,7 @@ MAX_TOKENS: 4096
 RPM: 10
 LLM_TYPE: OpenAI # Except for these three major models – OpenAI, MetaGPT LLM, and Azure – other large models can be distinguished based on the validity of the key.
 TIMEOUT: 60 # Timeout for llm invocation
+DEFAULT_PROVIDER: openai
 
 #### if Spark
 #SPARK_APPID : "YOUR_APPID"
diff --git a/metagpt/config.py b/metagpt/config.py
index eb3636c9a..d633c7d28 100644
--- a/metagpt/config.py
+++ b/metagpt/config.py
@@ -50,6 +50,10 @@ class LLMProviderEnum(Enum):
     AZURE_OPENAI = "azure_openai"
     OLLAMA = "ollama"
 
+    @classmethod
+    def _missing_(cls, value):
+        return cls.OPENAI
+
 
 class Config(metaclass=Singleton):
     """
@@ -108,6 +112,11 @@ class Config(metaclass=Singleton):
             if v:
                 provider = k
                 break
+        if provider is None:
+            if self.DEFAULT_PROVIDER:
+                provider = LLMProviderEnum(self.DEFAULT_PROVIDER)
+            else:
+                raise NotConfiguredException("You should config a LLM configuration first")
         if provider is LLMProviderEnum.GEMINI and not require_python_version(req_version=(3, 10)):
             warnings.warn("Use Gemini requires Python >= 3.10")
 
@@ -117,7 +126,6 @@ class Config(metaclass=Singleton):
         if provider:
             logger.info(f"API: {provider}")
             return provider
-        raise NotConfiguredException("You should config a LLM configuration first")
 
     def get_model_name(self, provider=None) -> str:
         provider = provider or self.get_default_llm_provider_enum()