add options to disable llm provider check

commit 4b120a932f (parent c7f47e80ad)
Author: shenchucheng
Date:   2023-12-23 21:56:19 +08:00

3 changed files with 12 additions and 3 deletions


@@ -117,4 +117,6 @@ RPM: 10
 ### repair operation on the content extracted from LLM's raw output. Warning, it improves the result but not fix all cases.
 # REPAIR_LLM_OUTPUT: false
 # PROMPT_FORMAT: json #json or markdown
+# DISABLE_LLM_PROVIDER_CHECK: false

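The new key is opt-in and defaults to off. A minimal sketch of enabling it without editing the YAML, assuming Config also reads options from the process environment; in that case the variable must be set before metagpt.config is first imported, because Config is a singleton initialized at import time:

    import os

    # Any non-empty value is truthy in the guard below; "true" is used only for readability.
    os.environ["DISABLE_LLM_PROVIDER_CHECK"] = "true"

    from metagpt.config import CONFIG  # Config.__init__ now skips the provider check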

@@ -107,7 +107,9 @@ class Config(metaclass=Singleton):
         self.gemini_api_key = self._get("GEMINI_API_KEY")
         self.ollama_api_base = self._get("OLLAMA_API_BASE")
         self.ollama_api_model = self._get("OLLAMA_API_MODEL")
-        _ = self.get_default_llm_provider_enum()
+        if not self._get("DISABLE_LLM_PROVIDER_CHECK"):
+            _ = self.get_default_llm_provider_enum()
         self.openai_base_url = self._get("OPENAI_BASE_URL")
         self.openai_proxy = self._get("OPENAI_PROXY") or self.global_proxy

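One caveat worth noting: the guard tests plain truthiness of whatever _get returns. Assuming YAML values arrive parsed while environment values arrive as raw strings, a YAML DISABLE_LLM_PROVIDER_CHECK: false yields bool False and keeps the check on, whereas the environment string "false" is non-empty and would skip it:

    # Illustration only; the actual behavior depends on how _get sources the value.
    bool(False)    # YAML `false`       -> False, check still runs
    bool("false")  # env string "false" -> True, check skipped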

@@ -6,6 +6,8 @@
 @File : llm.py
 """
+from typing import Optional
+
 from metagpt.config import CONFIG, LLMProviderEnum
 from metagpt.provider.base_gpt_api import BaseGPTAPI
 from metagpt.provider.human_provider import HumanProvider
@@ -14,6 +16,9 @@ from metagpt.provider.llm_provider_registry import LLM_REGISTRY
 _ = HumanProvider()  # Avoid pre-commit error

-def LLM(provider: LLMProviderEnum = CONFIG.get_default_llm_provider_enum()) -> BaseGPTAPI:
+def LLM(provider: Optional[LLMProviderEnum] = None) -> BaseGPTAPI:
     """get the default llm provider"""
+    if provider is None:
+        provider = CONFIG.get_default_llm_provider_enum()
     return LLM_REGISTRY.get_provider(provider)
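Moving the CONFIG lookup out of the default argument also fixes a subtle Python pitfall: a default expression in a signature is evaluated once at import time, so the old code invoked get_default_llm_provider_enum() (and its provider check) even when every caller passed an explicit provider. A minimal usage sketch, assuming OPENAI is a registered member of LLMProviderEnum:

    from metagpt.config import LLMProviderEnum
    from metagpt.llm import LLM

    default_llm = LLM()  # provider resolved lazily from CONFIG at call time
    openai_llm = LLM(provider=LLMProviderEnum.OPENAI)  # explicit provider, no CONFIG lookup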