Add option to disable LLM provider check

This commit is contained in:
shenchucheng 2023-12-23 21:56:19 +08:00
parent 59586f30d6
commit 118ab8ac82
3 changed files with 10 additions and 2 deletions

View file

@ -141,3 +141,4 @@ TIMEOUT: 60 # Timeout for llm invocation
#REDIS_PASSWORD: "YOUR_REDIS_PASSWORD"
#REDIS_DB: "YOUR_REDIS_DB_INDEX, str, 0-based"
# DISABLE_LLM_PROVIDER_CHECK: false

View file

@ -138,7 +138,9 @@ class Config(metaclass=Singleton):
self.gemini_api_key = self._get("GEMINI_API_KEY")
self.ollama_api_base = self._get("OLLAMA_API_BASE")
self.ollama_api_model = self._get("OLLAMA_API_MODEL")
# _ = self.get_default_llm_provider_enum()
if not self._get("DISABLE_LLM_PROVIDER_CHECK"):
_ = self.get_default_llm_provider_enum()
# self.openai_base_url = self._get("OPENAI_BASE_URL")
self.openai_proxy = self._get("OPENAI_PROXY") or self.global_proxy

View file

@ -6,6 +6,8 @@
@File : llm.py
"""
from typing import Optional
from metagpt.config import CONFIG, LLMProviderEnum
from metagpt.provider.base_gpt_api import BaseGPTAPI
from metagpt.provider.human_provider import HumanProvider
@ -14,6 +16,9 @@ from metagpt.provider.llm_provider_registry import LLM_REGISTRY
_ = HumanProvider() # Avoid pre-commit error
def LLM(provider: LLMProviderEnum = CONFIG.get_default_llm_provider_enum()) -> BaseGPTAPI:
def LLM(provider: Optional[LLMProviderEnum] = None) -> BaseGPTAPI:
    """Return the LLM API instance registered for *provider*.

    When *provider* is omitted (None), the provider configured as the
    default in CONFIG is used instead, resolved lazily at call time.
    """
    chosen = CONFIG.get_default_llm_provider_enum() if provider is None else provider
    return LLM_REGISTRY.get_provider(chosen)