mirror of
https://github.com/FoundationAgents/MetaGPT.git
synced 2026-05-15 11:02:36 +02:00
feat: merge send18:dev
This commit is contained in:
commit
7effe7f74c
92 changed files with 4830 additions and 302 deletions
|
|
@ -4,30 +4,63 @@
|
|||
@Time : 2023/5/11 14:45
|
||||
@Author : alexanderwu
|
||||
@File : llm.py
|
||||
@Modified By: mashenquan, 2023
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from metagpt.config import CONFIG
|
||||
from metagpt.provider.anthropic_api import Claude2 as Claude
|
||||
from metagpt.provider.openai_api import OpenAIGPTAPI
|
||||
from metagpt.provider.zhipuai_api import ZhiPuAIGPTAPI
|
||||
from metagpt.provider.spark_api import SparkAPI
|
||||
from metagpt.provider.human_provider import HumanProvider
|
||||
from metagpt.provider.metagpt_llm_api import MetaGPTLLMAPI
|
||||
|
||||
_ = HumanProvider() # Avoid pre-commit error
|
||||
|
||||
|
||||
class LLMType(Enum):
    """Identifiers for the LLM providers this module can construct."""

    OPENAI = "OpenAI"
    METAGPT = "MetaGPT"
    CLAUDE = "Claude"
    UNKNOWN = "UNKNOWN"

    @classmethod
    def get(cls, value):
        """Return the member whose value equals ``value``, or UNKNOWN.

        Unlike ``LLMType(value)``, this never raises for unrecognized input.
        """
        for member in cls:
            if member.value == value:
                return member
        return cls.UNKNOWN

    @classmethod
    def _missing_(cls, value):
        # Enum's lookup-fallback hook is spelled ``_missing_`` (single
        # underscores). With it, ``LLMType("bogus")`` returns UNKNOWN
        # instead of raising ValueError. The original code defined
        # ``__missing__``, which the Enum machinery never calls.
        return cls.UNKNOWN

    @classmethod
    def __missing__(cls, value):
        # Kept for backward compatibility with any caller that invoked
        # the misnamed hook directly; Enum itself ignores this method.
        return cls.UNKNOWN
|
||||
|
||||
|
||||
# Used in agents
class LLMFactory:
    """Builds an LLM API client from whatever credentials CONFIG holds."""

    @staticmethod
    def new_llm() -> "BaseGPTAPI":
        """Return a provider client chosen by configured-key priority.

        Priority: Claude, then Spark, then ZhiPu; otherwise fall back to
        ``CONFIG.LLM_TYPE`` to pick between the OpenAI-compatible clients.

        Raises:
            ValueError: if no key is set and ``CONFIG.LLM_TYPE`` is unknown.
        """
        if CONFIG.claude_api_key:
            return Claude()
        if CONFIG.spark_api_key:
            return SparkAPI()
        if CONFIG.zhipuai_api_key:
            return ZhiPuAIGPTAPI()

        # MetaGPT uses the same parameters as OpenAI.
        by_type = {
            LLMType.OPENAI.value: OpenAIGPTAPI,
            LLMType.METAGPT.value: MetaGPTLLMAPI,
        }
        make = by_type.get(CONFIG.LLM_TYPE)
        if make:
            return make()

        raise ValueError(f"Unsupported LLM TYPE: {CONFIG.LLM_TYPE}")
|
||||
|
||||
|
||||
# Used in metagpt
def LLM() -> "BaseGPTAPI":
    """Initialize an LLM instance according to which API key is configured.

    Checked in priority order: OpenAI, Claude, Spark, ZhiPu. Note this
    order differs from ``LLMFactory.new_llm`` (which never checks the
    OpenAI key) — presumably intentional for the metagpt entry point,
    but worth confirming.

    Raises:
        RuntimeError: if no provider API key is configured.
    """
    # TODO: a little trick; a registry (cf. LLMFactory) could replace
    # this if/elif chain.
    # NOTE(review): the merged diff left an unreachable
    # ``return LLMFactory.new_llm()`` after ``return llm``; the dead
    # statement is removed here and the reachable behavior kept.
    if CONFIG.openai_api_key:
        return OpenAIGPTAPI()
    if CONFIG.claude_api_key:
        return Claude()
    if CONFIG.spark_api_key:
        return SparkAPI()
    if CONFIG.zhipuai_api_key:
        return ZhiPuAIGPTAPI()
    raise RuntimeError("You should config a LLM configuration first")
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue