#!/usr/bin/env python
|
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
"""
|
|
|
|
|
@Time : 2023/5/11 14:45
|
|
|
|
|
@Author : alexanderwu
|
|
|
|
|
@File : llm.py
|
2023-09-07 11:01:27 +08:00
|
|
|
@Modified By: mashenquan, 2023
|
2023-06-30 17:10:48 +08:00
|
|
|
"""
|
2023-12-14 22:59:41 +08:00
|
|
|
|
2023-11-27 17:43:20 +08:00
|
|
|
from metagpt.config import CONFIG
|
2023-12-14 22:59:41 +08:00
|
|
|
from metagpt.provider import LLMType
|
2023-12-14 20:34:04 +08:00
|
|
|
from metagpt.provider.base_gpt_api import BaseGPTAPI
|
2023-11-23 01:46:14 +08:00
|
|
|
from metagpt.provider.fireworks_api import FireWorksGPTAPI
|
2023-12-06 10:10:30 +08:00
|
|
|
from metagpt.provider.human_provider import HumanProvider
|
2023-12-14 15:06:04 +08:00
|
|
|
from metagpt.provider.metagpt_llm_api import MetaGPTLLMAPI
|
2023-12-14 20:34:04 +08:00
|
|
|
from metagpt.provider.open_llm_api import OpenLLMGPTAPI
|
2023-12-14 22:59:41 +08:00
|
|
|
from metagpt.provider.openai_api import OpenAIGPTAPI
|
|
|
|
|
from metagpt.provider.spark_api import SparkAPI
|
|
|
|
|
from metagpt.provider.zhipuai_api import ZhiPuAIGPTAPI
|
2023-06-30 17:10:48 +08:00
|
|
|
|
# Touch the import so linters/pre-commit don't flag HumanProvider as unused;
# the instance itself is discarded.
_ = HumanProvider()  # Avoid pre-commit error
|
2023-11-29 09:52:26 +08:00
|
|
|
|
2023-06-30 17:10:48 +08:00
|
|
|
|
2023-12-14 15:06:04 +08:00
|
|
|
# Used in agents
|
2023-09-07 11:20:27 +08:00
|
|
|
class LLMFactory:
    """Factory selecting a concrete LLM provider from the global CONFIG."""

    @staticmethod
    def new_llm() -> "BaseGPTAPI":
        """Instantiate the provider whose key/endpoint is configured.

        Config fields are probed in a fixed priority order and the first
        truthy one wins; otherwise fall back to dispatching on
        ``CONFIG.LLM_TYPE``.

        :return: a freshly constructed provider instance.
        :raises RuntimeError: when no provider configuration is present.
        """
        # Key-based providers, checked in priority order (guard clauses so
        # later CONFIG fields are only read when earlier ones are falsy).
        if CONFIG.spark_api_key:
            return SparkAPI()
        if CONFIG.zhipuai_api_key:
            return ZhiPuAIGPTAPI()
        if CONFIG.open_llm_api_base:
            return OpenLLMGPTAPI()
        if CONFIG.fireworks_api_key:
            return FireWorksGPTAPI()

        # MetaGPT uses the same parameters as OpenAI.
        providers_by_type = {
            LLMType.OPENAI.value: OpenAIGPTAPI,
            LLMType.METAGPT.value: MetaGPTLLMAPI,
        }
        chosen = providers_by_type.get(CONFIG.LLM_TYPE)
        if chosen is not None:
            return chosen()

        raise RuntimeError("You should config a LLM configuration first")
|
2023-12-14 15:06:04 +08:00
|
|
|
|
2023-09-08 10:16:07 +08:00
|
|
|
|
2023-12-14 15:06:04 +08:00
|
|
|
# Used in metagpt
|
2023-11-27 17:43:20 +08:00
|
|
|
def LLM() -> "BaseGPTAPI":
    """Initialize different LLM instance according to the key field existence.

    Thin module-level convenience wrapper around :meth:`LLMFactory.new_llm`.
    """
    llm_instance = LLMFactory.new_llm()
    return llm_instance
|