MetaGPT/metagpt/llm.py

45 lines
1.1 KiB
Python
Raw Normal View History

2023-08-28 17:45:45 +08:00
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/5/11 14:45
@Author : alexanderwu
@File : llm.py
2023-09-07 11:01:27 +08:00
@Modified By: mashenquan, 2023
2023-08-28 17:45:45 +08:00
"""
2023-09-07 11:01:27 +08:00
from enum import Enum
2023-08-28 17:45:45 +08:00
2023-09-08 10:16:07 +08:00
import openai
2023-09-07 11:20:27 +08:00
from metagpt.config import CONFIG
2023-08-28 17:45:45 +08:00
2023-09-07 11:01:27 +08:00
class LLMType(Enum):
    """Closed set of LLM provider identifiers recognized by the factory."""

    OPENAI = "OpenAI"
    METAGPT = "MetaGPT"
    CLAUDE = "Claude"
    UNKNOWN = "UNKNOWN"

    @classmethod
    def get(cls, value):
        """Return the member whose value equals ``value``, or ``UNKNOWN`` if none matches.

        Uses the Enum value-lookup protocol (``cls(value)``) instead of a manual
        member scan; a non-matching (or unhashable-mismatch) value raises
        ``ValueError``, which we translate into the ``UNKNOWN`` sentinel so
        callers never have to handle the exception themselves.
        """
        try:
            return cls(value)
        except ValueError:
            return cls.UNKNOWN
2023-09-07 11:20:27 +08:00
class LLMFactory:
    """Factory that builds the concrete LLM client selected by ``CONFIG.LLM_TYPE``."""

    @staticmethod
    def new_llm() -> object:
        """Instantiate and return the provider API wrapper named by ``CONFIG.LLM_TYPE``.

        Raises:
            openai.InvalidRequestError: when ``CONFIG.LLM_TYPE`` matches no
                known provider value.
        """
        # Provider modules are imported lazily here, inside the call,
        # presumably to avoid import cycles at module load — confirm before hoisting.
        from metagpt.provider.anthropic_api import Claude2 as Claude
        from metagpt.provider.metagpt_llm_api import MetaGPTLLMAPI as MetaGPT_LLM
        from metagpt.provider.openai_api import OpenAIGPTAPI as OpenAI_LLM

        configured = CONFIG.LLM_TYPE
        if configured == LLMType.OPENAI.value:
            return OpenAI_LLM()
        if configured == LLMType.METAGPT.value:
            return MetaGPT_LLM()
        if configured == LLMType.CLAUDE.value:
            return Claude()
        raise openai.InvalidRequestError(message=f"Unsupported LLM TYPE: {configured}", param=None)