Update provider class names to a uniform convention and check tests

This commit is contained in:
better629 2023-12-28 17:18:18 +08:00
parent d40c4f5025
commit 255e2d3fa7
13 changed files with 48 additions and 50 deletions

View file

@@ -17,7 +17,7 @@ from pydantic import BaseModel, Field
from metagpt.config import CONFIG
from metagpt.const import DEFAULT_LANGUAGE, DEFAULT_MAX_TOKENS, DEFAULT_TOKEN_SIZE
from metagpt.logs import logger
from metagpt.provider import MetaGPTAPI
from metagpt.provider import MetaGPTLLM
from metagpt.provider.base_llm import BaseLLM
from metagpt.schema import Message, SimpleMessage
from metagpt.utils.redis import Redis
@@ -122,7 +122,7 @@ class BrainMemory(BaseModel):
return v
async def summarize(self, llm, max_words=200, keep_language: bool = False, limit: int = -1, **kwargs):
if isinstance(llm, MetaGPTAPI):
if isinstance(llm, MetaGPTLLM):
return await self._metagpt_summarize(max_words=max_words)
self.llm = llm
@@ -175,7 +175,7 @@ class BrainMemory(BaseModel):
async def get_title(self, llm, max_words=5, **kwargs) -> str:
"""Generate text title"""
if isinstance(llm, MetaGPTAPI):
if isinstance(llm, MetaGPTLLM):
return self.history[0].content if self.history else "New"
summary = await self.summarize(llm=llm, max_words=500)
@@ -190,7 +190,7 @@ class BrainMemory(BaseModel):
return response
async def is_related(self, text1, text2, llm):
if isinstance(llm, MetaGPTAPI):
if isinstance(llm, MetaGPTLLM):
return await self._metagpt_is_related(text1=text1, text2=text2, llm=llm)
return await self._openai_is_related(text1=text1, text2=text2, llm=llm)
@@ -212,7 +212,7 @@ class BrainMemory(BaseModel):
return result
async def rewrite(self, sentence: str, context: str, llm):
if isinstance(llm, MetaGPTAPI):
if isinstance(llm, MetaGPTLLM):
return await self._metagpt_rewrite(sentence=sentence, context=context, llm=llm)
return await self._openai_rewrite(sentence=sentence, context=context, llm=llm)

View file

@@ -7,21 +7,21 @@
"""
from metagpt.provider.fireworks_api import FireworksLLM
from metagpt.provider.google_gemini_api import GeminiGPTAPI
from metagpt.provider.google_gemini_api import GeminiLLM
from metagpt.provider.ollama_api import OllamaLLM
from metagpt.provider.open_llm_api import OpenLLMGPTAPI
from metagpt.provider.open_llm_api import OpenLLM
from metagpt.provider.openai_api import OpenAILLM
from metagpt.provider.zhipuai_api import ZhiPuAIGPTAPI
from metagpt.provider.zhipuai_api import ZhiPuAILLM
from metagpt.provider.azure_openai_api import AzureOpenAILLM
from metagpt.provider.metagpt_api import MetaGPTAPI
from metagpt.provider.metagpt_api import MetaGPTLLM
__all__ = [
"FireworksLLM",
"GeminiGPTAPI",
"OpenLLMGPTAPI",
"GeminiLLM",
"OpenLLM",
"OpenAILLM",
"ZhiPuAIGPTAPI",
"ZhiPuAILLM",
"AzureOpenAILLM",
"MetaGPTAPI",
"MetaGPTLLM",
"OllamaLLM",
]

View file

@@ -42,7 +42,7 @@ class GeminiGenerativeModel(GenerativeModel):
@register_provider(LLMProviderEnum.GEMINI)
class GeminiGPTAPI(BaseLLM):
class GeminiLLM(BaseLLM):
"""
Refs to `https://ai.google.dev/tutorials/python_quickstart`
"""

View file

@@ -11,6 +11,6 @@ from metagpt.provider.llm_provider_registry import register_provider
@register_provider(LLMProviderEnum.METAGPT)
class MetaGPTAPI(OpenAILLM):
class MetaGPTLLM(OpenAILLM):
def __init__(self):
super().__init__()

View file

@@ -35,7 +35,7 @@ class OpenLLMCostManager(CostManager):
@register_provider(LLMProviderEnum.OPEN_LLM)
class OpenLLMGPTAPI(OpenAILLM):
class OpenLLM(OpenAILLM):
def __init__(self):
self.config: Config = CONFIG
self.__init_openllm()

View file

@@ -5,6 +5,7 @@
import json
from enum import Enum
import openai
import zhipuai
from requests import ConnectionError
from tenacity import (
@@ -31,7 +32,7 @@ class ZhiPuEvent(Enum):
@register_provider(LLMProviderEnum.ZHIPUAI)
class ZhiPuAIGPTAPI(BaseLLM):
class ZhiPuAILLM(BaseLLM):
"""
Refs to `https://open.bigmodel.cn/dev/api#chatglm_turbo`
From now, there is only one model named `chatglm_turbo`