refine code

This commit is contained in:
geekan 2024-01-10 16:02:05 +08:00 committed by 莘权 马
parent ba477a93d5
commit cd29edcc4f
5 changed files with 14 additions and 23 deletions

View file

@@ -16,17 +16,14 @@ from typing import Optional
import pandas as pd
from paddleocr import PaddleOCR
from pydantic import Field
from metagpt.actions import Action
from metagpt.const import INVOICE_OCR_TABLE_PATH
from metagpt.llm import LLM
from metagpt.logs import logger
from metagpt.prompts.invoice_ocr import (
EXTRACT_OCR_MAIN_INFO_PROMPT,
REPLY_OCR_QUESTION_PROMPT,
)
from metagpt.provider.base_llm import BaseLLM
from metagpt.utils.common import OutputParser
from metagpt.utils.file import File
@@ -175,9 +172,6 @@ class ReplyQuestion(Action):
"""
name: str = "ReplyQuestion"
i_context: Optional[str] = None
llm: BaseLLM = Field(default_factory=LLM)
language: str = "ch"
async def run(self, query: str, ocr_result: list, *args, **kwargs) -> str:

View file

@@ -9,9 +9,7 @@ from pydantic import Field, parse_obj_as
from metagpt.actions import Action
from metagpt.config import CONFIG
from metagpt.llm import LLM
from metagpt.logs import logger
from metagpt.provider.base_llm import BaseLLM
from metagpt.tools.search_engine import SearchEngine
from metagpt.tools.web_browser_engine import WebBrowserEngine, WebBrowserEngineType
from metagpt.utils.common import OutputParser
@@ -246,10 +244,6 @@ class WebBrowseAndSummarize(Action):
class ConductResearch(Action):
"""Action class to conduct research and generate a research report."""
name: str = "ConductResearch"
i_context: Optional[str] = None
llm: BaseLLM = Field(default_factory=LLM)
def __init__(self, **kwargs):
super().__init__(**kwargs)
if CONFIG.model_for_researcher_report:

View file

@@ -78,11 +78,11 @@ class Context(BaseModel):
# return self._llm
def llm(self, name: Optional[str] = None, provider: LLMType = LLMType.OPENAI) -> BaseLLM:
"""Return a LLM instance, fixme: support multiple llm instances"""
if self._llm is None:
self._llm = create_llm_instance(self.config.get_llm_config(name, provider))
if self._llm.cost_manager is None:
self._llm.cost_manager = self.cost_manager
"""Return a LLM instance, fixme: support cache"""
# if self._llm is None:
self._llm = create_llm_instance(self.config.get_llm_config(name, provider))
if self._llm.cost_manager is None:
self._llm.cost_manager = self.cost_manager
return self._llm