- change reserved parameter back to config.llm.max_token

This commit is contained in:
Azure Wang 2024-03-12 16:49:04 +08:00
parent 6487ae84b0
commit 73d0d29e81
3 changed files with 3 additions and 3 deletions

View file

@@ -134,7 +134,7 @@ class CollectLinks(Action):
break
model_name = config.llm.model
prompt = reduce_message_length(gen_msg(), model_name, system_text, 0)
prompt = reduce_message_length(gen_msg(), model_name, system_text, config.llm.max_token)
logger.debug(prompt)
queries = await self._aask(prompt, [system_text])
try:

View file

@@ -134,3 +134,4 @@ def merge_dict(dicts: Iterable[Dict]) -> Dict:
return result
config = Config.default()

View file

@@ -1,14 +1,13 @@
import json
from typing import Optional, Union
from metagpt.config2 import Config
from metagpt.config2 import config
from metagpt.configs.llm_config import LLMType
from metagpt.logs import logger
from metagpt.provider.azure_openai_api import AzureOpenAILLM
from metagpt.provider.openai_api import OpenAILLM
from metagpt.schema import Message
config = Config.default()
OriginalLLM = OpenAILLM if config.llm.api_type == LLMType.OPENAI else AzureOpenAILLM