Merge pull request #1561 from zhaoweiguo/patch-3

Fix a bug in multi-LLM support: decide whether to override an action's LLM based on `private_llm` instead of `private_config`.
This commit is contained in:
Alexander Wu 2024-11-04 10:28:44 +08:00 committed by GitHub
commit 59715c01d8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 73 additions and 1 deletions

View file

@@ -247,7 +247,7 @@ class Role(SerializationMixin, ContextMixin, BaseModel):
return self
def _init_action(self, action: Action):
    # Wire the role's LLM into a newly attached action.
    # Diff: the condition below was changed from `private_config` (removed
    # line) to `private_llm` (added line). An action may carry a private
    # config without having built its own private LLM, in which case the
    # role's LLM must still override it; only an action that already owns a
    # private LLM keeps it (override=False).
    if not action.private_config:
    if not action.private_llm:
        action.set_llm(self.llm, override=True)
    else:
        action.set_llm(self.llm, override=False)

View file

@@ -0,0 +1,27 @@
# Multi-LLM test configuration: a default `llm` section plus named
# per-model overrides under `models`. An Action selects an override by
# passing its key as `llm_name_or_type`; otherwise the default `llm` is used.
# NOTE(review): indentation appears flattened by the diff rendering — in the
# real file, keys under `llm` and under each `models` entry are nested.
llm:
api_type: "openai" # or azure / ollama / groq etc.
base_url: "YOUR_gpt-3.5-turbo_BASE_URL"
api_key: "YOUR_gpt-3.5-turbo_API_KEY"
model: "gpt-3.5-turbo" # or gpt-3.5-turbo
# proxy: "YOUR_gpt-3.5-turbo_PROXY" # for LLM API requests
# timeout: 600 # Optional. If set to 0, default value is 300.
# Details: https://azure.microsoft.com/en-us/pricing/details/cognitive-services/openai-service/
pricing_plan: "" # Optional. Use for Azure LLM when its model name is not the same as OpenAI's
# Named override entries; each key is the name referenced by `llm_name_or_type`.
models:
"YOUR_MODEL_NAME_1": # model: "gpt-4-turbo" # or gpt-3.5-turbo
api_type: "openai" # or azure / ollama / groq etc.
base_url: "YOUR_MODEL_1_BASE_URL"
api_key: "YOUR_MODEL_1_API_KEY"
# proxy: "YOUR_MODEL_1_PROXY" # for LLM API requests
# timeout: 600 # Optional. If set to 0, default value is 300.
# Details: https://azure.microsoft.com/en-us/pricing/details/cognitive-services/openai-service/
pricing_plan: "" # Optional. Use for Azure LLM when its model name is not the same as OpenAI's
"YOUR_MODEL_NAME_2": # model: "gpt-4-turbo" # or gpt-3.5-turbo
api_type: "openai" # or azure / ollama / groq etc.
base_url: "YOUR_MODEL_2_BASE_URL"
api_key: "YOUR_MODEL_2_API_KEY"
proxy: "YOUR_MODEL_2_PROXY" # for LLM API requests
# timeout: 600 # Optional. If set to 0, default value is 300.
# Details: https://azure.microsoft.com/en-us/pricing/details/cognitive-services/openai-service/
pricing_plan: "" # Optional. Use for Azure LLM when its model name is not the same as OpenAI's

View file

@@ -0,0 +1,45 @@
from metagpt.actions.action import Action
from metagpt.config2 import Config
from metagpt.const import TEST_DATA_PATH
from metagpt.context import Context
from metagpt.provider.llm_provider_registry import create_llm_instance
from metagpt.roles.role import Role
def test_set_llm():
    """Exercise Action/Role LLM wiring.

    Covers three paths: a directly assigned LLM survives ``Role.set_actions``;
    an action with its own private config keeps its LLM; and a multi-LLM YAML
    config lets an action pick a named model via ``llm_name_or_type``.
    """
    default_cfg = Config.default()
    alt_cfg = Config.default()
    alt_cfg.llm.model = "gpt-3.5-turbo"

    ctx = Context(config=default_cfg)
    action = Action(context=ctx)
    assert action.config.llm.model == default_cfg.llm.model

    # A directly attached LLM is treated as private and survives set_actions.
    alt_llm = create_llm_instance(alt_cfg.llm)
    action.llm = alt_llm
    assert action.llm.model == alt_llm.model
    owner = Role(context=ctx)
    owner.set_actions([action])
    assert action.llm.model == alt_llm.model

    # An action given a private config also keeps its own LLM after set_actions.
    owner2 = Role(context=ctx)
    action2 = Action(context=ctx)
    assert action2.config.llm.model == default_cfg.llm.model
    action2.config = alt_cfg
    owner2.set_actions([action2])
    assert action2.llm.model == alt_llm.model

    # multiple LLM: select a named model from the multi-LLM YAML config.
    multi_cfg = Config(**Config.read_yaml(TEST_DATA_PATH / "config/config2_multi_llm.yaml"))
    multi_ctx = Context(config=multi_cfg)
    owner3 = Role(context=multi_ctx)
    action3 = Action(context=multi_ctx, llm_name_or_type="YOUR_MODEL_NAME_1")
    assert action3.config.llm.model == "gpt-3.5-turbo"
    assert action3.llm.model == "gpt-4-turbo"
    owner3.set_actions([action3])
    assert action3.config.llm.model == "gpt-3.5-turbo"
    assert action3.llm.model == "gpt-4-turbo"