Fix bug in multi-LLM support

This commit is contained in:
zhaoweiguo 2024-10-31 16:55:47 +08:00 committed by GitHub
parent 21d9c5aeed
commit 0f27029b2c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@ -49,6 +49,7 @@ class Action(SerializationMixin, ContextMixin, BaseModel):
llm = create_llm_instance(config)
llm.cost_manager = data.llm.cost_manager
data.llm = llm
data.config = config # if not set self.config, self.llm will be reset when you call Role.set_actions function
return data
@property