FIX multiLLM bug

This commit is contained in:
zhaowg3 2024-11-01 10:41:21 +08:00
parent 0f27029b2c
commit 6d1ab98c41
4 changed files with 81 additions and 2 deletions

View file

@@ -49,7 +49,6 @@ class Action(SerializationMixin, ContextMixin, BaseModel):
llm = create_llm_instance(config)
llm.cost_manager = data.llm.cost_manager
data.llm = llm
data.config = config # if not set self.config, self.llm will be reset when you call Role.set_actions function
return data
@property