LLM dynamic settings, using the llm-model and llm-rag-model parameters to a flow (#531)

* Ported LLMs to dynamic models
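
In practice this means the model name is supplied per flow rather than fixed at service start-up. A minimal sketch of the caller's side, assuming a hypothetical start_flow helper; only the llm-model and llm-rag-model parameter names come from the commit title, and the model values are illustrative:

# Hypothetical sketch: launching a flow with per-flow model settings.
# start_flow and the model values are assumptions for illustration; only
# the llm-model / llm-rag-model parameter names come from the commit title.
flow_params = {
    "llm-model": "gemma3:12b",      # model for plain LLM completions
    "llm-rag-model": "gemma3:12b",  # model for RAG answer generation
}

start_flow("my-flow", params=flow_params)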
Authored by cybermaggedon on 2025-09-24 16:36:25 +01:00, committed by GitHub
parent 9a34ab1b93
commit 7a3bfad826
15 changed files with 266 additions and 143 deletions


@@ -80,11 +80,6 @@ class LlmService(FlowProcessor):
         try:
 
-            try:
-                logger.debug(f"MODEL IS {flow('model')}")
-            except:
-                logger.debug(f"CAN'T GET MODEL")
-
             request = msg.value()
 
             # Sender-produced ID
@@ -96,8 +91,10 @@ class LlmService(FlowProcessor):
                     flow=f"{flow.name}-{consumer.name}",
                 ).time():
 
+                    model = flow("model")
+
                     response = await self.generate_content(
-                        request.system, request.prompt
+                        request.system, request.prompt, model
                     )
 
                     await flow("response").send(
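
Taken together, the hunks replace a debug-only lookup with a real one: flow("model") resolves the dynamic setting at request time, and the result is threaded into generate_content. A minimal sketch of the receiving side; the signature matches the diff, but the body and self.client are assumptions, not the project's actual API:

async def generate_content(self, system, prompt, model):
    # model now arrives per-request from the flow's dynamic settings
    # instead of a value fixed when the service started
    return await self.client.complete(
        model=model,
        system=system,
        prompt=prompt,
    )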