Flow temperature parameter (#533)

* Add temperature parameter to LlmService and roll out to all LLMs
cybermaggedon authored 2025-09-25 21:26:11 +01:00, committed by GitHub
parent aa8e422e8c
commit 6f4f7ce6b4
15 changed files with 164 additions and 72 deletions
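
Since only one of the 15 changed files appears in this excerpt, here is a minimal sketch of the LlmService interface implied by the commit message (the base-class body is an assumption; only the new temperature=None keyword is confirmed by the diff below):

    class LlmService:
        # Assumed shape of the shared interface after this commit; only
        # the signature (model=None, temperature=None) is confirmed by
        # the Processor override shown in the diff below.
        async def generate_content(self, system, prompt, model=None, temperature=None):
            raise NotImplementedError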

@@ -48,12 +48,15 @@ class Processor(LlmService):
         logger.info("Mistral LLM service initialized")
 
-    async def generate_content(self, system, prompt, model=None):
+    async def generate_content(self, system, prompt, model=None, temperature=None):
 
         # Use provided model or fall back to default
         model_name = model or self.default_model
 
+        # Use provided temperature or fall back to default
+        effective_temperature = temperature if temperature is not None else self.temperature
 
         logger.debug(f"Using model: {model_name}")
+        logger.debug(f"Using temperature: {effective_temperature}")
 
         prompt = system + "\n\n" + prompt
@@ -72,7 +75,7 @@ class Processor(LlmService):
                     ]
                 }
             ],
-            temperature=self.temperature,
+            temperature=effective_temperature,
             max_tokens=self.max_output,
             top_p=1,
             frequency_penalty=0,
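
For illustration, a caller might override the sampling temperature per request and fall back to the service default otherwise (a hedged sketch; the demo function, svc instance, and prompts are hypothetical, not part of this commit):

    async def demo(svc):
        # Explicit override: 0.0 asks for near-deterministic sampling.
        strict = await svc.generate_content(
            system="You are a terse assistant.",
            prompt="Summarise the change in one sentence.",
            temperature=0.0,
        )
        # Omitting temperature means effective_temperature falls back to
        # the service-level default (self.temperature), so existing
        # callers keep their previous behaviour.
        relaxed = await svc.generate_content(
            system="You are a terse assistant.",
            prompt="Summarise the change in one sentence.",
        )
        return strict, relaxed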