diff --git a/templates/components/azure-openai.jsonnet b/templates/components/azure-openai.jsonnet
index aa759b51..cc3847c0 100644
--- a/templates/components/azure-openai.jsonnet
+++ b/templates/components/azure-openai.jsonnet
@@ -28,7 +28,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["azure-openai-max-output-tokens"]),
                         "-t",
-                        std.toString($["azure-openai-temperature"]),
+                        "%0.3f" % $["azure-openai-temperature"],
                     ])
                     .with_env_var_secrets(envSecrets)
                     .with_limits("0.5", "128M")
@@ -44,7 +44,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["azure-openai-max-output-tokens"]),
                         "-t",
-                        std.toString($["azure-openai-temperature"]),
+                        "%0.3f" % $["azure-openai-temperature"],
                         "-i",
                         "non-persistent://tg/request/text-completion-rag",
                         "-o",
diff --git a/templates/components/azure.jsonnet b/templates/components/azure.jsonnet
index aacbeac4..82b79133 100644
--- a/templates/components/azure.jsonnet
+++ b/templates/components/azure.jsonnet
@@ -26,7 +26,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["azure-max-output-tokens"]),
                         "-t",
-                        std.toString($["azure-temperature"]),
+                        "%0.3f" % $["azure-temperature"],
                     ])
                     .with_env_var_secrets(envSecrets)
                     .with_limits("0.5", "128M")
@@ -42,7 +42,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["azure-max-output-tokens"]),
                         "-t",
-                        std.toString($["azure-temperature"]),
+                        "%0.3f" % $["azure-temperature"],
                         "-i",
                         "non-persistent://tg/request/text-completion-rag",
                         "-o",
diff --git a/templates/components/bedrock.jsonnet b/templates/components/bedrock.jsonnet
index 11f112e1..93978a59 100644
--- a/templates/components/bedrock.jsonnet
+++ b/templates/components/bedrock.jsonnet
@@ -29,7 +29,7 @@ local chunker = import "chunker-recursive.jsonnet";
                         "-x",
                         std.toString($["bedrock-max-output-tokens"]),
                         "-t",
-                        std.toString($["bedrock-temperature"]),
+                        "%0.3f" % $["bedrock-temperature"],
                         "-m",
                         $["bedrock-model"],
                     ])
@@ -47,7 +47,7 @@ local chunker = import "chunker-recursive.jsonnet";
                         "-x",
                         std.toString($["bedrock-max-output-tokens"]),
                         "-t",
-                        std.toString($["bedrock-temperature"]),
+                        "%0.3f" % $["bedrock-temperature"],
                         "-m",
                         $["bedrock-model"],
                         "-i",
diff --git a/templates/components/claude.jsonnet b/templates/components/claude.jsonnet
index b723a16f..c6c94e21 100644
--- a/templates/components/claude.jsonnet
+++ b/templates/components/claude.jsonnet
@@ -25,7 +25,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["claude-max-output-tokens"]),
                         "-t",
-                        std.toString($["claude-temperature"]),
+                        "%0.3f" % $["claude-temperature"],
                     ])
                     .with_env_var_secrets(envSecrets)
                     .with_limits("0.5", "128M")
@@ -41,7 +41,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["claude-max-output-tokens"]),
                         "-t",
-                        std.toString($["claude-temperature"]),
+                        "%0.3f" % $["claude-temperature"],
                         "-i",
                         "non-persistent://tg/request/text-completion-rag",
                         "-o",
diff --git a/templates/components/cohere.jsonnet b/templates/components/cohere.jsonnet
index c2027f3c..11c30fbd 100644
--- a/templates/components/cohere.jsonnet
+++ b/templates/components/cohere.jsonnet
@@ -26,7 +26,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-p",
                         url.pulsar,
                         "-t",
-                        std.toString($["cohere-temperature"]),
+                        "%0.3f" % $["cohere-temperature"],
                     ])
                     .with_limits("0.5", "128M")
                     .with_reservations("0.1", "128M");
@@ -39,7 +39,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-p",
                         url.pulsar,
                         "-t",
-                        std.toString($["cohere-temperature"]),
+                        "%0.3f" % $["cohere-temperature"],
                         "-i",
                         "non-persistent://tg/request/text-completion-rag",
                         "-o",
diff --git a/templates/components/googleaistudio.jsonnet b/templates/components/googleaistudio.jsonnet
index e4f45ae9..ae880e36 100644
--- a/templates/components/googleaistudio.jsonnet
+++ b/templates/components/googleaistudio.jsonnet
@@ -25,7 +25,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["googleaistudio-max-output-tokens"]),
                         "-t",
-                        std.toString($["googleaistudio-temperature"]),
+                        "%0.3f" % $["googleaistudio-temperature"],
                     ])
                     .with_env_var_secrets(envSecrets)
                     .with_limits("0.5", "128M")
@@ -41,7 +41,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["googleaistudio-max-output-tokens"]),
                         "-t",
-                        std.toString($["googleaistudio-temperature"]),
+                        "%0.3f" % $["googleaistudio-temperature"],
                         "-i",
                         "non-persistent://tg/request/text-completion-rag",
                         "-o",
diff --git a/templates/components/openai.jsonnet b/templates/components/openai.jsonnet
index 74290420..83cbd406 100644
--- a/templates/components/openai.jsonnet
+++ b/templates/components/openai.jsonnet
@@ -26,7 +26,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["openai-max-output-tokens"]),
                         "-t",
-                        std.toString($["openai-temperature"]),
+                        "%0.3f" % $["openai-temperature"],
                         "-m",
                         $["openai-model"],
                     ])
@@ -44,7 +44,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["openai-max-output-tokens"]),
                         "-t",
-                        std.toString($["openai-temperature"]),
+                        "%0.3f" % $["openai-temperature"],
                         "-m",
                         $["openai-model"],
                         "-i",
diff --git a/templates/components/vertexai.jsonnet b/templates/components/vertexai.jsonnet
index 2bc97799..44fe27c6 100644
--- a/templates/components/vertexai.jsonnet
+++ b/templates/components/vertexai.jsonnet
@@ -37,7 +37,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["vertexai-max-output-tokens"]),
                         "-t",
-                        std.toString($["vertexai-temperature"]),
+                        "%0.3f" % $["vertexai-temperature"],
                         "-m",
                         $["vertexai-model"],
                     ])
@@ -87,7 +87,7 @@ local prompts = import "prompts/mixtral.jsonnet";
                         "-x",
                         std.toString($["vertexai-max-output-tokens"]),
                         "-t",
-                        std.toString($["vertexai-temperature"]),
+                        "%0.3f" % $["vertexai-temperature"],
                         "-m",
                         $["vertexai-model"],
                         "-i",