Make temperature round to 3 dec places (#138)

This commit is contained in:
cybermaggedon 2024-11-05 23:04:21 +00:00 committed by GitHub
parent 65cda7b276
commit ee9373d55e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 16 additions and 16 deletions

View file

@@ -28,7 +28,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["azure-openai-max-output-tokens"]),
"-t",
-std.toString($["azure-openai-temperature"]),
+"%0.3f" % $["azure-openai-temperature"],
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
@@ -44,7 +44,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["azure-openai-max-output-tokens"]),
"-t",
-std.toString($["azure-openai-temperature"]),
+"%0.3f" % $["azure-openai-temperature"],
"-i",
"non-persistent://tg/request/text-completion-rag",
"-o",

View file

@@ -26,7 +26,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["azure-max-output-tokens"]),
"-t",
-std.toString($["azure-temperature"]),
+"%0.3f" % $["azure-temperature"],
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
@@ -42,7 +42,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["azure-max-output-tokens"]),
"-t",
-std.toString($["azure-temperature"]),
+"%0.3f" % $["azure-temperature"],
"-i",
"non-persistent://tg/request/text-completion-rag",
"-o",

View file

@@ -29,7 +29,7 @@ local chunker = import "chunker-recursive.jsonnet";
"-x",
std.toString($["bedrock-max-output-tokens"]),
"-t",
-std.toString($["bedrock-temperature"]),
+"%0.3f" % $["bedrock-temperature"],
"-m",
$["bedrock-model"],
])
@@ -47,7 +47,7 @@ local chunker = import "chunker-recursive.jsonnet";
"-x",
std.toString($["bedrock-max-output-tokens"]),
"-t",
-std.toString($["bedrock-temperature"]),
+"%0.3f" % $["bedrock-temperature"],
"-m",
$["bedrock-model"],
"-i",

View file

@@ -25,7 +25,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["claude-max-output-tokens"]),
"-t",
-std.toString($["claude-temperature"]),
+"%0.3f" % $["claude-temperature"],
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
@@ -41,7 +41,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["claude-max-output-tokens"]),
"-t",
-std.toString($["claude-temperature"]),
+"%0.3f" % $["claude-temperature"],
"-i",
"non-persistent://tg/request/text-completion-rag",
"-o",

View file

@@ -26,7 +26,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-p",
url.pulsar,
"-t",
-std.toString($["cohere-temperature"]),
+"%0.3f" % $["cohere-temperature"],
])
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
@@ -39,7 +39,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-p",
url.pulsar,
"-t",
-std.toString($["cohere-temperature"]),
+"%0.3f" % $["cohere-temperature"],
"-i",
"non-persistent://tg/request/text-completion-rag",
"-o",

View file

@@ -25,7 +25,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["googleaistudio-max-output-tokens"]),
"-t",
-std.toString($["googleaistudio-temperature"]),
+"%0.3f" % $["googleaistudio-temperature"],
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
@@ -41,7 +41,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["googleaistudio-max-output-tokens"]),
"-t",
-std.toString($["googleaistudio-temperature"]),
+"%0.3f" % $["googleaistudio-temperature"],
"-i",
"non-persistent://tg/request/text-completion-rag",
"-o",

View file

@@ -26,7 +26,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["openai-max-output-tokens"]),
"-t",
-std.toString($["openai-temperature"]),
+"%0.3f" % $["openai-temperature"],
"-m",
$["openai-model"],
])
@@ -44,7 +44,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["openai-max-output-tokens"]),
"-t",
-std.toString($["openai-temperature"]),
+"%0.3f" % $["openai-temperature"],
"-m",
$["openai-model"],
"-i",

View file

@@ -37,7 +37,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["vertexai-max-output-tokens"]),
"-t",
-std.toString($["vertexai-temperature"]),
+"%0.3f" % $["vertexai-temperature"],
"-m",
$["vertexai-model"],
])
@@ -87,7 +87,7 @@ local prompts = import "prompts/mixtral.jsonnet";
"-x",
std.toString($["vertexai-max-output-tokens"]),
"-t",
-std.toString($["vertexai-temperature"]),
+"%0.3f" % $["vertexai-temperature"],
"-m",
$["vertexai-model"],
"-i",