Mirror of https://github.com/trustgraph-ai/trustgraph.git (synced 2026-04-30 19:06:21 +02:00)
Make temperature round to 3 dec places (#138)

commit ee9373d55e
parent 65cda7b276

8 changed files with 16 additions and 16 deletions
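The change is identical in every file: the temperature value handed to the container's -t flag was previously stringified with std.toString, which emits the raw float at whatever precision it carries, and is now rendered with Jsonnet's % formatting operator so the flag always receives a string rounded to three decimal places. A minimal sketch of the difference (the input value is illustrative, not taken from any template):

    local temperature = 0.123456;
    {
      // Old behaviour: full float precision.
      old: std.toString(temperature),  // "0.123456"
      // New behaviour: fixed three decimal places.
      new: "%0.3f" % temperature,      // "0.123"
    }

In Jsonnet the % operator on a string is shorthand for std.format, which follows printf-style conversions, so "%0.3f" behaves as it does in C or Python.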
@@ -28,7 +28,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["azure-openai-max-output-tokens"]),
         "-t",
-        std.toString($["azure-openai-temperature"]),
+        "%0.3f" % $["azure-openai-temperature"],
       ])
       .with_env_var_secrets(envSecrets)
       .with_limits("0.5", "128M")

@@ -44,7 +44,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["azure-openai-max-output-tokens"]),
         "-t",
-        std.toString($["azure-openai-temperature"]),
+        "%0.3f" % $["azure-openai-temperature"],
         "-i",
         "non-persistent://tg/request/text-completion-rag",
         "-o",

@@ -26,7 +26,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["azure-max-output-tokens"]),
         "-t",
-        std.toString($["azure-temperature"]),
+        "%0.3f" % $["azure-temperature"],
       ])
       .with_env_var_secrets(envSecrets)
       .with_limits("0.5", "128M")

@@ -42,7 +42,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["azure-max-output-tokens"]),
         "-t",
-        std.toString($["azure-temperature"]),
+        "%0.3f" % $["azure-temperature"],
         "-i",
         "non-persistent://tg/request/text-completion-rag",
         "-o",

@@ -29,7 +29,7 @@ local chunker = import "chunker-recursive.jsonnet";
         "-x",
         std.toString($["bedrock-max-output-tokens"]),
         "-t",
-        std.toString($["bedrock-temperature"]),
+        "%0.3f" % $["bedrock-temperature"],
         "-m",
         $["bedrock-model"],
       ])

@@ -47,7 +47,7 @@ local chunker = import "chunker-recursive.jsonnet";
         "-x",
         std.toString($["bedrock-max-output-tokens"]),
         "-t",
-        std.toString($["bedrock-temperature"]),
+        "%0.3f" % $["bedrock-temperature"],
         "-m",
         $["bedrock-model"],
         "-i",

@@ -25,7 +25,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["claude-max-output-tokens"]),
         "-t",
-        std.toString($["claude-temperature"]),
+        "%0.3f" % $["claude-temperature"],
       ])
       .with_env_var_secrets(envSecrets)
       .with_limits("0.5", "128M")

@@ -41,7 +41,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["claude-max-output-tokens"]),
         "-t",
-        std.toString($["claude-temperature"]),
+        "%0.3f" % $["claude-temperature"],
         "-i",
         "non-persistent://tg/request/text-completion-rag",
         "-o",

@@ -26,7 +26,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-p",
         url.pulsar,
         "-t",
-        std.toString($["cohere-temperature"]),
+        "%0.3f" % $["cohere-temperature"],
       ])
       .with_limits("0.5", "128M")
       .with_reservations("0.1", "128M");

@@ -39,7 +39,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-p",
         url.pulsar,
         "-t",
-        std.toString($["cohere-temperature"]),
+        "%0.3f" % $["cohere-temperature"],
         "-i",
         "non-persistent://tg/request/text-completion-rag",
         "-o",

@@ -25,7 +25,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["googleaistudio-max-output-tokens"]),
         "-t",
-        std.toString($["googleaistudio-temperature"]),
+        "%0.3f" % $["googleaistudio-temperature"],
       ])
       .with_env_var_secrets(envSecrets)
       .with_limits("0.5", "128M")

@@ -41,7 +41,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["googleaistudio-max-output-tokens"]),
         "-t",
-        std.toString($["googleaistudio-temperature"]),
+        "%0.3f" % $["googleaistudio-temperature"],
         "-i",
         "non-persistent://tg/request/text-completion-rag",
         "-o",

@@ -26,7 +26,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["openai-max-output-tokens"]),
         "-t",
-        std.toString($["openai-temperature"]),
+        "%0.3f" % $["openai-temperature"],
         "-m",
         $["openai-model"],
       ])

@@ -44,7 +44,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["openai-max-output-tokens"]),
         "-t",
-        std.toString($["openai-temperature"]),
+        "%0.3f" % $["openai-temperature"],
         "-m",
         $["openai-model"],
         "-i",

@@ -37,7 +37,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["vertexai-max-output-tokens"]),
         "-t",
-        std.toString($["vertexai-temperature"]),
+        "%0.3f" % $["vertexai-temperature"],
         "-m",
         $["vertexai-model"],
       ])

@@ -87,7 +87,7 @@ local prompts = import "prompts/mixtral.jsonnet";
         "-x",
         std.toString($["vertexai-max-output-tokens"]),
         "-t",
-        std.toString($["vertexai-temperature"]),
+        "%0.3f" % $["vertexai-temperature"],
         "-m",
         $["vertexai-model"],
         "-i",