Feature/environment var creds (#116)

- Change templates to interpolate environment variables in docker compose
- Change templates to invoke secrets for environment variable credentials in K8s configuration
- Update LLMs to pull in credentials from environment variables if not specified
This commit is contained in:
cybermaggedon 2024-10-15 00:34:52 +01:00 committed by GitHub
parent 43756d872b
commit 86288339cf
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
20 changed files with 327 additions and 271 deletions

View file

@@ -5,7 +5,6 @@ local prompts = import "prompts/mixtral.jsonnet";
{
"azure-openai-token":: "${AZURE_OPENAI_TOKEN}",
"azure-openai-model":: "GPT-3.5-Turbo",
"azure-openai-max-output-tokens":: 4192,
"azure-openai-temperature":: 0.0,
@@ -14,6 +13,9 @@ local prompts = import "prompts/mixtral.jsonnet";
create:: function(engine)
local envSecrets = engine.envSecrets("azure-openai-credentials")
.with_env_var("AZURE_TOKEN", "azure-token");
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
@@ -21,8 +23,6 @@ local prompts = import "prompts/mixtral.jsonnet";
"text-completion-azure-openai",
"-p",
url.pulsar,
"-k",
$["azure-openai-token"],
"-m",
$["azure-openai-model"],
"-x",
@@ -30,39 +30,17 @@ local prompts = import "prompts/mixtral.jsonnet";
"-t",
std.toString($["azure-openai-temperature"]),
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion", [ container ]
);
local service =
engine.internalService(containerSet)
.with_port(8000, 8000, "metrics");
engine.resources([
containerSet,
service,
])
},
"text-completion-rag" +: {
create:: function(engine)
local container =
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_command([
"text-completion-azure",
"-p",
url.pulsar,
"-k",
$["azure-openai-token"],
"-e",
$["azure-openai-model"],
"-x",
std.toString($["azure-openai-max-output-tokens"]),
"-t",
@@ -72,24 +50,35 @@ local prompts = import "prompts/mixtral.jsonnet";
"-o",
"non-persistent://tg/response/text-completion-rag-response",
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion-rag", [ container ]
"text-completion", [ container ]
);
local containerSetRag = engine.containers(
"text-completion-rag", [ containerRag ]
);
local service =
engine.internalService(containerSet)
.with_port(8000, 8000, "metrics");
local serviceRag =
engine.internalService(containerSetRag)
.with_port(8000, 8000, "metrics");
engine.resources([
envSecrets,
containerSet,
containerSetRag,
service,
serviceRag,
])
}
},
} + prompts

View file

@@ -5,8 +5,6 @@ local prompts = import "prompts/mixtral.jsonnet";
{
"azure-token":: "${AZURE_TOKEN}",
"azure-endpoint":: "${AZURE_ENDPOINT}",
"azure-max-output-tokens":: 4096,
"azure-temperature":: 0.0,
@@ -14,6 +12,10 @@ local prompts = import "prompts/mixtral.jsonnet";
create:: function(engine)
local envSecrets = engine.envSecrets("azure-credentials")
.with_env_var("AZURE_TOKEN", "azure-token")
.with_env_var("AZURE_ENDPOINT", "azure-endpoint");
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
@@ -21,48 +23,22 @@ local prompts = import "prompts/mixtral.jsonnet";
"text-completion-azure",
"-p",
url.pulsar,
"-k",
$["azure-token"],
"-e",
$["azure-endpoint"],
"-x",
std.toString($["azure-max-output-tokens"]),
"-t",
std.toString($["azure-temperature"]),
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion", [ container ]
);
local service =
engine.internalService(containerSet)
.with_port(8000, 8000, "metrics");
engine.resources([
containerSet,
service,
])
},
"text-completion-rag" +: {
create:: function(engine)
local container =
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_command([
"text-completion-azure",
"-p",
url.pulsar,
"-k",
$["azure-token"],
"-e",
$["azure-endpoint"],
"-x",
std.toString($["azure-max-output-tokens"]),
"-t",
@@ -72,22 +48,33 @@ local prompts = import "prompts/mixtral.jsonnet";
"-o",
"non-persistent://tg/response/text-completion-rag-response",
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion-rag", [ container ]
"text-completion", [ container ]
);
local containerSetRag = engine.containers(
"text-completion-rag", [ containerRag ]
);
local service =
engine.internalService(containerSet)
.with_port(8000, 8000, "metrics");
engine.resources([
containerSet,
service,
])
local serviceRag =
engine.internalService(containerSetRag)
.with_port(8000, 8000, "metrics");
engine.resources([
envSecrets,
containerSet,
containerSetRag,
service,
serviceRag,
])
}

View file

@@ -6,9 +6,6 @@ local chunker = import "chunker-recursive.jsonnet";
{
"aws-id-key":: "${AWS_ID_KEY}",
"aws-secret-key":: "${AWS_SECRET_KEY}",
"aws-region":: "us-west-2",
"bedrock-max-output-tokens":: 4096,
"bedrock-temperature":: 0.0,
"bedrock-model":: "mistral.mixtral-8x7b-instruct-v0:1",
@@ -17,6 +14,11 @@ local chunker = import "chunker-recursive.jsonnet";
create:: function(engine)
local envSecrets = engine.envSecrets("bedrock-credentials")
.with_env_var("AWS_ID_KEY", "aws-id-key")
.with_env_var("AWS_SECRET_KEY", "aws-secret-key")
.with_env_var("AWS_REGION", "aws-region");
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
@@ -24,12 +26,6 @@ local chunker = import "chunker-recursive.jsonnet";
"text-completion-bedrock",
"-p",
url.pulsar,
"-z",
$["aws-id-key"],
"-k",
$["aws-secret-key"],
"-r",
$["aws-region"],
"-x",
std.toString($["bedrock-max-output-tokens"]),
"-t",
@@ -37,41 +33,17 @@ local chunker = import "chunker-recursive.jsonnet";
"-m",
$["bedrock-model"],
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion", [ container ]
);
local service =
engine.internalService(containerSet)
.with_port(8000, 8000, "metrics");
engine.resources([
containerSet,
service,
])
},
"text-completion-rag" +: {
create:: function(engine)
local container =
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_command([
"text-completion-bedrock",
"-p",
url.pulsar,
"-z",
$["aws-id-key"],
"-k",
$["aws-secret-key"],
"-r",
$["aws-region"],
"-x",
std.toString($["bedrock-max-output-tokens"]),
"-t",
@@ -83,24 +55,35 @@ local chunker = import "chunker-recursive.jsonnet";
"-o",
"non-persistent://tg/response/text-completion-rag-response",
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion-rag", [ container ]
"text-completion", [ container ]
);
local containerSetRag = engine.containers(
"text-completion-rag", [ containerRag ]
);
local service =
engine.internalService(containerSet)
.with_port(8000, 8000, "metrics");
local serviceRag =
engine.internalService(containerSetRag)
.with_port(8000, 8000, "metrics");
engine.resources([
envSecrets,
containerSet,
containerSetRag,
service,
serviceRag,
])
}
},
} + prompts + chunker

View file

@@ -5,7 +5,6 @@ local prompts = import "prompts/mixtral.jsonnet";
{
"claude-key":: "${CLAUDE_KEY}",
"claude-max-output-tokens":: 4096,
"claude-temperature":: 0.0,
@@ -13,6 +12,9 @@ local prompts = import "prompts/mixtral.jsonnet";
create:: function(engine)
local envSecrets = engine.envSecrets("claude-credentials")
.with_env_var("CLAUDE_KEY_TOKEN", "claude-key");
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
@@ -20,44 +22,22 @@ local prompts = import "prompts/mixtral.jsonnet";
"text-completion-claude",
"-p",
url.pulsar,
"-k",
$["claude-key"],
"-x",
std.toString($["claude-max-output-tokens"]),
"-t",
std.toString($["claude-temperature"]),
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion", [ container ]
);
local service =
engine.internalService(containerSet)
.with_port(8000, 8000, "metrics");
engine.resources([
containerSet,
service,
])
},
"text-completion-rag" +: {
create:: function(engine)
local container =
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_command([
"text-completion-claude",
"-p",
url.pulsar,
"-k",
$["claude-key"],
"-x",
std.toString($["claude-max-output-tokens"]),
"-t",
@@ -67,24 +47,35 @@ local prompts = import "prompts/mixtral.jsonnet";
"-o",
"non-persistent://tg/response/text-completion-rag-response",
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion-rag", [ container ]
"text-completion", [ container ]
);
local containerSetRag = engine.containers(
"text-completion-rag", [ containerRag ]
);
local service =
engine.internalService(containerSet)
.with_port(8000, 8000, "metrics");
local serviceRag =
engine.internalService(containerSetRag)
.with_port(8000, 8000, "metrics");
engine.resources([
envSecrets,
containerSet,
containerSetRag,
service,
serviceRag,
])
}
},
} + prompts

View file

@@ -9,13 +9,15 @@ local prompts = import "prompts/mixtral.jsonnet";
"chunk-size":: 150,
"chunk-overlap":: 10,
"cohere-key":: "${COHERE_KEY}",
"cohere-temperature":: 0.0,
"text-completion" +: {
create:: function(engine)
local envSecrets = engine.envSecrets("cohere-credentials")
.with_env_var("COHERE_KEY", "cohere-key");
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
@@ -23,42 +25,19 @@ local prompts = import "prompts/mixtral.jsonnet";
"text-completion-cohere",
"-p",
url.pulsar,
"-k",
$["cohere-key"],
"-t",
std.toString($["cohere-temperature"]),
])
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion", [ container ]
);
local service =
engine.internalService(containerSet)
.with_port(8000, 8000, "metrics");
engine.resources([
containerSet,
service,
])
},
"text-completion-rag" +: {
create:: function(engine)
local container =
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_command([
"text-completion-cohere",
"-p",
url.pulsar,
"-k",
$["cohere-key"],
"-t",
std.toString($["cohere-temperature"]),
"-i",
@@ -70,20 +49,30 @@ local prompts = import "prompts/mixtral.jsonnet";
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion-rag", [ container ]
"text-completion", [ container ]
);
local containerSetRag = engine.containers(
"text-completion-rag", [ containerRag ]
);
local service =
engine.internalService(containerSet)
.with_port(8000, 8000, "metrics");
local serviceRag =
engine.internalService(containerSetRag)
.with_port(8000, 8000, "metrics");
engine.resources([
envSecrets,
containerSet,
containerSetRag,
service,
serviceRag,
])
}
},
} + prompts

View file

@@ -6,12 +6,14 @@ local prompts = import "prompts/slm.jsonnet";
{
"llamafile-model":: "LLaMA_CPP",
"llamafile-url":: "${LLAMAFILE_URL}",
"text-completion" +: {
create:: function(engine)
local envSecrets = engine.envSecrets("llamafile-credentials")
.with_env_var("LLAMAFILE_URL", "llamafile-url");
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
@@ -21,27 +23,12 @@ local prompts = import "prompts/slm.jsonnet";
url.pulsar,
"-m",
$["llamafile-model"],
"-r",
$["llamafile-url"],
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion", [ container ]
);
engine.resources([
containerSet,
])
},
"text-completion-rag" +: {
create:: function(engine)
local container =
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_command([
@@ -50,26 +37,40 @@ local prompts = import "prompts/slm.jsonnet";
url.pulsar,
"-m",
$["llamafile-model"],
"-r",
$["llamafile-url"],
"-i",
"non-persistent://tg/request/text-completion-rag",
"-o",
"non-persistent://tg/response/text-completion-rag-response",
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion-rag", [ container ]
"text-completion", [ container ]
);
local containerSetRag = engine.containers(
"text-completion-rag", [ containerRag ]
);
local service =
engine.internalService(containerSet)
.with_port(8080, 8080, "metrics");
local serviceRag =
engine.internalService(containerSetRag)
.with_port(8080, 8080, "metrics");
engine.resources([
envSecrets,
containerSet,
containerSetRag,
service,
serviceRag,
])
}
},
} + prompts

View file

@@ -6,12 +6,14 @@ local prompts = import "prompts/slm.jsonnet";
{
"ollama-model":: "gemma2:9b",
"ollama-url":: "${OLLAMA_HOST}",
"text-completion" +: {
create:: function(engine)
local envSecrets = engine.envSecrets("ollama-credentials")
.with_env_var("OLLAMA_HOST", "ollama-host");
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
@@ -21,32 +23,12 @@ local prompts = import "prompts/slm.jsonnet";
url.pulsar,
"-m",
$["ollama-model"],
"-r",
$["ollama-url"],
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion", [ container ]
);
local service =
engine.internalService(containerSet)
.with_port(8080, 8080, "metrics");
engine.resources([
containerSet,
service,
])
},
"text-completion-rag" +: {
create:: function(engine)
local container =
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_command([
@@ -55,31 +37,40 @@ local prompts = import "prompts/slm.jsonnet";
url.pulsar,
"-m",
$["ollama-model"],
"-r",
$["ollama-url"],
"-i",
"non-persistent://tg/request/text-completion-rag",
"-o",
"non-persistent://tg/response/text-completion-rag-response",
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion-rag", [ container ]
"text-completion", [ container ]
);
local containerSetRag = engine.containers(
"text-completion-rag", [ containerRag ]
);
local service =
engine.internalService(containerSet)
.with_port(8080, 8080, "metrics");
local serviceRag =
engine.internalService(containerSetRag)
.with_port(8080, 8080, "metrics");
engine.resources([
envSecrets,
containerSet,
containerSetRag,
service,
serviceRag,
])
}
},
} + prompts

View file

@@ -5,7 +5,6 @@ local prompts = import "prompts/mixtral.jsonnet";
{
"openai-key":: "${OPENAI_KEY}",
"openai-max-output-tokens":: 4096,
"openai-temperature":: 0.0,
"openai-model":: "GPT-3.5-Turbo",
@@ -14,6 +13,9 @@ local prompts = import "prompts/mixtral.jsonnet";
create:: function(engine)
local envSecrets = engine.envSecrets("openai-credentials")
.with_env_var("OPENAI_TOKEN", "openai-token");
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
@@ -21,8 +23,6 @@ local prompts = import "prompts/mixtral.jsonnet";
"text-completion-openai",
"-p",
url.pulsar,
"-k",
$["openai-key"],
"-x",
std.toString($["openai-max-output-tokens"]),
"-t",
@@ -30,37 +30,17 @@ local prompts = import "prompts/mixtral.jsonnet";
"-m",
$["openai-model"],
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion", [ container ]
);
local service =
engine.internalService(containerSet)
.with_port(8080, 8080, "metrics");
engine.resources([
containerSet,
service,
])
},
"text-completion-rag" +: {
create:: function(engine)
local container =
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_command([
"text-completion-openai",
"-p",
url.pulsar,
"-k",
$["openai-key"],
"-x",
std.toString($["openai-max-output-tokens"]),
"-t",
@ -72,24 +52,35 @@ local prompts = import "prompts/mixtral.jsonnet";
"-o",
"non-persistent://tg/response/text-completion-rag-response",
])
.with_env_var_secrets(envSecrets)
.with_limits("0.5", "128M")
.with_reservations("0.1", "128M");
local containerSet = engine.containers(
"text-completion-rag", [ container ]
"text-completion", [ container ]
);
local containerSetRag = engine.containers(
"text-completion-rag", [ containerRag ]
);
local service =
engine.internalService(containerSet)
.with_port(8080, 8080, "metrics");
local serviceRag =
engine.internalService(containerSetRag)
.with_port(8080, 8080, "metrics");
engine.resources([
envSecrets,
containerSet,
containerSetRag,
service,
serviceRag,
])
}
},
} + prompts