diff --git a/templates/components/ollama.jsonnet b/templates/components/ollama.jsonnet index a1cb0bb7..a6cddcef 100644 --- a/templates/components/ollama.jsonnet +++ b/templates/components/ollama.jsonnet @@ -4,6 +4,7 @@ local url = import "values/url.jsonnet"; local prompts = import "prompts/slm.jsonnet"; { services +: { + "ollama-model":: "gemma2:9b", "text-completion": base + { image: images.trustgraph, @@ -12,7 +13,7 @@ local prompts = import "prompts/slm.jsonnet"; "-p", url.pulsar, "-m", - "gemma2:9b", + $["ollama-model"], "-r", "${OLLAMA_HOST}", ], @@ -37,7 +38,7 @@ local prompts = import "prompts/slm.jsonnet"; "-p", url.pulsar, "-m", - "gemma2:9b", + $["ollama-model"], "-r", "${OLLAMA_HOST}", "-i", diff --git a/templates/components/openai.jsonnet b/templates/components/openai.jsonnet index b13621fe..8bcdf557 100644 --- a/templates/components/openai.jsonnet +++ b/templates/components/openai.jsonnet @@ -7,6 +7,7 @@ local prompts = import "prompts/openai.jsonnet"; "openai-key":: "${OPENAI_KEY}", "openai-max-output-tokens":: 4096, "openai-temperature":: 0.0, + "openai-model":: "gpt-3.5-turbo", services +: { @@ -22,6 +23,8 @@ local prompts = import "prompts/openai.jsonnet"; std.toString($["openai-max-output-tokens"]), "-t", std.toString($["openai-temperature"]), + "-m", + $["openai-model"], ], deploy: { resources: { @@ -49,6 +52,8 @@ local prompts = import "prompts/openai.jsonnet"; std.toString($["openai-max-output-tokens"]), "-t", std.toString($["openai-temperature"]), + "-m", + $["openai-model"], "-i", "non-persistent://tg/request/text-completion-rag", "-o", diff --git a/templates/config-loader.jsonnet b/templates/config-loader.jsonnet index d599eaaf..6555dea8 100644 --- a/templates/config-loader.jsonnet +++ b/templates/config-loader.jsonnet @@ -19,7 +19,8 @@ local components = { "trustgraph-base": import "components/trustgraph.jsonnet", "vector-store-milvus": import "components/milvus.jsonnet", "vector-store-qdrant": import "components/qdrant.jsonnet", - 
"vertexai": import "components/vertexai.jsonnet" + "vertexai": import "components/vertexai.jsonnet", + "null": {} }; local config = function(p)