Feature/separate containers (#287)

* Separate containerfiles

* Add push to Makefile

* Update image names in the templates
This commit is contained in:
cybermaggedon 2025-01-28 19:36:05 +00:00 committed by GitHub
parent 6c3d2e7f97
commit edcdc4d59d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
44 changed files with 363 additions and 65 deletions

View file

@ -30,7 +30,7 @@ packages: update-package-versions
pypi-upload:
twine upload dist/*-${VERSION}.*
CONTAINER=docker.io/trustgraph/trustgraph-flow
CONTAINER_BASE=docker.io/trustgraph
update-package-versions:
mkdir -p trustgraph-cli/trustgraph
@ -44,11 +44,23 @@ update-package-versions:
echo __version__ = \"${VERSION}\" > trustgraph/trustgraph/trustgraph_version.py
container: update-package-versions
${DOCKER} build -f Containerfile -t ${CONTAINER}:${VERSION} \
--format docker
${DOCKER} build -f containers/Containerfile.base \
-t ${CONTAINER_BASE}/trustgraph-base:${VERSION} .
${DOCKER} build -f containers/Containerfile.flow \
-t ${CONTAINER_BASE}/trustgraph-flow:${VERSION} .
${DOCKER} build -f containers/Containerfile.bedrock \
-t ${CONTAINER_BASE}/trustgraph-bedrock:${VERSION} .
${DOCKER} build -f containers/Containerfile.vertexai \
-t ${CONTAINER_BASE}/trustgraph-vertexai:${VERSION} .
${DOCKER} build -f containers/Containerfile.hf \
-t ${CONTAINER_BASE}/trustgraph-hf:${VERSION} .
push:
${DOCKER} push ${CONTAINER}:${VERSION}
${DOCKER} push ${CONTAINER_BASE}/trustgraph-base:${VERSION}
${DOCKER} push ${CONTAINER_BASE}/trustgraph-flow:${VERSION}
${DOCKER} push ${CONTAINER_BASE}/trustgraph-bedrock:${VERSION}
${DOCKER} push ${CONTAINER_BASE}/trustgraph-vertexai:${VERSION}
${DOCKER} push ${CONTAINER_BASE}/trustgraph-hf:${VERSION}
clean:
rm -rf wheels/

View file

@ -0,0 +1,48 @@
# ----------------------------------------------------------------------------
# Base container: Python plus the Pulsar client. Kept as a separate stage so
# these installs are not re-done on every package rebuild.
# ----------------------------------------------------------------------------
FROM docker.io/fedora:40 AS base
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN dnf install -y python3 python3-pip python3-wheel python3-aiohttp && \
dnf clean all
RUN pip3 install --no-cache-dir pulsar-client==3.5.0
# ----------------------------------------------------------------------------
# Build a container which contains the built Python packages. The build
# creates a bunch of left-over cruft, a separate phase means this is only
# needed to support package build
# ----------------------------------------------------------------------------
FROM base AS build
COPY trustgraph-base/ /root/build/trustgraph-base/
COPY trustgraph-cli/ /root/build/trustgraph-cli/
WORKDIR /root/build/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-base/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-cli/
# ----------------------------------------------------------------------------
# Finally, the target container. Start with base and add the package.
# ----------------------------------------------------------------------------
FROM base
COPY --from=build /root/wheels /root/wheels
RUN \
pip3 install --no-cache-dir /root/wheels/trustgraph_base-* && \
pip3 install --no-cache-dir /root/wheels/trustgraph_cli-* && \
rm -rf /root/wheels
WORKDIR /

View file

@ -0,0 +1,48 @@
# ----------------------------------------------------------------------------
# Bedrock base container: Python plus boto3 and the Pulsar client. Kept as a
# separate stage so these installs are not re-done on every package rebuild.
# ----------------------------------------------------------------------------
FROM docker.io/fedora:40 AS base
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN dnf install -y python3 python3-pip python3-wheel python3-aiohttp \
python3-rdflib
RUN pip3 install --no-cache-dir boto3 pulsar-client==3.5.0
# ----------------------------------------------------------------------------
# Build a container which contains the built Python packages. The build
# creates a bunch of left-over cruft, a separate phase means this is only
# needed to support package build
# ----------------------------------------------------------------------------
FROM base AS build
COPY trustgraph-base/ /root/build/trustgraph-base/
COPY trustgraph-bedrock/ /root/build/trustgraph-bedrock/
WORKDIR /root/build/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-base/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-bedrock/
RUN ls /root/wheels
# ----------------------------------------------------------------------------
# Finally, the target container. Start with base and add the package.
# ----------------------------------------------------------------------------
FROM base
COPY --from=build /root/wheels /root/wheels
RUN \
pip3 install --no-cache-dir /root/wheels/trustgraph_base-* && \
pip3 install --no-cache-dir /root/wheels/trustgraph_bedrock-* && \
rm -rf /root/wheels
WORKDIR /

View file

@ -0,0 +1,60 @@
# ----------------------------------------------------------------------------
# Flow base container: Python plus the LLM client libraries, langchain, and
# the store drivers. A separate stage avoids re-doing these large installs.
# ----------------------------------------------------------------------------
FROM docker.io/fedora:40 AS base
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN dnf install -y python3 python3-pip python3-wheel python3-aiohttp \
python3-rdflib
RUN pip3 install --no-cache-dir \
anthropic cohere openai google-generativeai \
ollama \
langchain==0.3.13 langchain-core==0.3.28 \
langchain-text-splitters==0.3.4 \
langchain-community==0.3.13 \
pymilvus \
pulsar-client==3.5.0 cassandra-driver pyyaml \
neo4j tiktoken falkordb && \
pip3 cache purge
# ----------------------------------------------------------------------------
# Build a container which contains the built Python packages. The build
# creates a bunch of left-over cruft, a separate phase means this is only
# needed to support package build
# ----------------------------------------------------------------------------
FROM base AS build
COPY trustgraph-base/ /root/build/trustgraph-base/
COPY trustgraph-flow/ /root/build/trustgraph-flow/
COPY trustgraph-cli/ /root/build/trustgraph-cli/
WORKDIR /root/build/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-base/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-flow/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-cli/
RUN ls /root/wheels
# ----------------------------------------------------------------------------
# Finally, the target container. Start with base and add the package.
# ----------------------------------------------------------------------------
FROM base
COPY --from=build /root/wheels /root/wheels
RUN \
pip3 install --no-cache-dir /root/wheels/trustgraph_base-* && \
pip3 install --no-cache-dir /root/wheels/trustgraph_flow-* && \
pip3 install --no-cache-dir /root/wheels/trustgraph_cli-* && \
rm -rf /root/wheels
WORKDIR /

View file

@ -0,0 +1,75 @@
# ----------------------------------------------------------------------------
# Build an AI container. This does the torch install which is huge, and I
# like to avoid re-doing this.
# ----------------------------------------------------------------------------
FROM docker.io/fedora:40 AS ai
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN dnf install -y python3 python3-pip python3-wheel python3-aiohttp \
python3-rdflib
RUN pip3 install torch==2.5.1+cpu \
--index-url https://download.pytorch.org/whl/cpu
RUN pip3 install --no-cache-dir \
langchain==0.3.13 langchain-core==0.3.28 langchain-huggingface==0.1.2 \
langchain-community==0.3.13 \
sentence-transformers==3.4.0 transformers==4.47.1 \
huggingface-hub==0.27.0 \
pulsar-client==3.5.0
# Most commonly used embeddings model, just build it into the container
# image
RUN huggingface-cli download sentence-transformers/all-MiniLM-L6-v2
# ----------------------------------------------------------------------------
# Build a container which contains the built Python packages. The build
# creates a bunch of left-over cruft, a separate phase means this is only
# needed to support package build
# ----------------------------------------------------------------------------
FROM ai AS build
COPY trustgraph-base/ /root/build/trustgraph-base/
COPY trustgraph-flow/ /root/build/trustgraph-flow/
COPY trustgraph-vertexai/ /root/build/trustgraph-vertexai/
COPY trustgraph-bedrock/ /root/build/trustgraph-bedrock/
COPY trustgraph-embeddings-hf/ /root/build/trustgraph-embeddings-hf/
COPY trustgraph-cli/ /root/build/trustgraph-cli/
WORKDIR /root/build/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-base/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-flow/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-vertexai/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-bedrock/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-embeddings-hf/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-cli/
RUN ls /root/wheels
# ----------------------------------------------------------------------------
# Finally, the target container. Start with base and add the package.
# ----------------------------------------------------------------------------
FROM ai
COPY --from=build /root/wheels /root/wheels
RUN \
pip3 install /root/wheels/trustgraph_base-* && \
pip3 install /root/wheels/trustgraph_flow-* && \
pip3 install /root/wheels/trustgraph_vertexai-* && \
pip3 install /root/wheels/trustgraph_bedrock-* && \
pip3 install /root/wheels/trustgraph_embeddings_hf-* && \
pip3 install /root/wheels/trustgraph_cli-* && \
pip3 cache purge && \
rm -rf /root/wheels
WORKDIR /
CMD sleep 1000000

View file

@ -0,0 +1,51 @@
# ----------------------------------------------------------------------------
# VertexAI base container: Python plus google-cloud-aiplatform and the Pulsar
# client. A separate stage avoids re-doing these installs on each rebuild.
# ----------------------------------------------------------------------------
FROM docker.io/fedora:40 AS base
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN dnf install -y python3 python3-pip python3-wheel python3-aiohttp \
python3-rdflib
RUN pip3 install --no-cache-dir \
google-cloud-aiplatform pulsar-client==3.5.0
# ----------------------------------------------------------------------------
# Build a container which contains the built Python packages. The build
# creates a bunch of left-over cruft, a separate phase means this is only
# needed to support package build
# ----------------------------------------------------------------------------
FROM base AS build
COPY trustgraph-base/ /root/build/trustgraph-base/
COPY trustgraph-vertexai/ /root/build/trustgraph-vertexai/
WORKDIR /root/build/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-base/
RUN pip3 wheel -w /root/wheels/ --no-deps ./trustgraph-vertexai/
RUN ls /root/wheels
# ----------------------------------------------------------------------------
# Finally, the target container. Start with base and add the package.
# ----------------------------------------------------------------------------
FROM base
COPY --from=build /root/wheels /root/wheels
RUN \
pip3 install --no-cache-dir /root/wheels/trustgraph_base-* && \
pip3 install --no-cache-dir /root/wheels/trustgraph_vertexai-* && \
rm -rf /root/wheels
WORKDIR /

View file

@ -14,7 +14,7 @@ local default_prompts = import "prompts/default-prompts.jsonnet";
local container =
engine.container("agent-manager")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"agent-manager-react",
"-p",

View file

@ -23,7 +23,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-azure",
"-p",

View file

@ -23,7 +23,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-azure-openai",
"-p",

View file

@ -22,7 +22,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-azure",
"-p",

View file

@ -22,7 +22,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-azure",
"-p",

View file

@ -26,7 +26,7 @@ local chunker = import "chunker-recursive.jsonnet";
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_bedrock)
.with_command([
"text-completion-bedrock",
"-p",

View file

@ -26,7 +26,7 @@ local chunker = import "chunker-recursive.jsonnet";
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
.with_image(images.trustgraph_bedrock)
.with_command([
"text-completion-bedrock",
"-p",

View file

@ -12,7 +12,7 @@ cassandra + {
local container =
engine.container("store-triples")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"triples-write-cassandra",
"-p",
@ -44,7 +44,7 @@ cassandra + {
local container =
engine.container("query-triples")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"triples-query-cassandra",
"-p",

View file

@ -14,7 +14,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("chunker")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"chunker-recursive",
"-p",

View file

@ -23,7 +23,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-claude",
"-p",

View file

@ -23,7 +23,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-claude",
"-p",

View file

@ -21,7 +21,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-cohere",
"-p",

View file

@ -21,7 +21,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-cohere",
"-p",

View file

@ -11,7 +11,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("document-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"document-rag",
"-p",
@ -45,7 +45,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("document-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"document-embeddings",
"-p",

View file

@ -13,7 +13,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"embeddings-fastembed",
"-p",

View file

@ -13,7 +13,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_hf)
.with_command([
"embeddings-hf",
"-p",

View file

@ -13,7 +13,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"embeddings-ollama",
"-p",

View file

@ -13,7 +13,7 @@ falkordb + {
local container =
engine.container("store-triples")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"triples-write-falkordb",
"-p",
@ -45,7 +45,7 @@ falkordb + {
local container =
engine.container("query-triples")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"triples-query-falkordb",
"-p",

View file

@ -23,7 +23,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-googleaistudio",
"-p",

View file

@ -23,7 +23,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-googleaistudio",
"-p",

View file

@ -14,7 +14,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("kg-extract-definitions")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"kg-extract-definitions",
"-p",
@ -44,7 +44,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("kg-extract-relationships")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"kg-extract-relationships",
"-p",
@ -74,7 +74,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("kg-extract-topics")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"kg-extract-topics",
"-p",
@ -104,7 +104,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("graph-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"graph-rag",
"-p",
@ -144,7 +144,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("graph-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"graph-embeddings",
"-p",

View file

@ -21,7 +21,7 @@ local prompts = import "prompts/slm.jsonnet";
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-llamafile",
"-p",

View file

@ -21,7 +21,7 @@ local prompts = import "prompts/slm.jsonnet";
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-llamafile",
"-p",

View file

@ -14,7 +14,7 @@ memgraph + {
local container =
engine.container("store-triples")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"triples-write-memgraph",
"-p",
@ -48,7 +48,7 @@ memgraph + {
local container =
engine.container("query-triples")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"triples-query-memgraph",
"-p",

View file

@ -12,7 +12,7 @@ milvus + {
local container =
engine.container("store-graph-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"ge-write-milvus",
"-p",
@ -44,7 +44,7 @@ milvus + {
local container =
engine.container("query-graph-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"ge-query-milvus",
"-p",
@ -76,7 +76,7 @@ milvus + {
local container =
engine.container("store-doc-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"de-write-milvus",
"-p",
@ -108,7 +108,7 @@ milvus + {
local container =
engine.container("query-doc-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"de-query-milvus",
"-p",

View file

@ -13,7 +13,7 @@ neo4j + {
local container =
engine.container("store-triples")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"triples-write-neo4j",
"-p",
@ -45,7 +45,7 @@ neo4j + {
local container =
engine.container("query-triples")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"triples-query-neo4j",
"-p",

View file

@ -21,7 +21,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-ollama",
"-p",

View file

@ -21,7 +21,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-ollama",
"-p",

View file

@ -23,7 +23,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local containerRag =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-openai",
"-p",

View file

@ -23,7 +23,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"text-completion-openai",
"-p",

View file

@ -17,7 +17,7 @@ local cassandra_hosts = "cassandra";
local container =
engine.container("store-graph-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"ge-write-pinecone",
"-p",
@ -52,7 +52,7 @@ local cassandra_hosts = "cassandra";
local container =
engine.container("query-graph-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"ge-query-pinecone",
"-p",
@ -87,7 +87,7 @@ local cassandra_hosts = "cassandra";
local container =
engine.container("store-doc-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"de-write-pinecone",
"-p",
@ -122,7 +122,7 @@ local cassandra_hosts = "cassandra";
local container =
engine.container("query-doc-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"de-query-pinecone",
"-p",

View file

@ -44,7 +44,7 @@ local default_prompts = import "prompts/default-prompts.jsonnet";
local container =
engine.container("prompt")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"prompt-template",
"-p",
@ -84,7 +84,7 @@ local default_prompts = import "prompts/default-prompts.jsonnet";
local container =
engine.container("prompt-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"prompt-template",
"-p",

View file

@ -109,7 +109,7 @@ local url = import "values/url.jsonnet";
// Trustgraph Pulsar initialisation
local adminContainer =
engine.container("init-trustgraph")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"tg-init-pulsar",
"-p",

View file

@ -12,7 +12,7 @@ qdrant + {
local container =
engine.container("store-graph-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"ge-write-qdrant",
"-p",
@ -44,7 +44,7 @@ qdrant + {
local container =
engine.container("query-graph-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"ge-query-qdrant",
"-p",
@ -76,7 +76,7 @@ qdrant + {
local container =
engine.container("store-doc-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"de-write-qdrant",
"-p",
@ -108,7 +108,7 @@ qdrant + {
local container =
engine.container("query-doc-embeddings")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"de-query-qdrant",
"-p",

View file

@ -21,7 +21,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("api-gateway")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"api-gateway",
"-p",
@ -60,7 +60,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("chunker")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"chunker-token",
"-p",
@ -94,7 +94,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("pdf-decoder")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"pdf-decoder",
"-p",
@ -124,7 +124,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("metering")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"metering",
"-p",
@ -154,7 +154,7 @@ local url = import "values/url.jsonnet";
local container =
engine.container("metering-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_flow)
.with_command([
"metering",
"-p",

View file

@ -30,7 +30,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("text-completion-rag")
.with_image(images.trustgraph)
.with_image(images.trustgraph_vertexai)
.with_command([
"text-completion-vertexai",
"-p",

View file

@ -30,7 +30,7 @@ local prompts = import "prompts/mixtral.jsonnet";
local container =
engine.container("text-completion")
.with_image(images.trustgraph)
.with_image(images.trustgraph_vertexai)
.with_command([
"text-completion-vertexai",
"-p",

View file

@ -9,7 +9,11 @@ local version = import "version.jsonnet";
milvus: "docker.io/milvusdb/milvus:v2.4.9",
prometheus: "docker.io/prom/prometheus:v2.53.2",
grafana: "docker.io/grafana/grafana:11.1.4",
trustgraph: "docker.io/trustgraph/trustgraph-flow:" + version,
trustgraph_base: "docker.io/trustgraph/trustgraph-base:" + version,
trustgraph_flow: "docker.io/trustgraph/trustgraph-flow:" + version,
trustgraph_bedrock: "docker.io/trustgraph/trustgraph-bedrock:" + version,
trustgraph_vertexai: "docker.io/trustgraph/trustgraph-vertexai:" + version,
trustgraph_hf: "docker.io/trustgraph/trustgraph-hf:" + version,
qdrant: "docker.io/qdrant/qdrant:v1.11.1",
memgraph_mage: "docker.io/memgraph/memgraph-mage:1.22-memgraph-2.22",
memgraph_lab: "docker.io/memgraph/lab:2.19.1",