Feature/azure openai templates (#104)

* Azure OpenAI LLM templates
* Bump version, fix package versions
* Add azure-openai to template generation
This commit is contained in:
cybermaggedon 2024-10-04 15:47:46 +01:00 committed by GitHub
parent d96ef8269a
commit 222dc9982c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 133 additions and 4 deletions

View file

@ -1,6 +1,6 @@
# VERSION=$(shell git describe | sed 's/^v//') # VERSION=$(shell git describe | sed 's/^v//')
VERSION=0.11.19 VERSION=0.12.0
DOCKER=podman DOCKER=podman
@ -16,7 +16,7 @@ wheels:
pip3 wheel --no-deps --wheel-dir dist trustgraph-embeddings-hf/ pip3 wheel --no-deps --wheel-dir dist trustgraph-embeddings-hf/
pip3 wheel --no-deps --wheel-dir dist trustgraph-cli/ pip3 wheel --no-deps --wheel-dir dist trustgraph-cli/
packages: packages: update-package-versions
rm -rf dist/ rm -rf dist/
cd trustgraph-base && python3 setup.py sdist --dist-dir ../dist/ cd trustgraph-base && python3 setup.py sdist --dist-dir ../dist/
cd trustgraph-flow && python3 setup.py sdist --dist-dir ../dist/ cd trustgraph-flow && python3 setup.py sdist --dist-dir ../dist/

View file

@ -7,6 +7,7 @@
import "patterns/triple-store-neo4j.jsonnet", import "patterns/triple-store-neo4j.jsonnet",
import "patterns/graph-rag.jsonnet", import "patterns/graph-rag.jsonnet",
import "patterns/llm-azure.jsonnet", import "patterns/llm-azure.jsonnet",
import "patterns/llm-azure-openai.jsonnet",
import "patterns/llm-bedrock.jsonnet", import "patterns/llm-bedrock.jsonnet",
import "patterns/llm-claude.jsonnet", import "patterns/llm-claude.jsonnet",
import "patterns/llm-cohere.jsonnet", import "patterns/llm-cohere.jsonnet",

View file

@ -1,5 +1,6 @@
{ {
"azure": import "components/azure.jsonnet", "azure": import "components/azure.jsonnet",
"azure-openai": import "components/azure-openai.jsonnet",
"bedrock": import "components/bedrock.jsonnet", "bedrock": import "components/bedrock.jsonnet",
"claude": import "components/claude.jsonnet", "claude": import "components/claude.jsonnet",
"cohere": import "components/cohere.jsonnet", "cohere": import "components/cohere.jsonnet",

View file

@ -0,0 +1,95 @@
local base = import "base/base.jsonnet";
local images = import "values/images.jsonnet";
local url = import "values/url.jsonnet";
local prompts = import "prompts/mixtral.jsonnet";

// Azure OpenAI text-completion components: a default "text-completion"
// service and a "text-completion-rag" variant wired to the RAG queues.
// Hidden (::) fields are tunables that pattern args may override.
{

    "azure-openai-token":: "${AZURE_OPENAI_TOKEN}",
    "azure-openai-model":: "GPT-3.5-Turbo",
    // 4096 matches the pattern-file default (was 4192, an apparent typo).
    "azure-openai-max-output-tokens":: 4096,
    "azure-openai-temperature":: 0.0,

    "text-completion" +: {
    
        create:: function(engine)

            local container =
                engine.container("text-completion")
                    .with_image(images.trustgraph)
                    .with_command([
                        "text-completion-azure-openai",
                        "-p",
                        url.pulsar,
                        "-k",
                        $["azure-openai-token"],
                        "-m",
                        $["azure-openai-model"],
                        "-x",
                        std.toString($["azure-openai-max-output-tokens"]),
                        "-t",
                        std.toString($["azure-openai-temperature"]),
                    ])
                    .with_limits("0.5", "128M")
                    .with_reservations("0.1", "128M");

            local containerSet = engine.containers(
                "text-completion", [ container ]
            );

            local service =
                engine.internalService(containerSet)
                .with_port(8000, 8000, "metrics");

            engine.resources([
                containerSet,
                service,
            ])

    },

    "text-completion-rag" +: {
    
        create:: function(engine)

            local container =
                engine.container("text-completion-rag")
                    .with_image(images.trustgraph)
                    .with_command([
                        // Was "text-completion-azure" — a copy/paste from
                        // the plain Azure template; this component must run
                        // the Azure OpenAI processor.
                        "text-completion-azure-openai",
                        "-p",
                        url.pulsar,
                        "-k",
                        $["azure-openai-token"],
                        // Was "-e" — the non-RAG section passes the model
                        // with "-m"; keep both invocations consistent.
                        "-m",
                        $["azure-openai-model"],
                        "-x",
                        std.toString($["azure-openai-max-output-tokens"]),
                        "-t",
                        std.toString($["azure-openai-temperature"]),
                        // RAG variant uses dedicated request/response queues.
                        "-i",
                        "non-persistent://tg/request/text-completion-rag",
                        "-o",
                        "non-persistent://tg/response/text-completion-rag-response",
                    ])
                    .with_limits("0.5", "128M")
                    .with_reservations("0.1", "128M");

            local containerSet = engine.containers(
                "text-completion-rag", [ container ]
            );

            local service =
                engine.internalService(containerSet)
                .with_port(8000, 8000, "metrics");

            engine.resources([
                containerSet,
                service,
            ])

    }

} + prompts

View file

@ -122,8 +122,8 @@ def generate_all(output, version):
"docker-compose", "minikube-k8s", "gcp-k8s" "docker-compose", "minikube-k8s", "gcp-k8s"
]: ]:
for model in [ for model in [
"azure", "bedrock", "claude", "cohere", "llamafile", "ollama", "azure", "azure-openai", "bedrock", "claude", "cohere",
"openai", "vertexai" "llamafile", "ollama", "openai", "vertexai"
]: ]:
for graph in [ "cassandra", "neo4j" ]: for graph in [ "cassandra", "neo4j" ]:

View file

@ -0,0 +1,32 @@
// Pattern metadata for the Azure OpenAI LLM template.
{
    pattern: {
	name: "azure-openai",
        icon: "🤖💬",
        title: "Add Azure OpenAI LLM endpoint for text completion",
	description: "This pattern integrates an Azure OpenAI LLM endpoint hosted in the Azure cloud for text completion operations. You need an Azure subscription to be able to use this service.",
        requires: ["pulsar", "trustgraph"],
        features: ["llm"],
	args: [
	    {
		name: "azure-openai-max-output-tokens",
		label: "Maximum output tokens",
		type: "integer",
		description: "Limit on number tokens to generate",
		default: 4096,
		required: true,
	    },
	    {
		name: "azure-openai-temperature",
		label: "Temperature",
		type: "slider",
		description: "Controlling predictability / creativity balance",
		min: 0,
		max: 1,
		step: 0.05,
                default: 0.5,
	    },
	],
	category: [ "llm" ],
    },
    // Was "components/azure.jsonnet" — this pattern must load the
    // azure-openai component, matching the templates index entry.
    module: "components/azure-openai.jsonnet",
}