From 6565adb1ec9a3ced4ddd5598c2a4f3a4c2d2c105 Mon Sep 17 00:00:00 2001
From: cybermaggedon
Date: Tue, 18 Mar 2025 20:27:45 +0000
Subject: [PATCH] Enable AKS integration of text-completion-azure-openai (#319)

Reconfigure so that AZURE_TOKEN, AZURE_MODEL and AZURE_ENDPOINT can be
used to set the token/model/endpoint parameters. This allows it to be
deployed in K8s and use secrets to set these environment variables.
---
 .../model/text_completion/azure_openai/llm.py | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/trustgraph-flow/trustgraph/model/text_completion/azure_openai/llm.py b/trustgraph-flow/trustgraph/model/text_completion/azure_openai/llm.py
index 53833898..01eb2c9b 100755
--- a/trustgraph-flow/trustgraph/model/text_completion/azure_openai/llm.py
+++ b/trustgraph-flow/trustgraph/model/text_completion/azure_openai/llm.py
@@ -23,9 +23,10 @@ default_output_queue = text_completion_response_queue
 default_subscriber = module
 default_temperature = 0.0
 default_max_output = 4192
-default_api = "2024-02-15-preview"
-default_endpoint = os.getenv("AZURE_ENDPOINT")
-default_token = os.getenv("AZURE_TOKEN")
+default_api = "2024-12-01-preview"
+default_endpoint = os.getenv("AZURE_ENDPOINT", None)
+default_token = os.getenv("AZURE_TOKEN", None)
+default_model = os.getenv("AZURE_MODEL", None)
 
 class Processor(ConsumerProducer):
 
@@ -34,12 +35,13 @@ class Processor(ConsumerProducer):
         input_queue = params.get("input_queue", default_input_queue)
         output_queue = params.get("output_queue", default_output_queue)
         subscriber = params.get("subscriber", default_subscriber)
-        endpoint = params.get("endpoint", default_endpoint)
-        token = params.get("token", default_token)
         temperature = params.get("temperature", default_temperature)
         max_output = params.get("max_output", default_max_output)
-        model = params.get("model")
+        api = params.get("api_version", default_api)
+        endpoint = params.get("endpoint", default_endpoint)
+        token = params.get("token", default_token)
+        model = params.get("model", default_model)
 
         if endpoint is None:
             raise RuntimeError("Azure endpoint not specified")
 
@@ -177,6 +179,7 @@ class Processor(ConsumerProducer):
 
         parser.add_argument(
             '-e', '--endpoint',
+            default=default_endpoint,
             help=f'LLM model endpoint'
         )
 
@@ -188,11 +191,13 @@ class Processor(ConsumerProducer):
 
         parser.add_argument(
             '-k', '--token',
+            default=default_token,
             help=f'LLM model token'
         )
 
         parser.add_argument(
             '-m', '--model',
+            default=default_model,
             help=f'LLM model'
         )