trustgraph/trustgraph-cli/scripts/tg-invoke-llm
cybermaggedon 3b8b9ea866
Feature/flow api 3 (#358)
* Working mux socket

* Change API to incorporate flow

* Add Flow ID to all relevant CLIs, not completely implemented

* Change tg-processor-state to use API gateway

* Updated all CLIs

* New tg-show-flow-state command

* tg-show-flow-state shows classes too
2025-05-03 10:39:53 +01:00

70 lines
1.3 KiB
Python
Executable file

#!/usr/bin/env python3
"""
Invokes the text completion service by specifying an LLM system prompt
and user prompt. Both arguments are required.
"""
import argparse
import os
import json
from trustgraph.api import Api
# Default API gateway endpoint; can be overridden via the TRUSTGRAPH_URL
# environment variable or the -u/--url command-line flag.
default_url = os.getenv("TRUSTGRAPH_URL", 'http://localhost:8088/')
def query(url, flow_id, system, prompt):
    """Send a text-completion request to the given flow and print the reply.

    :param url: base URL of the TrustGraph API gateway
    :param flow_id: identifier of the flow to route the request through
    :param system: LLM system prompt
    :param prompt: LLM user prompt
    """
    flow = Api(url).flow(flow_id)
    answer = flow.text_completion(system=system, prompt=prompt)
    print(answer)
def main():
    """Parse command-line arguments and invoke the LLM text-completion service.

    Positional arguments are the system prompt and the user prompt; the
    gateway URL and flow ID are optional flags. Errors are reported on
    stdout rather than raising a traceback.
    """
    parser = argparse.ArgumentParser(
        prog='tg-invoke-llm',
        description=__doc__,
    )
    parser.add_argument(
        '-u', '--url',
        default=default_url,
        help=f'API URL (default: {default_url})',
    )
    parser.add_argument(
        'system',
        nargs=1,
        help='LLM system prompt e.g. You are a helpful assistant',
    )
    parser.add_argument(
        'prompt',
        nargs=1,
        help='LLM prompt e.g. What is 2 + 2?',
    )
    parser.add_argument(
        '-f', '--flow-id',
        default="0000",
        help='Flow ID (default: 0000)',
    )
    args = parser.parse_args()
    try:
        query(
            url=args.url,
            # Fix: query() takes 'flow_id', not 'flow' — the old keyword
            # raised TypeError on every run, masked by the except below.
            flow_id=args.flow_id,
            system=args.system[0],
            prompt=args.prompt[0],
        )
    except Exception as e:
        # Boundary handler: surface the error without a traceback.
        print("Exception:", e, flush=True)

if __name__ == "__main__":
    main()