2024-11-07 21:01:51 +00:00
|
|
|
"""
|
|
|
|
|
Invokes the text completion service by specifying an LLM system prompt
|
|
|
|
|
and user prompt. Both arguments are required.
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
import argparse
import os
import sys

from trustgraph.api import Api
|
2024-11-07 21:01:51 +00:00
|
|
|
|
2025-12-04 17:38:57 +00:00
|
|
|
default_url = os.getenv("TRUSTGRAPH_URL", 'http://localhost:8088/')
|
|
|
|
|
default_token = os.getenv("TRUSTGRAPH_TOKEN", None)
|
2024-11-07 21:01:51 +00:00
|
|
|
|
2025-12-04 17:38:57 +00:00
|
|
|
def query(url, flow_id, system, prompt, streaming=True, token=None):
|
2024-11-07 21:01:51 +00:00
|
|
|
|
2025-12-04 17:38:57 +00:00
|
|
|
# Create API client
|
|
|
|
|
api = Api(url=url, token=token)
|
|
|
|
|
socket = api.socket()
|
|
|
|
|
flow = socket.flow(flow_id)
|
2024-11-07 21:01:51 +00:00
|
|
|
|
2025-12-04 17:38:57 +00:00
|
|
|
try:
|
|
|
|
|
# Call text completion
|
|
|
|
|
response = flow.text_completion(
|
|
|
|
|
system=system,
|
|
|
|
|
prompt=prompt,
|
|
|
|
|
streaming=streaming
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
if streaming:
|
|
|
|
|
# Stream output to stdout without newline
|
|
|
|
|
for chunk in response:
|
|
|
|
|
print(chunk.content, end="", flush=True)
|
|
|
|
|
# Add final newline after streaming
|
|
|
|
|
print()
|
|
|
|
|
else:
|
|
|
|
|
# Non-streaming: print complete response
|
|
|
|
|
print(response)
|
|
|
|
|
|
|
|
|
|
finally:
|
|
|
|
|
# Clean up socket connection
|
|
|
|
|
socket.close()
|
2024-11-07 21:01:51 +00:00
|
|
|
|
|
|
|
|
def main():
|
|
|
|
|
|
|
|
|
|
parser = argparse.ArgumentParser(
|
|
|
|
|
prog='tg-invoke-llm',
|
|
|
|
|
description=__doc__,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
parser.add_argument(
|
2025-01-02 19:49:22 +00:00
|
|
|
'-u', '--url',
|
|
|
|
|
default=default_url,
|
|
|
|
|
help=f'API URL (default: {default_url})',
|
2024-11-07 21:01:51 +00:00
|
|
|
)
|
|
|
|
|
|
2025-12-04 17:38:57 +00:00
|
|
|
parser.add_argument(
|
|
|
|
|
'-t', '--token',
|
|
|
|
|
default=default_token,
|
|
|
|
|
help='Authentication token (default: $TRUSTGRAPH_TOKEN)',
|
|
|
|
|
)
|
|
|
|
|
|
2024-11-07 21:01:51 +00:00
|
|
|
parser.add_argument(
|
|
|
|
|
'system',
|
|
|
|
|
nargs=1,
|
|
|
|
|
help='LLM system prompt e.g. You are a helpful assistant',
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
parser.add_argument(
|
|
|
|
|
'prompt',
|
|
|
|
|
nargs=1,
|
|
|
|
|
help='LLM prompt e.g. What is 2 + 2?',
|
|
|
|
|
)
|
2025-05-03 10:39:53 +01:00
|
|
|
|
|
|
|
|
parser.add_argument(
|
|
|
|
|
'-f', '--flow-id',
|
2025-05-24 12:27:56 +01:00
|
|
|
default="default",
|
|
|
|
|
help=f'Flow ID (default: default)'
|
2025-05-03 10:39:53 +01:00
|
|
|
)
|
2024-11-07 21:01:51 +00:00
|
|
|
|
2025-11-26 09:59:10 +00:00
|
|
|
parser.add_argument(
|
|
|
|
|
'--no-streaming',
|
|
|
|
|
action='store_true',
|
|
|
|
|
help='Disable streaming (default: streaming enabled)'
|
|
|
|
|
)
|
|
|
|
|
|
2024-11-07 21:01:51 +00:00
|
|
|
args = parser.parse_args()
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
|
2025-12-04 17:38:57 +00:00
|
|
|
query(
|
2025-01-02 19:49:22 +00:00
|
|
|
url=args.url,
|
2025-11-26 09:59:10 +00:00
|
|
|
flow_id=args.flow_id,
|
2024-11-07 21:01:51 +00:00
|
|
|
system=args.system[0],
|
|
|
|
|
prompt=args.prompt[0],
|
2025-12-04 17:38:57 +00:00
|
|
|
streaming=not args.no_streaming,
|
|
|
|
|
token=args.token,
|
|
|
|
|
)
|
2024-11-07 21:01:51 +00:00
|
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
|
|
|
|
print("Exception:", e, flush=True)
|
|
|
|
|
|
2025-07-23 21:22:08 +01:00
|
|
|
if __name__ == "__main__":
|
2025-12-04 17:38:57 +00:00
|
|
|
main()
|