mirror of
https://github.com/trustgraph-ai/trustgraph.git
synced 2026-05-04 04:42:36 +02:00
Introduces `workspace` as the isolation boundary for config, flows,
library, and knowledge data. Removes `user` as a schema-level field
throughout the code, API specs, and tests; workspace provides the
same separation more cleanly at the trusted flow.workspace layer
rather than through client-supplied message fields.
Design
------
- IAM tech spec (docs/tech-specs/iam.md) documents current state,
proposed auth/access model, and migration direction.
- Data ownership model (docs/tech-specs/data-ownership-model.md)
captures the workspace/collection/flow hierarchy.
Schema + messaging
------------------
- Drop `user` field from AgentRequest/Step, GraphRagQuery,
DocumentRagQuery, Triples/Graph/Document/Row EmbeddingsRequest,
Sparql/Rows/Structured QueryRequest, ToolServiceRequest.
- Keep collection/workspace routing via flow.workspace at the
service layer.
- Translators updated to not serialise/deserialise user.
API specs
---------
- OpenAPI schemas and path examples cleaned of user fields.
- Websocket async-api messages updated.
- Removed the unused parameters/User.yaml.
Services + base
---------------
- Librarian, collection manager, knowledge, config: all operations
scoped by workspace. Config client API takes workspace as first
positional arg.
- `flow.workspace` set at flow start time by the infrastructure;
no longer pass-through from clients.
- Tool service drops user-personalisation passthrough.
CLI + SDK
---------
- tg-init-workspace and workspace-aware import/export.
- All tg-* commands drop user args; accept --workspace.
- Python API/SDK (flow, socket_client, async_*, explainability,
library) drop user kwargs from every method signature.
MCP server
----------
- All tool endpoints drop user parameters; socket_manager no longer
keyed per user.
Flow service
------------
- Closure-based topic cleanup on flow stop: only delete topics
whose blueprint template was parameterised AND no remaining
live flow (across all workspaces) still resolves to that topic.
Four scopes fall out naturally from template analysis:
* {id} -> per-flow, deleted on stop
* {blueprint} -> per-blueprint, kept while any flow of the
same blueprint exists
* {workspace} -> per-workspace, kept while any flow in the
workspace exists
* literal -> global, never deleted (e.g. tg.request.librarian)
Fixes a bug where stopping a flow silently destroyed the global
librarian exchange, wedging all library operations until manual
restart.
RabbitMQ backend
----------------
- heartbeat=60, blocked_connection_timeout=300. Catches silently
dead connections (broker restart, orphaned channels, network
partitions) within ~2 heartbeat windows, so the consumer
reconnects and re-binds its queue rather than sitting forever
on a zombie connection.
Tests
-----
- Full test refresh: unit, integration, contract, provenance.
- Dropped user-field assertions and constructor kwargs across
~100 test files.
- Renamed user-collection isolation tests to workspace-collection.
223 lines
5.2 KiB
Python
223 lines
5.2 KiB
Python
"""
|
|
This utility takes a document embeddings core and loads it into a running
|
|
TrustGraph through the API. The document embeddings core should be in msgpack
|
|
format, which is the default format produced by tg-save-doc-embeds.
|
|
"""
|
|
|
|
import aiohttp
|
|
import asyncio
|
|
import msgpack
|
|
import json
|
|
import sys
|
|
import argparse
|
|
import os
|
|
import signal
|
|
|
|
class Running:
    """Shared run flag used to signal cooperative shutdown across tasks."""

    def __init__(self):
        # Starts in the running state; flipped off exactly once by stop().
        self.running = True

    def get(self):
        """Return True while the process should keep working."""
        return self.running

    def stop(self):
        """Request shutdown; polling loops observe this on their next check."""
        self.running = False
|
|
|
|
# Global counter of document-embeddings messages sent; printed by stats().
de_counts = 0
|
|
|
|
async def load_de(running, queue, url):
    """Consume document-embeddings records from `queue` and stream them to
    the TrustGraph import websocket at `url`.

    Runs until a `None` sentinel is read from the queue (end of load) or
    `running` is stopped.  Increments the global `de_counts` for each
    record successfully sent.
    """

    global de_counts

    async with aiohttp.ClientSession() as session:

        async with session.ws_connect(url) as ws:

            while running.get():

                try:
                    msg = await asyncio.wait_for(queue.get(), 1)
                except (asyncio.TimeoutError, TimeoutError):
                    # Queue was empty for the poll interval; re-check the
                    # running flag and try again.  (asyncio.TimeoutError is
                    # an alias of TimeoutError from 3.11; catching both
                    # covers older interpreters.  The original bare except
                    # also swallowed CancelledError, which made this task
                    # uncancellable.)
                    continue

                # End-of-load sentinel placed by loader()
                if msg is None:
                    break

                # Expand the compact msgpack record ({"m": metadata,
                # "c": chunks}) into the wire format the import API expects.
                msg = {
                    "metadata": {
                        "id": msg["m"]["i"],
                        "metadata": msg["m"]["m"],
                        "collection": msg["m"]["c"],
                    },
                    "chunks": [
                        {
                            "chunk_id": chunk["c"],
                            "vectors": chunk["v"],
                        }
                        for chunk in msg["c"]
                    ],
                }

                try:
                    await ws.send_json(msg)
                except Exception as e:
                    # Best-effort: report the send failure and keep going.
                    print(e)
                else:
                    # BUGFIX: only count messages that were actually sent;
                    # previously failures were counted too.
                    de_counts += 1
|
|
|
|
async def stats(running):
    """Periodically print a progress count of embeddings sent.

    Polls every 2 seconds until `running` is stopped; reads the global
    `de_counts` maintained by load_de().
    """

    global de_counts

    while running.get():

        await asyncio.sleep(2)

        # BUGFIX: this tool loads *document* embeddings; the label
        # previously said "Graph embeddings" (copy-paste from
        # tg-load-kg-core).
        print(
            f"Document embeddings: {de_counts:10d}"
        )
|
|
|
|
async def loader(running, de_queue, path, format, collection):
    """Read an embeddings core from `path` and feed records onto `de_queue`.

    Only msgpack input is implemented; `format == "json"` raises
    RuntimeError.  Records are ["de", payload] pairs.  If `collection` is
    given it overrides the collection ID carried in each record's
    metadata.  A trailing `None` sentinel is queued so the consumer knows
    the load is complete.
    """

    if format == "json":

        raise RuntimeError("Not implemented")

    else:

        with open(path, "rb") as f:

            unpacker = msgpack.Unpacker(f, raw=False)

            while running.get():

                try:
                    unpacked = unpacker.unpack()
                except Exception:
                    # msgpack raises OutOfData at end of stream; treat any
                    # unpack failure as end of input, as before.
                    break

                # Only document-embeddings records are handled; anything
                # else is dropped.  (Previously `qtype` could be unbound
                # here if the first record was not "de".)
                if unpacked[0] == "de":

                    if collection:
                        # BUGFIX: the record is a ["de", payload] pair, so
                        # the collection ID lives at payload["m"]["c"]
                        # (see load_de's expansion).  The old code indexed
                        # unpacked["metadata"], which raised TypeError on
                        # a list.
                        unpacked[1]["m"]["c"] = collection

                    qtype = de_queue

                    while running.get():

                        try:
                            await asyncio.wait_for(qtype.put(unpacked[1]), 0.5)
                            # Successful put message, move on
                            break
                        except (asyncio.TimeoutError, TimeoutError):
                            # Queue full; re-check running flag and retry
                            continue

                if not running.get(): break

            # Put 'None' on end of queue to finish
            while running.get():

                try:
                    await asyncio.wait_for(de_queue.put(None), 1)
                    # Successful put message, move on
                    break
                except (asyncio.TimeoutError, TimeoutError):
                    continue
|
|
|
|
async def run(running, **args):
    """Wire up the loader -> queue -> websocket pipeline and run it.

    `args` carries the parsed CLI options: url, flow_id, input_file,
    format, collection.
    """

    # A bounded queue applies back-pressure so the file reader can't race
    # ahead of the websocket consumer and eat all memory.
    de_q = asyncio.Queue(maxsize=10)

    url = args["url"]
    flow_id = args["flow_id"]

    # Producer: reads the msgpack core and fills the queue.
    load_task = asyncio.create_task(
        loader(
            running=running,
            de_queue=de_q,
            path=args["input_file"],
            format=args["format"],
            collection=args["collection"],
        )
    )

    # Consumer: drains the queue into the import websocket.
    de_task = asyncio.create_task(
        load_de(
            running=running,
            queue=de_q,
            url = f"{url}api/v1/flow/{flow_id}/import/document-embeddings"
        )
    )

    # Progress reporter.
    stats_task = asyncio.create_task(stats(running))

    # The consumer finishes once it sees the None sentinel; then flip the
    # flag so the remaining tasks wind down and can be awaited.
    await de_task
    running.stop()

    await load_task
    await stats_task
|
|
|
|
async def main(running):
    """Parse command-line arguments and run the load pipeline."""

    parser = argparse.ArgumentParser(
        prog='tg-load-doc-embeds',
        description=__doc__,
    )

    default_url = os.getenv("TRUSTGRAPH_API", "http://localhost:8088/")

    parser.add_argument(
        '-u', '--url',
        default=default_url,
        help=f'TrustGraph API URL (default: {default_url})',
    )

    parser.add_argument(
        '-f', '--flow-id',
        default="default",
        help='Flow ID (default: default)',
    )

    parser.add_argument(
        '-i', '--input-file',
        required=True,
        # BUGFIX: this is the file to load, not an output file; the old
        # help text and comment were copied from tg-save-doc-embeds.
        help='Input file',
    )

    parser.add_argument(
        '--format',
        default="msgpack",
        choices=["msgpack", "json"],
        # BUGFIX: help said "Output format" for an input option.
        help='Input format (default: msgpack)',
    )

    parser.add_argument(
        '--collection',
        help='Collection ID to load as (default: from input)'
    )

    # (Removed unused local `collection = "default"`; the effective default
    # comes from the --collection argument, which defaults to None.)
    args = parser.parse_args()

    await run(running, **vars(args))
|
|
|
|
# Shared shutdown flag, toggled by the SIGINT handler below.
running = Running()

def interrupt(sig, frame):
    # SIGINT handler: request cooperative shutdown of all pipeline tasks
    # rather than killing the process mid-write.
    running.stop()
    print('Interrupt')

signal.signal(signal.SIGINT, interrupt)

# Script entry point: runs the whole load pipeline to completion.
asyncio.run(main(running))
|
|
|