Update docs for 2.2 release (#766)

- Update protocol specs
- Update protocol docs
- Update API specs
This commit is contained in:
cybermaggedon 2026-04-07 22:24:59 +01:00 committed by GitHub
parent c20e6540ec
commit e899370d98
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
15 changed files with 488 additions and 867 deletions

View file

@@ -10,6 +10,7 @@ properties:
- observation
- answer
- final-answer
- explain
- error
example: answer
content:
@@ -29,6 +30,11 @@ properties:
type: string
description: Named graph containing the explainability data
example: urn:graph:retrieval
explain_triples:
type: array
description: Provenance triples for this explain event (inline, no follow-up query needed)
items:
$ref: '../common/Triple.yaml'
end-of-message:
type: boolean
description: Current chunk type is complete (streaming mode)

View file

@@ -18,6 +18,11 @@ properties:
type: string
description: Named graph containing the explainability data
example: urn:graph:retrieval
explain_triples:
type: array
description: Provenance triples for this explain event (inline, no follow-up query needed)
items:
$ref: '../common/Triple.yaml'
end-of-stream:
type: boolean
description: Indicates LLM response stream is complete

View file

@@ -18,6 +18,11 @@ properties:
type: string
description: Named graph containing the explainability data
example: urn:graph:retrieval
explain_triples:
type: array
description: Provenance triples for this explain event (inline, no follow-up query needed)
items:
$ref: '../common/Triple.yaml'
end_of_stream:
type: boolean
description: Indicates LLM response stream is complete

View file

@@ -2,7 +2,7 @@ openapi: 3.1.0
info:
title: TrustGraph API Gateway
version: "2.1"
version: "2.2"
description: |
REST API for TrustGraph - an AI-powered knowledge graph and RAG system.
@@ -28,7 +28,7 @@ info:
Require a running flow instance, accessed via `/api/v1/flow/{flow}/service/{kind}`:
- AI services: agent, text-completion, prompt, RAG (document/graph)
- Embeddings: embeddings, graph-embeddings, document-embeddings
- Query: triples, rows, nlp-query, structured-query, row-embeddings
- Query: triples, rows, nlp-query, structured-query, sparql-query, row-embeddings
- Data loading: text-load, document-load
- Utilities: mcp-tool, structured-diag
@@ -139,6 +139,8 @@ paths:
$ref: './paths/flow/text-load.yaml'
/api/v1/flow/{flow}/service/document-load:
$ref: './paths/flow/document-load.yaml'
/api/v1/flow/{flow}/service/sparql-query:
$ref: './paths/flow/sparql-query.yaml'
# Document streaming
/api/v1/document-stream:

View file

@@ -29,6 +29,7 @@ post:
- `action`: Action being taken
- `observation`: Result from action
- `answer`: Final response to user
- `explain`: Provenance event with inline triples (`explain_triples`)
- `error`: Error occurred
Each chunk may have multiple messages. Check flags:
@@ -116,6 +117,22 @@ post:
content: ""
end-of-message: true
end-of-dialog: true
explainEvent:
summary: Explain event with inline provenance triples
value:
chunk-type: explain
content: ""
explain_id: urn:trustgraph:agent:abc123
explain_graph: urn:graph:retrieval
explain_triples:
- s: {t: i, i: "urn:trustgraph:agent:abc123"}
p: {t: i, i: "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"}
o: {t: i, i: "https://trustgraph.ai/ns/AgentSession"}
- s: {t: i, i: "urn:trustgraph:agent:abc123"}
p: {t: i, i: "https://trustgraph.ai/ns/query"}
o: {t: l, v: "Explain quantum computing"}
end-of-message: true
end-of-dialog: false
legacyResponse:
summary: Legacy non-streaming response
value:

View file

@@ -24,8 +24,13 @@ post:
## Streaming
Enable `streaming: true` to receive the answer as it's generated:
- Multiple messages with `response` content
- Multiple `chunk` messages with `response` content
- `explain` messages with inline provenance triples (`explain_triples`)
- Final message with `end-of-stream: true`
- Session ends with `end_of_session: true`
Explain events carry `explain_id`, `explain_graph`, and `explain_triples`
inline in the stream, so no follow-up knowledge graph query is needed.
Without streaming, returns complete answer in single response.
@@ -96,6 +101,21 @@ post:
value:
response: "The research papers present three"
end-of-stream: false
explainEvent:
summary: Explain event with inline provenance triples
value:
message_type: explain
explain_id: urn:trustgraph:question:abc123
explain_graph: urn:graph:retrieval
explain_triples:
- s: {t: i, i: "urn:trustgraph:question:abc123"}
p: {t: i, i: "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"}
o: {t: i, i: "https://trustgraph.ai/ns/DocumentRagQuestion"}
- s: {t: i, i: "urn:trustgraph:question:abc123"}
p: {t: i, i: "https://trustgraph.ai/ns/query"}
o: {t: l, v: "What are the key findings in the research papers?"}
end-of-stream: false
end_of_session: false
streamingComplete:
summary: Streaming complete marker
value:

View file

@@ -25,8 +25,13 @@ post:
## Streaming
Enable `streaming: true` to receive the answer as it's generated:
- Multiple messages with `response` content
- Multiple `chunk` messages with `response` content
- `explain` messages with inline provenance triples (`explain_triples`)
- Final message with `end-of-stream: true`
- Session ends with `end_of_session: true`
Explain events carry `explain_id`, `explain_graph`, and `explain_triples`
inline in the stream, so no follow-up knowledge graph query is needed.
Without streaming, returns complete answer in single response.
@@ -116,6 +121,21 @@ post:
value:
response: "Quantum physics and computer science intersect"
end-of-stream: false
explainEvent:
summary: Explain event with inline provenance triples
value:
message_type: explain
explain_id: urn:trustgraph:question:abc123
explain_graph: urn:graph:retrieval
explain_triples:
- s: {t: i, i: "urn:trustgraph:question:abc123"}
p: {t: i, i: "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"}
o: {t: i, i: "https://trustgraph.ai/ns/GraphRagQuestion"}
- s: {t: i, i: "urn:trustgraph:question:abc123"}
p: {t: i, i: "https://trustgraph.ai/ns/query"}
o: {t: l, v: "What connections exist between quantum physics and computer science?"}
end-of-stream: false
end_of_session: false
streamingComplete:
summary: Streaming complete marker
value: