Messaging fabric plugins (#592)

* Plugin architecture for messaging fabric

* Schemas use a technology-neutral expression

* Schema strictness has uncovered some incorrect schema use, which has been fixed
This commit is contained in:
cybermaggedon 2025-12-17 21:40:43 +00:00 committed by GitHub
parent 1865b3f3c8
commit 34eb083836
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
100 changed files with 2342 additions and 828 deletions

View file

@ -159,12 +159,12 @@ class AsyncFlowInstance:
result = await self.request("text-completion", request_data)
return result.get("response", "")
async def graph_rag(self, question: str, user: str, collection: str,
async def graph_rag(self, query: str, user: str, collection: str,
max_subgraph_size: int = 1000, max_subgraph_count: int = 5,
max_entity_distance: int = 3, **kwargs: Any) -> str:
"""Graph RAG (non-streaming, use async_socket for streaming)"""
request_data = {
"question": question,
"query": query,
"user": user,
"collection": collection,
"max-subgraph-size": max_subgraph_size,
@ -177,11 +177,11 @@ class AsyncFlowInstance:
result = await self.request("graph-rag", request_data)
return result.get("response", "")
async def document_rag(self, question: str, user: str, collection: str,
async def document_rag(self, query: str, user: str, collection: str,
doc_limit: int = 10, **kwargs: Any) -> str:
"""Document RAG (non-streaming, use async_socket for streaming)"""
request_data = {
"question": question,
"query": query,
"user": user,
"collection": collection,
"doc-limit": doc_limit,

View file

@ -208,12 +208,12 @@ class AsyncSocketFlowInstance:
if hasattr(chunk, 'content'):
yield chunk.content
async def graph_rag(self, question: str, user: str, collection: str,
async def graph_rag(self, query: str, user: str, collection: str,
max_subgraph_size: int = 1000, max_subgraph_count: int = 5,
max_entity_distance: int = 3, streaming: bool = False, **kwargs):
"""Graph RAG with optional streaming"""
request = {
"question": question,
"query": query,
"user": user,
"collection": collection,
"max-subgraph-size": max_subgraph_size,
@ -235,11 +235,11 @@ class AsyncSocketFlowInstance:
if hasattr(chunk, 'content'):
yield chunk.content
async def document_rag(self, question: str, user: str, collection: str,
async def document_rag(self, query: str, user: str, collection: str,
doc_limit: int = 10, streaming: bool = False, **kwargs):
"""Document RAG with optional streaming"""
request = {
"question": question,
"query": query,
"user": user,
"collection": collection,
"doc-limit": doc_limit,

View file

@ -160,14 +160,14 @@ class FlowInstance:
)["answer"]
def graph_rag(
self, question, user="trustgraph", collection="default",
self, query, user="trustgraph", collection="default",
entity_limit=50, triple_limit=30, max_subgraph_size=150,
max_path_length=2,
):
# The input consists of a question
input = {
"query": question,
"query": query,
"user": user,
"collection": collection,
"entity-limit": entity_limit,
@ -182,13 +182,13 @@ class FlowInstance:
)["response"]
def document_rag(
self, question, user="trustgraph", collection="default",
self, query, user="trustgraph", collection="default",
doc_limit=10,
):
# The input consists of a question
input = {
"query": question,
"query": query,
"user": user,
"collection": collection,
"doc-limit": doc_limit,

View file

@ -284,7 +284,7 @@ class SocketFlowInstance:
def graph_rag(
self,
question: str,
query: str,
user: str,
collection: str,
max_subgraph_size: int = 1000,
@ -295,7 +295,7 @@ class SocketFlowInstance:
) -> Union[str, Iterator[str]]:
"""Graph RAG with optional streaming"""
request = {
"question": question,
"query": query,
"user": user,
"collection": collection,
"max-subgraph-size": max_subgraph_size,
@ -316,7 +316,7 @@ class SocketFlowInstance:
def document_rag(
self,
question: str,
query: str,
user: str,
collection: str,
doc_limit: int = 10,
@ -325,7 +325,7 @@ class SocketFlowInstance:
) -> Union[str, Iterator[str]]:
"""Document RAG with optional streaming"""
request = {
"question": question,
"query": query,
"user": user,
"collection": collection,
"doc-limit": doc_limit,