Messaging fabric plugins (#592)

* Plugin architecture for messaging fabric

* Schemas use a technology neutral expression

* Schema strictness has uncovered some incorrect schema use, which is fixed
This commit is contained in:
cybermaggedon 2025-12-17 21:40:43 +00:00 committed by GitHub
parent 1865b3f3c8
commit 34eb083836
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
100 changed files with 2342 additions and 828 deletions

View file

@ -433,13 +433,11 @@ class Processor(AgentService):
end_of_dialog=True,
# Legacy fields for backward compatibility
error=error_obj,
response=None,
)
else:
# Legacy format
r = AgentResponse(
error=error_obj,
response=None,
)
await respond(r)

View file

@ -95,9 +95,6 @@ class Configuration:
return ConfigResponse(
version = await self.get_version(),
values = values,
directory = None,
config = None,
error = None,
)
async def handle_list(self, v):
@ -117,10 +114,7 @@ class Configuration:
return ConfigResponse(
version = await self.get_version(),
values = None,
directory = await self.table_store.get_keys(v.type),
config = None,
error = None,
)
async def handle_getvalues(self, v):
@ -150,9 +144,6 @@ class Configuration:
return ConfigResponse(
version = await self.get_version(),
values = list(values),
directory = None,
config = None,
error = None,
)
async def handle_delete(self, v):
@ -179,12 +170,6 @@ class Configuration:
await self.push()
return ConfigResponse(
version = None,
value = None,
directory = None,
values = None,
config = None,
error = None,
)
async def handle_put(self, v):
@ -198,11 +183,6 @@ class Configuration:
await self.push()
return ConfigResponse(
version = None,
value = None,
directory = None,
values = None,
error = None,
)
async def get_config(self):
@ -224,11 +204,7 @@ class Configuration:
return ConfigResponse(
version = await self.get_version(),
value = None,
directory = None,
values = None,
config = config,
error = None,
)
async def handle(self, msg):
@ -262,9 +238,6 @@ class Configuration:
else:
resp = ConfigResponse(
value=None,
directory=None,
values=None,
error=Error(
type = "bad-operation",
message = "Bad operation"

View file

@ -361,9 +361,6 @@ class FlowConfig:
else:
resp = FlowResponse(
value=None,
directory=None,
values=None,
error=Error(
type = "bad-operation",
message = "Bad operation"

View file

@ -112,7 +112,7 @@ class Processor(AsyncProcessor):
self.config_request_consumer = Consumer(
taskgroup = self.taskgroup,
client = self.pulsar_client,
backend = self.pubsub,
flow = None,
topic = config_request_queue,
subscriber = id,
@ -122,14 +122,14 @@ class Processor(AsyncProcessor):
)
self.config_response_producer = Producer(
client = self.pulsar_client,
backend = self.pubsub,
topic = config_response_queue,
schema = ConfigResponse,
metrics = config_response_metrics,
)
self.config_push_producer = Producer(
client = self.pulsar_client,
backend = self.pubsub,
topic = config_push_queue,
schema = ConfigPush,
metrics = config_push_metrics,
@ -137,7 +137,7 @@ class Processor(AsyncProcessor):
self.flow_request_consumer = Consumer(
taskgroup = self.taskgroup,
client = self.pulsar_client,
backend = self.pubsub,
flow = None,
topic = flow_request_queue,
subscriber = id,
@ -147,7 +147,7 @@ class Processor(AsyncProcessor):
)
self.flow_response_producer = Producer(
client = self.pulsar_client,
backend = self.pubsub,
topic = flow_response_queue,
schema = FlowResponse,
metrics = flow_response_metrics,
@ -178,11 +178,7 @@ class Processor(AsyncProcessor):
resp = ConfigPush(
version = version,
value = None,
directory = None,
values = None,
config = config,
error = None,
)
await self.config_push_producer.send(resp)
@ -215,7 +211,6 @@ class Processor(AsyncProcessor):
type = "config-error",
message = str(e),
),
text=None,
)
await self.config_response_producer.send(
@ -240,13 +235,12 @@ class Processor(AsyncProcessor):
)
except Exception as e:
resp = FlowResponse(
error=Error(
type = "flow-error",
message = str(e),
),
text=None,
)
await self.flow_response_producer.send(

View file

@ -234,11 +234,11 @@ class KnowledgeManager:
logger.debug(f"Graph embeddings queue: {ge_q}")
t_pub = Publisher(
self.flow_config.pulsar_client, t_q,
self.flow_config.pubsub, t_q,
schema=Triples,
)
ge_pub = Publisher(
self.flow_config.pulsar_client, ge_q,
self.flow_config.pubsub, ge_q,
schema=GraphEmbeddings
)

View file

@ -84,7 +84,7 @@ class Processor(AsyncProcessor):
self.knowledge_request_consumer = Consumer(
taskgroup = self.taskgroup,
client = self.pulsar_client,
backend = self.pubsub,
flow = None,
topic = knowledge_request_queue,
subscriber = id,
@ -94,7 +94,7 @@ class Processor(AsyncProcessor):
)
self.knowledge_response_producer = Producer(
client = self.pulsar_client,
backend = self.pubsub,
topic = knowledge_response_queue,
schema = KnowledgeResponse,
metrics = knowledge_response_metrics,

View file

@ -34,9 +34,9 @@ logger.setLevel(logging.INFO)
class ConfigReceiver:
def __init__(self, pulsar_client):
def __init__(self, backend):
self.pulsar_client = pulsar_client
self.backend = backend
self.flow_handlers = []
@ -104,8 +104,8 @@ class ConfigReceiver:
self.config_cons = Consumer(
taskgroup = tg,
flow = None,
client = self.pulsar_client,
subscriber = f"gateway-{id}",
backend = self.backend,
subscriber = f"gateway-{id}",
topic = config_push_queue,
schema = ConfigPush,
handler = self.on_config,

View file

@ -6,12 +6,12 @@ from . requestor import ServiceRequestor
class AgentRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(AgentRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=AgentRequest,

View file

@ -5,7 +5,7 @@ from ... messaging import TranslatorRegistry
from . requestor import ServiceRequestor
class CollectionManagementRequestor(ServiceRequestor):
def __init__(self, pulsar_client, consumer, subscriber, timeout=120,
def __init__(self, backend, consumer, subscriber, timeout=120,
request_queue=None, response_queue=None):
if request_queue is None:
@ -14,7 +14,7 @@ class CollectionManagementRequestor(ServiceRequestor):
response_queue = collection_response_queue
super(CollectionManagementRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
consumer_name = consumer,
subscription = subscriber,
request_queue=request_queue,

View file

@ -7,7 +7,7 @@ from ... messaging import TranslatorRegistry
from . requestor import ServiceRequestor
class ConfigRequestor(ServiceRequestor):
def __init__(self, pulsar_client, consumer, subscriber, timeout=120,
def __init__(self, backend, consumer, subscriber, timeout=120,
request_queue=None, response_queue=None):
if request_queue is None:
@ -16,7 +16,7 @@ class ConfigRequestor(ServiceRequestor):
response_queue = config_response_queue
super(ConfigRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
consumer_name = consumer,
subscription = subscriber,
request_queue=request_queue,

View file

@ -10,9 +10,9 @@ logger = logging.getLogger(__name__)
class CoreExport:
def __init__(self, pulsar_client):
self.pulsar_client = pulsar_client
def __init__(self, backend):
self.backend = backend
async def process(self, data, error, ok, request):
id = request.query["id"]
@ -21,7 +21,7 @@ class CoreExport:
response = await ok()
kr = KnowledgeRequestor(
pulsar_client = self.pulsar_client,
backend = self.backend,
consumer = "api-gateway-core-export-" + str(uuid.uuid4()),
subscriber = "api-gateway-core-export-" + str(uuid.uuid4()),
)

View file

@ -11,8 +11,8 @@ logger = logging.getLogger(__name__)
class CoreImport:
def __init__(self, pulsar_client):
self.pulsar_client = pulsar_client
def __init__(self, backend):
self.backend = backend
async def process(self, data, error, ok, request):
@ -20,7 +20,7 @@ class CoreImport:
user = request.query["user"]
kr = KnowledgeRequestor(
pulsar_client = self.pulsar_client,
backend = self.backend,
consumer = "api-gateway-core-import-" + str(uuid.uuid4()),
subscriber = "api-gateway-core-import-" + str(uuid.uuid4()),
)

View file

@ -15,12 +15,12 @@ logger = logging.getLogger(__name__)
class DocumentEmbeddingsExport:
def __init__(
self, ws, running, pulsar_client, queue, consumer, subscriber
self, ws, running, backend, queue, consumer, subscriber
):
self.ws = ws
self.running = running
self.pulsar_client = pulsar_client
self.backend = backend
self.queue = queue
self.consumer = consumer
self.subscriber = subscriber
@ -48,9 +48,9 @@ class DocumentEmbeddingsExport:
async def run(self):
"""Enhanced run with better error handling"""
self.subs = Subscriber(
client = self.pulsar_client,
backend = self.backend,
topic = self.queue,
consumer_name = self.consumer,
consumer_name = self.consumer,
subscription = self.subscriber,
schema = DocumentEmbeddings,
backpressure_strategy = "block" # Configurable

View file

@ -15,7 +15,7 @@ logger = logging.getLogger(__name__)
class DocumentEmbeddingsImport:
def __init__(
self, ws, running, pulsar_client, queue
self, ws, running, backend, queue
):
self.ws = ws
@ -23,7 +23,7 @@ class DocumentEmbeddingsImport:
self.translator = DocumentEmbeddingsTranslator()
self.publisher = Publisher(
pulsar_client, topic = queue, schema = DocumentEmbeddings
backend, topic = queue, schema = DocumentEmbeddings
)
async def start(self):

View file

@ -11,10 +11,10 @@ from . sender import ServiceSender
logger = logging.getLogger(__name__)
class DocumentLoad(ServiceSender):
def __init__(self, pulsar_client, queue):
def __init__(self, backend, queue):
super(DocumentLoad, self).__init__(
pulsar_client = pulsar_client,
backend = backend,
queue = queue,
schema = Document,
)

View file

@ -6,12 +6,12 @@ from . requestor import ServiceRequestor
class DocumentRagRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(DocumentRagRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=DocumentRagQuery,

View file

@ -6,12 +6,12 @@ from . requestor import ServiceRequestor
class EmbeddingsRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(EmbeddingsRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=EmbeddingsRequest,

View file

@ -15,12 +15,12 @@ logger = logging.getLogger(__name__)
class EntityContextsExport:
def __init__(
self, ws, running, pulsar_client, queue, consumer, subscriber
self, ws, running, backend, queue, consumer, subscriber
):
self.ws = ws
self.running = running
self.pulsar_client = pulsar_client
self.backend = backend
self.queue = queue
self.consumer = consumer
self.subscriber = subscriber
@ -48,9 +48,9 @@ class EntityContextsExport:
async def run(self):
"""Enhanced run with better error handling"""
self.subs = Subscriber(
client = self.pulsar_client,
backend = self.backend,
topic = self.queue,
consumer_name = self.consumer,
consumer_name = self.consumer,
subscription = self.subscriber,
schema = EntityContexts,
backpressure_strategy = "block" # Configurable

View file

@ -16,14 +16,14 @@ logger = logging.getLogger(__name__)
class EntityContextsImport:
def __init__(
self, ws, running, pulsar_client, queue
self, ws, running, backend, queue
):
self.ws = ws
self.running = running
self.publisher = Publisher(
pulsar_client, topic = queue, schema = EntityContexts
backend, topic = queue, schema = EntityContexts
)
async def start(self):

View file

@ -7,7 +7,7 @@ from ... messaging import TranslatorRegistry
from . requestor import ServiceRequestor
class FlowRequestor(ServiceRequestor):
def __init__(self, pulsar_client, consumer, subscriber, timeout=120,
def __init__(self, backend, consumer, subscriber, timeout=120,
request_queue=None, response_queue=None):
if request_queue is None:
@ -16,7 +16,7 @@ class FlowRequestor(ServiceRequestor):
response_queue = flow_response_queue
super(FlowRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
consumer_name = consumer,
subscription = subscriber,
request_queue=request_queue,

View file

@ -15,12 +15,12 @@ logger = logging.getLogger(__name__)
class GraphEmbeddingsExport:
def __init__(
self, ws, running, pulsar_client, queue, consumer, subscriber
self, ws, running, backend, queue, consumer, subscriber
):
self.ws = ws
self.running = running
self.pulsar_client = pulsar_client
self.backend = backend
self.queue = queue
self.consumer = consumer
self.subscriber = subscriber
@ -48,9 +48,9 @@ class GraphEmbeddingsExport:
async def run(self):
"""Enhanced run with better error handling"""
self.subs = Subscriber(
client = self.pulsar_client,
backend = self.backend,
topic = self.queue,
consumer_name = self.consumer,
consumer_name = self.consumer,
subscription = self.subscriber,
schema = GraphEmbeddings,
backpressure_strategy = "block" # Configurable

View file

@ -16,14 +16,14 @@ logger = logging.getLogger(__name__)
class GraphEmbeddingsImport:
def __init__(
self, ws, running, pulsar_client, queue
self, ws, running, backend, queue
):
self.ws = ws
self.running = running
self.publisher = Publisher(
pulsar_client, topic = queue, schema = GraphEmbeddings
backend, topic = queue, schema = GraphEmbeddings
)
async def start(self):

View file

@ -6,12 +6,12 @@ from . requestor import ServiceRequestor
class GraphEmbeddingsQueryRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(GraphEmbeddingsQueryRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=GraphEmbeddingsRequest,

View file

@ -6,12 +6,12 @@ from . requestor import ServiceRequestor
class GraphRagRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(GraphRagRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=GraphRagQuery,

View file

@ -10,7 +10,7 @@ from ... messaging import TranslatorRegistry
from . requestor import ServiceRequestor
class KnowledgeRequestor(ServiceRequestor):
def __init__(self, pulsar_client, consumer, subscriber, timeout=120,
def __init__(self, backend, consumer, subscriber, timeout=120,
request_queue=None, response_queue=None):
if request_queue is None:
@ -19,7 +19,7 @@ class KnowledgeRequestor(ServiceRequestor):
response_queue = knowledge_response_queue
super(KnowledgeRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
consumer_name = consumer,
subscription = subscriber,
request_queue=request_queue,

View file

@ -9,7 +9,7 @@ from ... messaging import TranslatorRegistry
from . requestor import ServiceRequestor
class LibrarianRequestor(ServiceRequestor):
def __init__(self, pulsar_client, consumer, subscriber, timeout=120,
def __init__(self, backend, consumer, subscriber, timeout=120,
request_queue=None, response_queue=None):
if request_queue is None:
@ -18,7 +18,7 @@ class LibrarianRequestor(ServiceRequestor):
response_queue = librarian_response_queue
super(LibrarianRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
consumer_name = consumer,
subscription = subscriber,
request_queue=request_queue,

View file

@ -98,9 +98,9 @@ class DispatcherWrapper:
class DispatcherManager:
def __init__(self, pulsar_client, config_receiver, prefix="api-gateway",
def __init__(self, backend, config_receiver, prefix="api-gateway",
queue_overrides=None):
self.pulsar_client = pulsar_client
self.backend = backend
self.config_receiver = config_receiver
self.config_receiver.add_handler(self)
self.prefix = prefix
@ -133,12 +133,12 @@ class DispatcherManager:
async def process_core_import(self, data, error, ok, request):
ci = CoreImport(self.pulsar_client)
ci = CoreImport(self.backend)
return await ci.process(data, error, ok, request)
async def process_core_export(self, data, error, ok, request):
ce = CoreExport(self.pulsar_client)
ce = CoreExport(self.backend)
return await ce.process(data, error, ok, request)
async def process_global_service(self, data, responder, params):
@ -161,7 +161,7 @@ class DispatcherManager:
response_queue = self.queue_overrides[kind].get("response")
dispatcher = global_dispatchers[kind](
pulsar_client = self.pulsar_client,
backend = self.backend,
timeout = 120,
consumer = f"{self.prefix}-{kind}-request",
subscriber = f"{self.prefix}-{kind}-request",
@ -216,7 +216,7 @@ class DispatcherManager:
id = str(uuid.uuid4())
dispatcher = import_dispatchers[kind](
pulsar_client = self.pulsar_client,
backend = self.backend,
ws = ws,
running = running,
queue = qconfig,
@ -254,7 +254,7 @@ class DispatcherManager:
id = str(uuid.uuid4())
dispatcher = export_dispatchers[kind](
pulsar_client = self.pulsar_client,
backend = self.backend,
ws = ws,
running = running,
queue = qconfig,
@ -296,7 +296,7 @@ class DispatcherManager:
if kind in request_response_dispatchers:
dispatcher = request_response_dispatchers[kind](
pulsar_client = self.pulsar_client,
backend = self.backend,
request_queue = qconfig["request"],
response_queue = qconfig["response"],
timeout = 120,
@ -305,7 +305,7 @@ class DispatcherManager:
)
elif kind in sender_dispatchers:
dispatcher = sender_dispatchers[kind](
pulsar_client = self.pulsar_client,
backend = self.backend,
queue = qconfig,
)
else:

View file

@ -6,12 +6,12 @@ from . requestor import ServiceRequestor
class McpToolRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(McpToolRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=ToolRequest,

View file

@ -5,12 +5,12 @@ from . requestor import ServiceRequestor
class NLPQueryRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(NLPQueryRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=QuestionToStructuredQueryRequest,

View file

@ -15,14 +15,14 @@ logger = logging.getLogger(__name__)
class ObjectsImport:
def __init__(
self, ws, running, pulsar_client, queue
self, ws, running, backend, queue
):
self.ws = ws
self.running = running
self.publisher = Publisher(
pulsar_client, topic = queue, schema = ExtractedObject
backend, topic = queue, schema = ExtractedObject
)
async def start(self):

View file

@ -5,12 +5,12 @@ from . requestor import ServiceRequestor
class ObjectsQueryRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(ObjectsQueryRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=ObjectsQueryRequest,

View file

@ -8,12 +8,12 @@ from . requestor import ServiceRequestor
class PromptRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(PromptRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=PromptRequest,

View file

@ -13,7 +13,7 @@ class ServiceRequestor:
def __init__(
self,
pulsar_client,
backend,
request_queue, request_schema,
response_queue, response_schema,
subscription="api-gateway", consumer_name="api-gateway",
@ -21,12 +21,12 @@ class ServiceRequestor:
):
self.pub = Publisher(
pulsar_client, request_queue,
backend, request_queue,
schema=request_schema,
)
self.sub = Subscriber(
pulsar_client, response_queue,
backend, response_queue,
subscription, consumer_name,
response_schema
)

View file

@ -14,12 +14,12 @@ class ServiceSender:
def __init__(
self,
pulsar_client,
backend,
queue, schema,
):
self.pub = Publisher(
pulsar_client, queue,
backend, queue,
schema=schema,
)

View file

@ -13,7 +13,7 @@ class ServiceRequestor:
def __init__(
self,
pulsar_client,
backend,
queue, schema,
handler,
subscription="api-gateway", consumer_name="api-gateway",
@ -21,7 +21,7 @@ class ServiceRequestor:
):
self.sub = Subscriber(
pulsar_client, queue,
backend, queue,
subscription, consumer_name,
schema
)

View file

@ -5,12 +5,12 @@ from . requestor import ServiceRequestor
class StructuredDiagRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(StructuredDiagRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=StructuredDataDiagnosisRequest,

View file

@ -5,12 +5,12 @@ from . requestor import ServiceRequestor
class StructuredQueryRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(StructuredQueryRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=StructuredQueryRequest,

View file

@ -6,12 +6,12 @@ from . requestor import ServiceRequestor
class TextCompletionRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(TextCompletionRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=TextCompletionRequest,

View file

@ -11,10 +11,10 @@ from . sender import ServiceSender
logger = logging.getLogger(__name__)
class TextLoad(ServiceSender):
def __init__(self, pulsar_client, queue):
def __init__(self, backend, queue):
super(TextLoad, self).__init__(
pulsar_client = pulsar_client,
backend = backend,
queue = queue,
schema = TextDocument,
)

View file

@ -15,12 +15,12 @@ logger = logging.getLogger(__name__)
class TriplesExport:
def __init__(
self, ws, running, pulsar_client, queue, consumer, subscriber
self, ws, running, backend, queue, consumer, subscriber
):
self.ws = ws
self.running = running
self.pulsar_client = pulsar_client
self.backend = backend
self.queue = queue
self.consumer = consumer
self.subscriber = subscriber
@ -48,9 +48,9 @@ class TriplesExport:
async def run(self):
"""Enhanced run with better error handling"""
self.subs = Subscriber(
client = self.pulsar_client,
backend = self.backend,
topic = self.queue,
consumer_name = self.consumer,
consumer_name = self.consumer,
subscription = self.subscriber,
schema = Triples,
backpressure_strategy = "block" # Configurable

View file

@ -16,14 +16,14 @@ logger = logging.getLogger(__name__)
class TriplesImport:
def __init__(
self, ws, running, pulsar_client, queue
self, ws, running, backend, queue
):
self.ws = ws
self.running = running
self.publisher = Publisher(
pulsar_client, topic = queue, schema = Triples
backend, topic = queue, schema = Triples
)
async def start(self):

View file

@ -6,12 +6,12 @@ from . requestor import ServiceRequestor
class TriplesQueryRequestor(ServiceRequestor):
def __init__(
self, pulsar_client, request_queue, response_queue, timeout,
self, backend, request_queue, response_queue, timeout,
consumer, subscriber,
):
super(TriplesQueryRequestor, self).__init__(
pulsar_client=pulsar_client,
backend=backend,
request_queue=request_queue,
response_queue=response_queue,
request_schema=TriplesQueryRequest,

View file

@ -10,6 +10,7 @@ import logging
import os
from trustgraph.base.logging import setup_logging
from trustgraph.base.pubsub import get_pubsub
from . auth import Authenticator
from . config.receiver import ConfigReceiver
@ -50,15 +51,8 @@ class Api:
self.pulsar_listener = config.get("pulsar_listener", None)
if self.pulsar_api_key:
self.pulsar_client = pulsar.Client(
self.pulsar_host, listener_name=self.pulsar_listener,
authentication=pulsar.AuthenticationToken(self.pulsar_api_key)
)
else:
self.pulsar_client = pulsar.Client(
self.pulsar_host, listener_name=self.pulsar_listener,
)
# Create backend using factory
self.pubsub_backend = get_pubsub(**config)
self.prometheus_url = config.get(
"prometheus_url", default_prometheus_url,
@ -75,7 +69,7 @@ class Api:
else:
self.auth = Authenticator(allow_all=True)
self.config_receiver = ConfigReceiver(self.pulsar_client)
self.config_receiver = ConfigReceiver(self.pubsub_backend)
# Build queue overrides dictionary from CLI arguments
queue_overrides = {}
@ -121,7 +115,7 @@ class Api:
queue_overrides["librarian"]["response"] = librarian_resp
self.dispatcher_manager = DispatcherManager(
pulsar_client = self.pulsar_client,
backend = self.pubsub_backend,
config_receiver = self.config_receiver,
prefix = "gateway",
queue_overrides = queue_overrides,
@ -174,6 +168,14 @@ def run():
help='Service identifier for logging and metrics (default: api-gateway)',
)
# Pub/sub backend selection
parser.add_argument(
'--pubsub-backend',
default=os.getenv('PUBSUB_BACKEND', 'pulsar'),
choices=['pulsar', 'mqtt'],
help='Pub/sub backend (default: pulsar, env: PUBSUB_BACKEND)',
)
parser.add_argument(
'-p', '--pulsar-host',
default=default_pulsar_host,

View file

@ -143,7 +143,7 @@ class Processor(AsyncProcessor):
self.librarian_request_consumer = Consumer(
taskgroup = self.taskgroup,
client = self.pulsar_client,
backend = self.pubsub,
flow = None,
topic = librarian_request_queue,
subscriber = id,
@ -153,7 +153,7 @@ class Processor(AsyncProcessor):
)
self.librarian_response_producer = Producer(
client = self.pulsar_client,
backend = self.pubsub,
topic = librarian_response_queue,
schema = LibrarianResponse,
metrics = librarian_response_metrics,
@ -161,7 +161,7 @@ class Processor(AsyncProcessor):
self.collection_request_consumer = Consumer(
taskgroup = self.taskgroup,
client = self.pulsar_client,
backend = self.pubsub,
flow = None,
topic = collection_request_queue,
subscriber = id,
@ -171,7 +171,7 @@ class Processor(AsyncProcessor):
)
self.collection_response_producer = Producer(
client = self.pulsar_client,
backend = self.pubsub,
topic = collection_response_queue,
schema = CollectionManagementResponse,
metrics = collection_response_metrics,
@ -183,7 +183,7 @@ class Processor(AsyncProcessor):
)
self.config_request_producer = Producer(
client = self.pulsar_client,
backend = self.pubsub,
topic = config_request_queue,
schema = ConfigRequest,
metrics = config_request_metrics,
@ -195,7 +195,7 @@ class Processor(AsyncProcessor):
self.config_response_consumer = Consumer(
taskgroup = self.taskgroup,
client = self.pulsar_client,
backend = self.pubsub,
flow = None,
topic = config_response_queue,
subscriber = f"{id}-config",
@ -299,14 +299,13 @@ class Processor(AsyncProcessor):
collection = processing.collection
),
data = base64.b64encode(content).decode("utf-8")
)
schema = Document
logger.debug(f"Submitting to queue {q}...")
pub = Publisher(
self.pulsar_client, q, schema=schema
self.pubsub, q, schema=schema
)
await pub.start()

View file

@ -98,16 +98,16 @@ class Processor(FlowProcessor):
async def send_chunk(chunk):
await flow("response").send(
DocumentRagResponse(
chunk=chunk,
response=chunk,
end_of_stream=False,
response=None,
error=None
),
properties={"id": id}
)
# Query with streaming enabled
full_response = await self.rag.query(
# The query returns the last chunk (not accumulated text)
final_response = await self.rag.query(
v.query,
user=v.user,
collection=v.collection,
@ -116,12 +116,11 @@ class Processor(FlowProcessor):
chunk_callback=send_chunk,
)
# Send final message with complete response
# Send final message with last chunk
await flow("response").send(
DocumentRagResponse(
chunk=None,
response=final_response if final_response else "",
end_of_stream=True,
response=full_response,
error=None
),
properties={"id": id}

View file

@ -141,16 +141,16 @@ class Processor(FlowProcessor):
async def send_chunk(chunk):
await flow("response").send(
GraphRagResponse(
chunk=chunk,
response=chunk,
end_of_stream=False,
response=None,
error=None
),
properties={"id": id}
)
# Query with streaming enabled
full_response = await rag.query(
# The query will send chunks via callback AND return the complete text
final_response = await rag.query(
query = v.query, user = v.user, collection = v.collection,
entity_limit = entity_limit, triple_limit = triple_limit,
max_subgraph_size = max_subgraph_size,
@ -159,12 +159,12 @@ class Processor(FlowProcessor):
chunk_callback = send_chunk,
)
# Send final message with complete response
# Send final message - may have last chunk of content with end_of_stream=True
# (prompt service may send final chunk with text, so we pass through whatever we got)
await flow("response").send(
GraphRagResponse(
chunk=None,
response=final_response if final_response else "",
end_of_stream=True,
response=full_response,
error=None
),
properties={"id": id}

View file

@ -26,19 +26,19 @@ class WebSocketResponder:
self.completed = True
class MessageDispatcher:
def __init__(self, max_workers: int = 10, config_receiver=None, pulsar_client=None):
def __init__(self, max_workers: int = 10, config_receiver=None, backend=None):
self.max_workers = max_workers
self.semaphore = asyncio.Semaphore(max_workers)
self.active_tasks = set()
self.pulsar_client = pulsar_client
self.backend = backend
# Use DispatcherManager for flow and service management
if pulsar_client and config_receiver:
self.dispatcher_manager = DispatcherManager(pulsar_client, config_receiver, prefix="rev-gateway")
if backend and config_receiver:
self.dispatcher_manager = DispatcherManager(backend, config_receiver, prefix="rev-gateway")
else:
self.dispatcher_manager = None
logger.warning("No pulsar_client or config_receiver provided - using fallback mode")
logger.warning("No backend or config_receiver provided - using fallback mode")
# Service name mapping from websocket protocol to translator registry
self.service_mapping = {
@ -78,7 +78,7 @@ class MessageDispatcher:
try:
if not self.dispatcher_manager:
raise RuntimeError("DispatcherManager not available - pulsar_client and config_receiver required")
raise RuntimeError("DispatcherManager not available - backend and config_receiver required")
# Use DispatcherManager for flow-based processing
responder = WebSocketResponder()

View file

@ -7,10 +7,10 @@ import os
from aiohttp import ClientSession, WSMsgType, ClientWebSocketResponse
from typing import Optional
from urllib.parse import urlparse, urlunparse
import pulsar
from .dispatcher import MessageDispatcher
from ..gateway.config.receiver import ConfigReceiver
from ..base import get_pubsub
logger = logging.getLogger("rev_gateway")
logger.setLevel(logging.INFO)
@ -56,25 +56,20 @@ class ReverseGateway:
self.pulsar_host = pulsar_host or os.getenv("PULSAR_HOST", "pulsar://pulsar:6650")
self.pulsar_api_key = pulsar_api_key or os.getenv("PULSAR_API_KEY", None)
self.pulsar_listener = pulsar_listener
# Initialize Pulsar client
if self.pulsar_api_key:
self.pulsar_client = pulsar.Client(
self.pulsar_host,
listener_name=self.pulsar_listener,
authentication=pulsar.AuthenticationToken(self.pulsar_api_key)
)
else:
self.pulsar_client = pulsar.Client(
self.pulsar_host,
listener_name=self.pulsar_listener
)
# Create backend using factory
backend_params = {
'pulsar_host': self.pulsar_host,
'pulsar_api_key': self.pulsar_api_key,
'pulsar_listener': self.pulsar_listener,
}
self.backend = get_pubsub(**backend_params)
# Initialize config receiver
self.config_receiver = ConfigReceiver(self.pulsar_client)
# Initialize dispatcher with config_receiver and pulsar_client - must be created after config_receiver
self.dispatcher = MessageDispatcher(max_workers, self.config_receiver, self.pulsar_client)
self.config_receiver = ConfigReceiver(self.backend)
# Initialize dispatcher with config_receiver and backend - must be created after config_receiver
self.dispatcher = MessageDispatcher(max_workers, self.config_receiver, self.backend)
async def connect(self) -> bool:
try:
@ -170,10 +165,10 @@ class ReverseGateway:
self.running = False
await self.dispatcher.shutdown()
await self.disconnect()
# Close Pulsar client
if hasattr(self, 'pulsar_client'):
self.pulsar_client.close()
# Close backend
if hasattr(self, 'backend'):
self.backend.close()
def stop(self):
self.running = False

View file

@ -78,7 +78,7 @@ class Processor(FlowProcessor):
# Create storage management consumer
self.storage_request_consumer = Consumer(
taskgroup=self.taskgroup,
client=self.pulsar_client,
backend=self.pubsub,
flow=None,
topic=object_storage_management_topic,
subscriber=f"{id}-storage",
@ -89,7 +89,7 @@ class Processor(FlowProcessor):
# Create storage management response producer
self.storage_response_producer = Producer(
client=self.pulsar_client,
backend=self.pubsub,
topic=storage_management_response_topic,
schema=StorageManagementResponse,
metrics=storage_response_metrics,

View file

@ -338,7 +338,6 @@ class LibraryTableStore:
for m in row[5]
],
tags = row[6] if row[6] else [],
object_id = row[7],
)
for row in resp
]
@ -384,7 +383,6 @@ class LibraryTableStore:
for m in row[4]
],
tags = row[5] if row[5] else [],
object_id = row[6],
)
logger.debug("Done")