Maint/asyncio (#305)

* Move to asyncio services, even though everything is largely sync
cybermaggedon 2025-02-11 23:24:46 +00:00 committed by GitHub
parent a0bf2362f6
commit f350abb415
60 changed files with 243 additions and 227 deletions
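
The change is mechanical across the services below: each message handler becomes a coroutine, each dispatch to a sub-handler gains an await, and each producer send is awaited. A minimal sketch of the before/after shape, assuming simplified stand-ins for the Producer and ConsumerProducer base classes (the real base classes are not part of this diff):

# Sketch of the sync -> async conversion applied in each file below.
# Producer and ConsumerProducer are simplified stand-ins, not the real
# base classes from this repository.
import asyncio

class Producer:
    async def send(self, msg, properties=None):
        # The real send does network I/O; awaiting it keeps the event
        # loop free while the client library works.
        await asyncio.sleep(0)
        print("sent", msg, properties)

class ConsumerProducer:
    def __init__(self):
        self.producer = Producer()

    # Before: def handle(self, msg), with plain calls throughout.
    # After: handle() is a coroutine and every dispatch and send is awaited.
    async def handle(self, msg):
        id, kind = msg.get("id"), msg.get("kind")
        if kind == "extract-definitions":
            await self.handle_extract_definitions(id, msg)
            return

    async def handle_extract_definitions(self, id, v):
        r = {"definitions": [], "error": None}
        await self.producer.send(r, properties={"id": id})

asyncio.run(ConsumerProducer().handle({"id": "1", "kind": "extract-definitions"}))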

View file

@@ -77,7 +77,7 @@ class Processor(ConsumerProducer):
         return json.loads(json_str)

-    def handle(self, msg):
+    async def handle(self, msg):

         v = msg.value()
@@ -91,32 +91,32 @@ class Processor(ConsumerProducer):
         if kind == "extract-definitions":
-            self.handle_extract_definitions(id, v)
+            await self.handle_extract_definitions(id, v)
             return
         elif kind == "extract-topics":
-            self.handle_extract_topics(id, v)
+            await self.handle_extract_topics(id, v)
             return
         elif kind == "extract-relationships":
-            self.handle_extract_relationships(id, v)
+            await self.handle_extract_relationships(id, v)
             return
         elif kind == "extract-rows":
-            self.handle_extract_rows(id, v)
+            await self.handle_extract_rows(id, v)
             return
         elif kind == "kg-prompt":
-            self.handle_kg_prompt(id, v)
+            await self.handle_kg_prompt(id, v)
             return
         elif kind == "document-prompt":
-            self.handle_document_prompt(id, v)
+            await self.handle_document_prompt(id, v)
             return
         else:
@@ -124,7 +124,7 @@ class Processor(ConsumerProducer):
             print("Invalid kind.", flush=True)
             return

-    def handle_extract_definitions(self, id, v):
+    async def handle_extract_definitions(self, id, v):

         try:
@@ -163,7 +163,7 @@ class Processor(ConsumerProducer):
             print("Send response...", flush=True)
             r = PromptResponse(definitions=output, error=None)
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -181,9 +181,9 @@ class Processor(ConsumerProducer):
                 response=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

-    def handle_extract_topics(self, id, v):
+    async def handle_extract_topics(self, id, v):

         try:
@@ -222,7 +222,7 @@ class Processor(ConsumerProducer):
             print("Send response...", flush=True)
             r = PromptResponse(topics=output, error=None)
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -240,9 +240,9 @@ class Processor(ConsumerProducer):
                 response=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

-    def handle_extract_relationships(self, id, v):
+    async def handle_extract_relationships(self, id, v):

         try:
@@ -294,7 +294,7 @@ class Processor(ConsumerProducer):
             print("Send response...", flush=True)
             r = PromptResponse(relationships=output, error=None)
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -312,9 +312,9 @@ class Processor(ConsumerProducer):
                 response=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

-    def handle_extract_rows(self, id, v):
+    async def handle_extract_rows(self, id, v):

         try:
@@ -365,7 +365,7 @@ class Processor(ConsumerProducer):
             print("Send response...", flush=True)
             r = PromptResponse(rows=output, error=None)
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -383,9 +383,9 @@ class Processor(ConsumerProducer):
                 response=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

-    def handle_kg_prompt(self, id, v):
+    async def handle_kg_prompt(self, id, v):

         try:
@@ -399,7 +399,7 @@ class Processor(ConsumerProducer):
             print("Send response...", flush=True)
             r = PromptResponse(answer=ans, error=None)
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -417,9 +417,9 @@ class Processor(ConsumerProducer):
                 response=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

-    def handle_document_prompt(self, id, v):
+    async def handle_document_prompt(self, id, v):

         try:
@@ -436,7 +436,7 @@ class Processor(ConsumerProducer):
             print("Send response...", flush=True)
             r = PromptResponse(answer=ans, error=None)
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -454,7 +454,7 @@ class Processor(ConsumerProducer):
                 response=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

     @staticmethod
     def add_args(parser):
@@ -480,5 +480,5 @@ def run():
     raise RuntimeError("NOT IMPLEMENTED")

-    Processor.start(module, __doc__)
+    Processor.launch(module, __doc__)
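
Every file in this commit also renames the entrypoint from Processor.start to Processor.launch. The base class is not included in this excerpt, so the following is only a guess at the shape such a launch() helper might take: a classmethod that builds the processor and hands its coroutine-based service loop to asyncio.run():

# Hypothetical launch() entrypoint; signature and behaviour are assumptions,
# since the base class changes are not part of this diff.
import asyncio

class Processor:
    async def run(self):
        # Stand-in service loop: consume and handle messages on the event loop.
        for msg in ({"kind": "kg-prompt"}, {"kind": "document-prompt"}):
            await self.handle(msg)

    async def handle(self, msg):
        print("handled", msg["kind"])

    @classmethod
    def launch(cls, module, doc):
        # asyncio.run() owns the event loop for the lifetime of the service.
        asyncio.run(cls().run())

Processor.launch("prompt", __doc__)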

View file

@@ -155,7 +155,7 @@ class Processor(ConsumerProducer):
             config = prompt_configuration,
         )

-    def handle(self, msg):
+    async def handle(self, msg):

         v = msg.value()
@@ -190,7 +190,7 @@ class Processor(ConsumerProducer):
                 error=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})
             return
@@ -205,7 +205,7 @@ class Processor(ConsumerProducer):
                 error=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})
             return
@@ -223,7 +223,7 @@ class Processor(ConsumerProducer):
                 response=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})

         except Exception as e:
@@ -239,7 +239,7 @@ class Processor(ConsumerProducer):
                 response=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})

     @staticmethod
     def add_args(parser):
@@ -293,5 +293,5 @@ class Processor(ConsumerProducer):
 def run():
-    Processor.start(module, __doc__)
+    Processor.launch(module, __doc__)

View file

@@ -123,7 +123,7 @@ class Processor(ConsumerProducer):
         return result

-    def handle(self, msg):
+    async def handle(self, msg):

         v = msg.value()
@@ -154,7 +154,7 @@ class Processor(ConsumerProducer):
             print("Send response...", flush=True)
             r = TextCompletionResponse(response=resp, error=None, in_token=inputtokens, out_token=outputtokens, model=self.model)
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

         except TooManyRequests:
@@ -182,7 +182,7 @@ class Processor(ConsumerProducer):
                 model=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

             self.consumer.acknowledge(msg)
@@ -224,4 +224,4 @@ class Processor(ConsumerProducer):
 def run():
-    Processor.start(module, __doc__)
+    Processor.launch(module, __doc__)

View file

@@ -84,7 +84,7 @@ class Processor(ConsumerProducer):
             azure_endpoint = endpoint,
         )

-    def handle(self, msg):
+    async def handle(self, msg):

         v = msg.value()
@@ -133,7 +133,7 @@ class Processor(ConsumerProducer):
                 model=self.model
             )
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

         except RateLimitError:
@@ -161,7 +161,7 @@ class Processor(ConsumerProducer):
                 model=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

             self.consumer.acknowledge(msg)
@@ -212,4 +212,4 @@ class Processor(ConsumerProducer):
 def run():
-    Processor.start(module, __doc__)
+    Processor.launch(module, __doc__)

View file

@@ -73,7 +73,7 @@ class Processor(ConsumerProducer):
         print("Initialised", flush=True)

-    def handle(self, msg):
+    async def handle(self, msg):

         v = msg.value()
@@ -150,7 +150,7 @@ class Processor(ConsumerProducer):
                 model=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})

             self.consumer.acknowledge(msg)
@@ -190,6 +190,6 @@ class Processor(ConsumerProducer):
 def run():
-    Processor.start(module, __doc__)
+    Processor.launch(module, __doc__)

View file

@@ -69,7 +69,7 @@ class Processor(ConsumerProducer):
         print("Initialised", flush=True)

-    def handle(self, msg):
+    async def handle(self, msg):

         v = msg.value()
@@ -106,7 +106,7 @@ class Processor(ConsumerProducer):
             print("Send response...", flush=True)
             r = TextCompletionResponse(response=resp, error=None, in_token=inputtokens, out_token=outputtokens, model=self.model)
-            self.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -136,7 +136,7 @@ class Processor(ConsumerProducer):
                 model=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.producer.send(r, properties={"id": id})

             self.consumer.acknowledge(msg)
@@ -169,6 +169,6 @@ class Processor(ConsumerProducer):
 def run():
-    Processor.start(module, __doc__)
+    Processor.launch(module, __doc__)

View file

@@ -102,7 +102,7 @@ class Processor(ConsumerProducer):
         print("Initialised", flush=True)

-    def handle(self, msg):
+    async def handle(self, msg):

         v = msg.value()
@@ -146,7 +146,7 @@ class Processor(ConsumerProducer):
                 out_token=outputtokens,
                 model=self.model
             )
-            self.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -177,7 +177,7 @@ class Processor(ConsumerProducer):
                 model=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})

             self.consumer.acknowledge(msg)
@@ -217,6 +217,6 @@ class Processor(ConsumerProducer):
 def run():
-    Processor.start(module, __doc__)
+    Processor.launch(module, __doc__)

View file

@@ -74,7 +74,7 @@ class Processor(ConsumerProducer):
         print("Initialised", flush=True)

-    def handle(self, msg):
+    async def handle(self, msg):

         v = msg.value()
@@ -122,7 +122,7 @@ class Processor(ConsumerProducer):
                 out_token=outputtokens,
                 model="llama.cpp"
             )
-            self.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -145,7 +145,7 @@ class Processor(ConsumerProducer):
                 model=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})

             self.consumer.acknowledge(msg)
@@ -185,6 +185,6 @@ class Processor(ConsumerProducer):
 def run():
-    Processor.start(module, __doc__)
+    Processor.launch(module, __doc__)

View file

@@ -71,7 +71,7 @@ class Processor(ConsumerProducer):
         self.model = model
         self.llm = Client(host=ollama)

-    def handle(self, msg):
+    async def handle(self, msg):

         v = msg.value()
@@ -96,7 +96,7 @@ class Processor(ConsumerProducer):
             r = TextCompletionResponse(response=response_text, error=None, in_token=inputtokens, out_token=outputtokens, model="ollama")
-            self.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -119,7 +119,7 @@ class Processor(ConsumerProducer):
                 model=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})

             self.consumer.acknowledge(msg)
@@ -145,6 +145,6 @@ class Processor(ConsumerProducer):
 def run():
-    Processor.start(module, __doc__)
+    Processor.launch(module, __doc__)

View file

@@ -73,7 +73,7 @@ class Processor(ConsumerProducer):
         print("Initialised", flush=True)

-    def handle(self, msg):
+    async def handle(self, msg):

         v = msg.value()
@@ -126,7 +126,7 @@ class Processor(ConsumerProducer):
                 out_token=outputtokens,
                 model=self.model
             )
-            self.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})
             print("Done.", flush=True)
@@ -156,7 +156,7 @@ class Processor(ConsumerProducer):
                 model=None,
             )
-            self.producer.send(r, properties={"id": id})
+            await self.send(r, properties={"id": id})

             self.consumer.acknowledge(msg)
@@ -196,6 +196,6 @@ class Processor(ConsumerProducer):
 def run():
-    Processor.start(module, __doc__)
+    Processor.launch(module, __doc__)
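
One recurring detail in the LLM services above: in the error branch, the response send becomes awaited while the consumer acknowledgement remains a plain synchronous call. A self-contained sketch of that mixed pattern, with all class names as illustrative stand-ins rather than the real classes:

# Error-path pattern repeated across the LLM services: awaited send,
# synchronous acknowledge. All names here are stand-ins for illustration.
import asyncio

class FakeProducer:
    async def send(self, msg, properties=None):
        await asyncio.sleep(0)
        print("error response sent:", msg)

class FakeConsumer:
    def acknowledge(self, msg):
        # Acknowledgement stays a synchronous call in this commit.
        print("acknowledged")

class Processor:
    def __init__(self):
        self.producer = FakeProducer()
        self.consumer = FakeConsumer()

    async def handle(self, msg):
        try:
            raise RuntimeError("model unavailable")  # simulate an LLM failure
        except Exception as e:
            r = {"error": str(e), "response": None, "model": None}
            await self.producer.send(r, properties={"id": "x"})
            self.consumer.acknowledge(msg)

asyncio.run(Processor().handle({"prompt": "hi"}))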