Implement logging strategy (#444)

* Logging strategy and convert all print() calls to logging invocations
This commit is contained in:
cybermaggedon 2025-07-30 23:18:38 +01:00 committed by GitHub
parent 3e0651222b
commit dd70aade11
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
117 changed files with 1216 additions and 667 deletions

View file

@ -18,6 +18,9 @@ import logging
import os
import base64
import uuid
# Module logger
logger = logging.getLogger(__name__)
import json
import pulsar
@ -48,7 +51,7 @@ class ConfigReceiver:
v = msg.value()
print(f"Config version", v.version)
logger.info(f"Config version: {v.version}")
if "flows" in v.config:
@ -68,29 +71,29 @@ class ConfigReceiver:
del self.flows[k]
except Exception as e:
print(f"Exception: {e}", flush=True)
logger.error(f"Config processing exception: {e}", exc_info=True)
async def start_flow(self, id, flow):
    """Start a flow and notify every registered flow handler.

    Parameters:
        id: identifier of the flow being started.
        flow: flow definition passed through to each handler.

    Handler failures are logged and swallowed so that one failing
    handler does not prevent the remaining handlers from running.
    """
    logger.info(f"Starting flow: {id}")
    for handler in self.flow_handlers:
        try:
            await handler.start_flow(id, flow)
        except Exception as e:
            # Keep iterating: a failure in one handler must not block others.
            logger.error(f"Config processing exception: {e}", exc_info=True)
async def stop_flow(self, id, flow):
    """Stop a flow and notify every registered flow handler.

    Parameters:
        id: identifier of the flow being stopped.
        flow: flow definition passed through to each handler.

    Handler failures are logged and swallowed so that one failing
    handler does not prevent the remaining handlers from running.
    """
    logger.info(f"Stopping flow: {id}")
    for handler in self.flow_handlers:
        try:
            await handler.stop_flow(id, flow)
        except Exception as e:
            # Keep iterating: a failure in one handler must not block others.
            logger.error(f"Config processing exception: {e}", exc_info=True)
async def config_loader(self):
@ -111,9 +114,9 @@ class ConfigReceiver:
await self.config_cons.start()
print("Waiting...")
logger.debug("Waiting for config updates...")
print("Config consumer done. :/")
logger.info("Config consumer finished")
async def start(self):

View file

@ -2,8 +2,12 @@
import asyncio
import uuid
import msgpack
import logging
from . knowledge import KnowledgeRequestor
# Module logger
logger = logging.getLogger(__name__)
class CoreExport:
def __init__(self, pulsar_client):
@ -84,7 +88,7 @@ class CoreExport:
except Exception as e:
print("Exception:", e)
logger.error(f"Core export exception: {e}", exc_info=True)
finally:

View file

@ -3,8 +3,12 @@ import asyncio
import json
import uuid
import msgpack
import logging
from . knowledge import KnowledgeRequestor
# Module logger
logger = logging.getLogger(__name__)
class CoreImport:
def __init__(self, pulsar_client):
@ -80,14 +84,14 @@ class CoreImport:
await kr.process(msg)
except Exception as e:
print("Exception:", e)
logger.error(f"Core import exception: {e}", exc_info=True)
await error(str(e))
finally:
await kr.stop()
print("All done.")
logger.info("Core import completed")
response = await ok()
await response.write_eof()

View file

@ -2,12 +2,16 @@
import asyncio
import queue
import uuid
import logging
from ... schema import DocumentEmbeddings
from ... base import Subscriber
from . serialize import serialize_document_embeddings
# Module logger
logger = logging.getLogger(__name__)
class DocumentEmbeddingsExport:
def __init__(
@ -55,7 +59,7 @@ class DocumentEmbeddingsExport:
continue
except Exception as e:
print(f"Exception: {str(e)}", flush=True)
logger.error(f"Exception: {str(e)}", exc_info=True)
break
await subs.unsubscribe_all(id)

View file

@ -1,11 +1,15 @@
import base64
import logging
from ... schema import Document, Metadata
from ... messaging import TranslatorRegistry
from . sender import ServiceSender
# Module logger
logger = logging.getLogger(__name__)
class DocumentLoad(ServiceSender):
def __init__(self, pulsar_client, queue):
@ -18,6 +22,6 @@ class DocumentLoad(ServiceSender):
self.translator = TranslatorRegistry.get_request_translator("document")
def to_request(self, body):
    """Translate an incoming document payload into a Pulsar request.

    Parameters:
        body: raw request body for a document load.

    Returns:
        The Pulsar-format request produced by the document translator.
    """
    logger.info("Document received")
    return self.translator.to_pulsar(body)

View file

@ -2,12 +2,16 @@
import asyncio
import queue
import uuid
import logging
from ... schema import EntityContexts
from ... base import Subscriber
from . serialize import serialize_entity_contexts
# Module logger
logger = logging.getLogger(__name__)
class EntityContextsExport:
def __init__(
@ -55,7 +59,7 @@ class EntityContextsExport:
continue
except Exception as e:
print(f"Exception: {str(e)}", flush=True)
logger.error(f"Exception: {str(e)}", exc_info=True)
break
await subs.unsubscribe_all(id)

View file

@ -2,12 +2,16 @@
import asyncio
import queue
import uuid
import logging
from ... schema import GraphEmbeddings
from ... base import Subscriber
from . serialize import serialize_graph_embeddings
# Module logger
logger = logging.getLogger(__name__)
class GraphEmbeddingsExport:
def __init__(
@ -55,7 +59,7 @@ class GraphEmbeddingsExport:
continue
except Exception as e:
print(f"Exception: {str(e)}", flush=True)
logger.error(f"Exception: {str(e)}", exc_info=True)
break
await subs.unsubscribe_all(id)

View file

@ -2,6 +2,10 @@
import asyncio
from aiohttp import web
import uuid
import logging
# Module logger
logger = logging.getLogger(__name__)
from . config import ConfigRequestor
from . flow import FlowRequestor
@ -92,12 +96,12 @@ class DispatcherManager:
self.dispatchers = {}
async def start_flow(self, id, flow):
    """Record a started flow under its id for later dispatcher lookup.

    Parameters:
        id: identifier of the flow.
        flow: flow definition stored in the flows map.
    """
    logger.info(f"Starting flow {id}")
    self.flows[id] = flow
async def stop_flow(self, id, flow):
    """Remove a stopped flow from the flows map.

    Parameters:
        id: identifier of the flow; raises KeyError if unknown.
        flow: accepted for interface symmetry with start_flow; unused here.
    """
    logger.info(f"Stopping flow {id}")
    del self.flows[id]

View file

@ -2,6 +2,10 @@
import asyncio
import queue
import uuid
import logging
# Module logger
logger = logging.getLogger(__name__)
MAX_OUTSTANDING_REQUESTS = 15
WORKER_CLOSE_WAIT = 0.01
@ -46,7 +50,7 @@ class Mux:
))
except Exception as e:
print("receive exception:", str(e), flush=True)
logger.error(f"Receive exception: {str(e)}", exc_info=True)
await self.ws.send_json({"error": str(e)})
async def maybe_tidy_workers(self, workers):
@ -138,7 +142,7 @@ class Mux:
except Exception as e:
# This is an internal working error, may not be recoverable
print("run prepare exception:", e)
logger.error(f"Run prepare exception: {e}", exc_info=True)
await self.ws.send_json({"id": id, "error": str(e)})
self.running.stop()
@ -155,7 +159,7 @@ class Mux:
)
except Exception as e:
print("Exception2:", e)
logger.error(f"Exception in mux: {e}", exc_info=True)
await self.ws.send_json({"error": str(e)})
self.running.stop()

View file

@ -68,7 +68,7 @@ class ServiceRequestor:
q.get(), timeout=self.timeout
)
except Exception as e:
print("Exception", e)
logger.error(f"Request timeout exception: {e}", exc_info=True)
raise RuntimeError("Timeout")
if resp.error:

View file

@ -1,11 +1,15 @@
import base64
import logging
from ... schema import TextDocument, Metadata
from ... messaging import TranslatorRegistry
from . sender import ServiceSender
# Module logger
logger = logging.getLogger(__name__)
class TextLoad(ServiceSender):
def __init__(self, pulsar_client, queue):
@ -18,6 +22,6 @@ class TextLoad(ServiceSender):
self.translator = TranslatorRegistry.get_request_translator("text-document")
def to_request(self, body):
    """Translate an incoming text-document payload into a Pulsar request.

    Parameters:
        body: raw request body for a text document load.

    Returns:
        The Pulsar-format request produced by the text-document translator.
    """
    logger.info("Text document received")
    return self.translator.to_pulsar(body)

View file

@ -2,12 +2,16 @@
import asyncio
import queue
import uuid
import logging
from ... schema import Triples
from ... base import Subscriber
from . serialize import serialize_triples
# Module logger
logger = logging.getLogger(__name__)
class TriplesExport:
def __init__(
@ -55,7 +59,7 @@ class TriplesExport:
continue
except Exception as e:
print(f"Exception: {str(e)}", flush=True)
logger.error(f"Exception: {str(e)}", exc_info=True)
break
await subs.unsubscribe_all(id)

View file

@ -29,7 +29,7 @@ class ConstantEndpoint:
async def handle(self, request):
print(request.path, "...")
logger.debug(f"Processing request: {request.path}")
try:
ht = request.headers["Authorization"]

View file

@ -33,7 +33,7 @@ class MetricsEndpoint:
async def handle(self, request):
print(request.path, "...")
logger.debug(f"Processing metrics request: {request.path}")
try:
ht = request.headers["Authorization"]

View file

@ -74,24 +74,24 @@ class SocketEndpoint:
self.listener(ws, dispatcher, running)
)
print("Created taskgroup, waiting...")
logger.debug("Created task group, waiting for completion...")
# Wait for threads to complete
print("Task group closed")
logger.debug("Task group closed")
# Finally?
await dispatcher.destroy()
except ExceptionGroup as e:
print("Exception group:", flush=True)
logger.error("Exception group occurred:", exc_info=True)
for se in e.exceptions:
print(" Type:", type(se), flush=True)
print(f" Exception: {se}", flush=True)
logger.error(f" Exception type: {type(se)}")
logger.error(f" Exception: {se}")
except Exception as e:
print("Socket exception:", e, flush=True)
logger.error(f"Socket exception: {e}", exc_info=True)
await ws.close()

View file

@ -36,7 +36,7 @@ class StreamEndpoint:
async def handle(self, request):
print(request.path, "...")
logger.debug(f"Processing request: {request.path}")
try:
ht = request.headers["Authorization"]

View file

@ -28,7 +28,7 @@ class VariableEndpoint:
async def handle(self, request):
print(request.path, "...")
logger.debug(f"Processing request: {request.path}")
try:
ht = request.headers["Authorization"]