feat: add type hints to all public functions in trustgraph/base (#803)

feat: add type hints to all public functions in trustgraph/base

Add type annotations to 23 modules covering:
- Metrics classes (ConsumerMetrics, ProducerMetrics, etc.)
- Spec classes (ConsumerSpec, ProducerSpec, SubscriberSpec, etc.)
- Service classes with add_args() and run() methods
- Utility functions (logging, pubsub, clients)
- AsyncProcessor methods

All 93 public functions now fully typed.

Refs #785

* refactor: deduplicate imports and move __future__ after docstrings

Addresses review feedback on PR #803:
- Remove duplicate 'from argparse import ArgumentParser' across 12 files
- Move 'from __future__ import annotations' to line 1 in all files
- Clean up excessive blank lines
This commit is contained in:
RaccoonLabs 2026-04-16 05:59:04 -03:00 committed by Cyber MacGeddon
parent 22096e07e2
commit 706e62b7c2
23 changed files with 125 additions and 61 deletions

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
Agent manager service completion base class Agent manager service completion base class
@ -97,7 +100,7 @@ class AgentService(FlowProcessor):
) )
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
FlowProcessor.add_args(parser) FlowProcessor.add_args(parser)

View file

@ -1,3 +1,7 @@
from __future__ import annotations
from argparse import ArgumentParser
from typing import Any, Callable
# Base class for processors. Implements: # Base class for processors. Implements:
# - Pub/sub client, subscribe and consume basic # - Pub/sub client, subscribe and consume basic
@ -178,20 +182,20 @@ class AsyncProcessor:
# This is called to stop all threads. An over-ride point for extra # This is called to stop all threads. An over-ride point for extra
# functionality # functionality
def stop(self): def stop(self) -> None:
self.pubsub_backend.close() self.pubsub_backend.close()
self.running = False self.running = False
# Returns the pub/sub backend (new interface) # Returns the pub/sub backend (new interface)
@property @property
def pubsub(self): return self.pubsub_backend def pubsub(self) -> Any: return self.pubsub_backend
# Returns the pulsar host (backward compatibility) # Returns the pulsar host (backward compatibility)
@property @property
def pulsar_host(self): return self._pulsar_host def pulsar_host(self) -> str: return self._pulsar_host
# Register a new event handler for configuration change # Register a new event handler for configuration change
def register_config_handler(self, handler, types=None): def register_config_handler(self, handler: Callable[..., Any], types: list[type] | None = None) -> None:
self.config_handlers.append({ self.config_handlers.append({
"handler": handler, "handler": handler,
"types": set(types) if types else None, "types": set(types) if types else None,
@ -295,13 +299,13 @@ class AsyncProcessor:
raise e raise e
@classmethod @classmethod
def setup_logging(cls, args): def setup_logging(cls, args: dict[str, Any]) -> None:
"""Configure logging for the entire application""" """Configure logging for the entire application"""
setup_logging(args) setup_logging(args)
# Startup fabric. launch calls launch_async in async mode. # Startup fabric. launch calls launch_async in async mode.
@classmethod @classmethod
def launch(cls, ident, doc): def launch(cls, ident: str, doc: str) -> None:
# Start assembling CLI arguments # Start assembling CLI arguments
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
@ -374,7 +378,7 @@ class AsyncProcessor:
# The command-line arguments are built using a stack of add_args # The command-line arguments are built using a stack of add_args
# invocations # invocations
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
add_pubsub_args(parser) add_pubsub_args(parser)
add_logging_args(parser) add_logging_args(parser)

View file

@ -1,16 +1,19 @@
from __future__ import annotations
from typing import Any
from . metrics import ConsumerMetrics from . metrics import ConsumerMetrics
from . consumer import Consumer from . consumer import Consumer
from . spec import Spec from . spec import Spec
class ConsumerSpec(Spec): class ConsumerSpec(Spec):
def __init__(self, name, schema, handler, concurrency = 1): def __init__(self, name: str, schema: Any, handler: Any, concurrency: int = 1) -> None:
self.name = name self.name = name
self.schema = schema self.schema = schema
self.handler = handler self.handler = handler
self.concurrency = concurrency self.concurrency = concurrency
def add(self, flow, processor, definition): def add(self, flow: Any, processor: Any, definition: dict[str, Any]) -> None:
consumer_metrics = ConsumerMetrics( consumer_metrics = ConsumerMetrics(
processor = flow.id, flow = flow.name, name = self.name, processor = flow.id, flow = flow.name, name = self.name,

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
Document embeddings query service. Input is vectors. Output is list of Document embeddings query service. Input is vectors. Output is list of
@ -82,7 +85,7 @@ class DocumentEmbeddingsQueryService(FlowProcessor):
await flow("response").send(r, properties={"id": id}) await flow("response").send(r, properties={"id": id})
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
FlowProcessor.add_args(parser) FlowProcessor.add_args(parser)
@ -93,7 +96,7 @@ class DocumentEmbeddingsQueryService(FlowProcessor):
help=f'Number of concurrent requests (default: {default_concurrency})' help=f'Number of concurrent requests (default: {default_concurrency})'
) )
def run(): def run() -> None:
Processor.launch(default_ident, __doc__) Processor.launch(default_ident, __doc__)

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
Document embeddings store base class Document embeddings store base class
@ -49,7 +52,7 @@ class DocumentEmbeddingsStoreService(FlowProcessor):
raise e raise e
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
FlowProcessor.add_args(parser) FlowProcessor.add_args(parser)

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
Base class for dynamically pluggable tool services. Base class for dynamically pluggable tool services.
@ -173,7 +176,7 @@ class DynamicToolService(AsyncProcessor):
raise NotImplementedError("Subclasses must implement invoke()") raise NotImplementedError("Subclasses must implement invoke()")
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
AsyncProcessor.add_args(parser) AsyncProcessor.add_args(parser)

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
Embeddings resolution base class Embeddings resolution base class
@ -100,7 +103,7 @@ class EmbeddingsService(FlowProcessor):
) )
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
parser.add_argument( parser.add_argument(
'-c', '--concurrency', '-c', '--concurrency',
@ -112,4 +115,3 @@ class EmbeddingsService(FlowProcessor):
FlowProcessor.add_args(parser) FlowProcessor.add_args(parser)

View file

@ -1,3 +1,7 @@
from __future__ import annotations
from typing import Any
from argparse import ArgumentParser
# Base class for processor with management of flows in & out which are managed # Base class for processor with management of flows in & out which are managed
# by configuration. This is probably all processor types, except for the # by configuration. This is probably all processor types, except for the
@ -41,7 +45,7 @@ class FlowProcessor(AsyncProcessor):
logger.info("Service initialised.") logger.info("Service initialised.")
# Register a configuration variable # Register a configuration variable
def register_specification(self, spec): def register_specification(self, spec: Any) -> None:
self.specifications.append(spec) self.specifications.append(spec)
# Start processing for a new flow # Start processing for a new flow
@ -99,7 +103,7 @@ class FlowProcessor(AsyncProcessor):
await super(FlowProcessor, self).start() await super(FlowProcessor, self).start()
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
AsyncProcessor.add_args(parser) AsyncProcessor.add_args(parser)

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from typing import Any
import logging import logging
@ -9,7 +12,7 @@ from .. knowledge import Uri, Literal
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def to_value(x): def to_value(x: Any) -> Any:
"""Convert schema Term to Uri or Literal.""" """Convert schema Term to Uri or Literal."""
if x.type == IRI: if x.type == IRI:
return Uri(x.iri) return Uri(x.iri)

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
Graph embeddings query service. Input is vectors. Output is list of Graph embeddings query service. Input is vectors. Output is list of
@ -82,7 +85,7 @@ class GraphEmbeddingsQueryService(FlowProcessor):
await flow("response").send(r, properties={"id": id}) await flow("response").send(r, properties={"id": id})
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
FlowProcessor.add_args(parser) FlowProcessor.add_args(parser)
@ -93,7 +96,7 @@ class GraphEmbeddingsQueryService(FlowProcessor):
help=f'Number of concurrent requests (default: {default_concurrency})' help=f'Number of concurrent requests (default: {default_concurrency})'
) )
def run(): def run() -> None:
Processor.launch(default_ident, __doc__) Processor.launch(default_ident, __doc__)

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
Graph embeddings store base class Graph embeddings store base class
@ -49,7 +52,7 @@ class GraphEmbeddingsStoreService(FlowProcessor):
raise e raise e
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
FlowProcessor.add_args(parser) FlowProcessor.add_args(parser)

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
LLM text completion base class LLM text completion base class
@ -205,7 +208,7 @@ class LlmService(FlowProcessor):
properties={"id": id} properties={"id": id}
) )
def supports_streaming(self): def supports_streaming(self) -> bool:
""" """
Override in subclass to indicate streaming support. Override in subclass to indicate streaming support.
Returns False by default. Returns False by default.
@ -221,7 +224,7 @@ class LlmService(FlowProcessor):
raise NotImplementedError("Streaming not implemented for this provider") raise NotImplementedError("Streaming not implemented for this provider")
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
parser.add_argument( parser.add_argument(
'-c', '--concurrency', '-c', '--concurrency',

View file

@ -11,7 +11,9 @@ Supports dual output to console and Loki for centralized log aggregation.
import contextvars import contextvars
import logging import logging
import logging.handlers import logging.handlers
from argparse import ArgumentParser
from queue import Queue from queue import Queue
from typing import Any
import os import os
@ -53,7 +55,7 @@ class _ProcessorIdFilter(logging.Filter):
return True return True
def add_logging_args(parser): def add_logging_args(parser: ArgumentParser) -> None:
""" """
Add standard logging arguments to an argument parser. Add standard logging arguments to an argument parser.
@ -100,7 +102,7 @@ def add_logging_args(parser):
) )
def setup_logging(args): def setup_logging(args: dict[str, Any]) -> None:
""" """
Configure logging from parsed command-line arguments. Configure logging from parsed command-line arguments.

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from typing import Any
from prometheus_client import start_http_server, Info, Enum, Histogram from prometheus_client import start_http_server, Info, Enum, Histogram
from prometheus_client import Counter from prometheus_client import Counter
@ -10,7 +13,7 @@ class ConsumerMetrics:
within the flow, including state, requests, processing time, and queues. within the flow, including state, requests, processing time, and queues.
""" """
def __init__(self, processor, flow, name): def __init__(self, processor: str, flow: str, name: str) -> None:
self.processor = processor self.processor = processor
self.flow = flow self.flow = flow
@ -41,30 +44,30 @@ class ConsumerMetrics:
["processor", "flow", "name"], ["processor", "flow", "name"],
) )
def process(self, status): def process(self, status: str) -> None:
__class__.processing_metric.labels( __class__.processing_metric.labels(
processor = self.processor, flow = self.flow, name = self.name, processor = self.processor, flow = self.flow, name = self.name,
status=status status=status
).inc() ).inc()
def rate_limit(self): def rate_limit(self) -> None:
__class__.rate_limit_metric.labels( __class__.rate_limit_metric.labels(
processor = self.processor, flow = self.flow, name = self.name, processor = self.processor, flow = self.flow, name = self.name,
).inc() ).inc()
def state(self, state): def state(self, state: str) -> None:
__class__.state_metric.labels( __class__.state_metric.labels(
processor = self.processor, flow = self.flow, name = self.name, processor = self.processor, flow = self.flow, name = self.name,
).state(state) ).state(state)
def record_time(self): def record_time(self) -> Any:
return __class__.request_metric.labels( return __class__.request_metric.labels(
processor = self.processor, flow = self.flow, name = self.name, processor = self.processor, flow = self.flow, name = self.name,
).time() ).time()
class ProducerMetrics: class ProducerMetrics:
def __init__(self, processor, flow, name): def __init__(self, processor: str, flow: str, name: str) -> None:
self.processor = processor self.processor = processor
self.flow = flow self.flow = flow
@ -76,13 +79,13 @@ class ProducerMetrics:
["processor", "flow", "name"], ["processor", "flow", "name"],
) )
def inc(self): def inc(self) -> None:
__class__.producer_metric.labels( __class__.producer_metric.labels(
processor = self.processor, flow = self.flow, name = self.name processor = self.processor, flow = self.flow, name = self.name
).inc() ).inc()
class ProcessorMetrics: class ProcessorMetrics:
def __init__(self, processor): def __init__(self, processor: str) -> None:
self.processor = processor self.processor = processor
@ -92,14 +95,14 @@ class ProcessorMetrics:
["processor"] ["processor"]
) )
def info(self, info): def info(self, info: dict[str, str]) -> None:
__class__.processor_metric.labels( __class__.processor_metric.labels(
processor = self.processor processor = self.processor
).info(info) ).info(info)
class SubscriberMetrics: class SubscriberMetrics:
def __init__(self, processor, flow, name): def __init__(self, processor: str, flow: str, name: str) -> None:
self.processor = processor self.processor = processor
self.flow = flow self.flow = flow
@ -124,19 +127,18 @@ class SubscriberMetrics:
["processor", "flow", "name"], ["processor", "flow", "name"],
) )
def received(self): def received(self) -> None:
__class__.received_metric.labels( __class__.received_metric.labels(
processor = self.processor, flow = self.flow, name = self.name, processor = self.processor, flow = self.flow, name = self.name,
).inc() ).inc()
def state(self, state): def state(self, state: str) -> None:
__class__.state_metric.labels( __class__.state_metric.labels(
processor = self.processor, flow = self.flow, name = self.name, processor = self.processor, flow = self.flow, name = self.name,
).state(state) ).state(state)
def dropped(self, state): def dropped(self, state: str) -> None:
__class__.dropped_metric.labels( __class__.dropped_metric.labels(
processor = self.processor, flow = self.flow, name = self.name, processor = self.processor, flow = self.flow, name = self.name,
).inc() ).inc()

View file

@ -1,21 +1,23 @@
from __future__ import annotations
from typing import Any
from . spec import Spec from . spec import Spec
class Parameter: class Parameter:
def __init__(self, value): def __init__(self, value: Any) -> None:
self.value = value self.value = value
async def start(self): async def start(self) -> None:
pass pass
async def stop(self): async def stop(self) -> None:
pass pass
class ParameterSpec(Spec): class ParameterSpec(Spec):
def __init__(self, name): def __init__(self, name: str) -> None:
self.name = name self.name = name
def add(self, flow, processor, definition): def add(self, flow: Any, processor: Any, definition: dict[str, Any]) -> None:
value = definition.get(self.name, None) value = definition.get(self.name, None)
flow.parameter[self.name] = Parameter(value) flow.parameter[self.name] = Parameter(value)

View file

@ -1,14 +1,17 @@
from __future__ import annotations
from typing import Any
from . producer import Producer from . producer import Producer
from . metrics import ProducerMetrics from . metrics import ProducerMetrics
from . spec import Spec from . spec import Spec
class ProducerSpec(Spec): class ProducerSpec(Spec):
def __init__(self, name, schema): def __init__(self, name: str, schema: Any) -> None:
self.name = name self.name = name
self.schema = schema self.schema = schema
def add(self, flow, processor, definition): def add(self, flow: Any, processor: Any, definition: dict[str, Any]) -> None:
producer_metrics = ProducerMetrics( producer_metrics = ProducerMetrics(
processor = flow.id, flow = flow.name, name = self.name processor = flow.id, flow = flow.name, name = self.name
@ -22,4 +25,3 @@ class ProducerSpec(Spec):
) )
flow.producer[self.name] = producer flow.producer[self.name] = producer

View file

@ -1,6 +1,9 @@
from __future__ import annotations
import os import os
import logging import logging
from argparse import ArgumentParser
from typing import Any
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -15,7 +18,7 @@ DEFAULT_RABBITMQ_PASSWORD = os.getenv("RABBITMQ_PASSWORD", 'guest')
DEFAULT_RABBITMQ_VHOST = os.getenv("RABBITMQ_VHOST", '/') DEFAULT_RABBITMQ_VHOST = os.getenv("RABBITMQ_VHOST", '/')
def get_pubsub(**config): def get_pubsub(**config: Any) -> Any:
""" """
Factory function to create a pub/sub backend based on configuration. Factory function to create a pub/sub backend based on configuration.
@ -51,7 +54,7 @@ def get_pubsub(**config):
STANDALONE_PULSAR_HOST = 'pulsar://localhost:6650' STANDALONE_PULSAR_HOST = 'pulsar://localhost:6650'
def add_pubsub_args(parser, standalone=False): def add_pubsub_args(parser: ArgumentParser, standalone: bool = False) -> None:
"""Add pub/sub CLI arguments to an argument parser. """Add pub/sub CLI arguments to an argument parser.
Args: Args:

View file

@ -1,7 +1,9 @@
from __future__ import annotations
import uuid import uuid
import asyncio import asyncio
import logging import logging
from typing import Any
from . subscriber import Subscriber from . subscriber import Subscriber
from . producer import Producer from . producer import Producer
@ -115,7 +117,7 @@ class RequestResponseSpec(Spec):
self.response_schema = response_schema self.response_schema = response_schema
self.impl = impl self.impl = impl
def add(self, flow, processor, definition): def add(self, flow: Any, processor: Any, definition: dict[str, Any]) -> None:
request_metrics = ProducerMetrics( request_metrics = ProducerMetrics(
processor = flow.id, flow = flow.name, name = self.request_name processor = flow.id, flow = flow.name, name = self.request_name

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from typing import Any
from . metrics import SubscriberMetrics from . metrics import SubscriberMetrics
from . subscriber import Subscriber from . subscriber import Subscriber
@ -5,11 +8,11 @@ from . spec import Spec
class SubscriberSpec(Spec): class SubscriberSpec(Spec):
def __init__(self, name, schema): def __init__(self, name: str, schema: Any) -> None:
self.name = name self.name = name
self.schema = schema self.schema = schema
def add(self, flow, processor, definition): def add(self, flow: Any, processor: Any, definition: dict[str, Any]) -> None:
subscriber_metrics = SubscriberMetrics( subscriber_metrics = SubscriberMetrics(
processor = flow.id, flow = flow.name, name = self.name processor = flow.id, flow = flow.name, name = self.name
@ -27,4 +30,3 @@ class SubscriberSpec(Spec):
# Put it in the consumer map, does that work? # Put it in the consumer map, does that work?
# It means it gets start/stop call. # It means it gets start/stop call.
flow.consumer[self.name] = subscriber flow.consumer[self.name] = subscriber

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
Tool invocation base class Tool invocation base class
@ -112,7 +115,7 @@ class ToolService(FlowProcessor):
) )
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
parser.add_argument( parser.add_argument(
'-c', '--concurrency', '-c', '--concurrency',

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from typing import Any
from . request_response_spec import RequestResponse, RequestResponseSpec from . request_response_spec import RequestResponse, RequestResponseSpec
from .. schema import TriplesQueryRequest, TriplesQueryResponse, Term, IRI, LITERAL, TRIPLE from .. schema import TriplesQueryRequest, TriplesQueryResponse, Term, IRI, LITERAL, TRIPLE
@ -11,7 +14,7 @@ class Triple:
self.o = o self.o = o
def to_value(x): def to_value(x: Any) -> Any:
"""Convert schema Term to Uri or Literal.""" """Convert schema Term to Uri or Literal."""
if x.type == IRI: if x.type == IRI:
return Uri(x.iri) return Uri(x.iri)
@ -21,7 +24,7 @@ def to_value(x):
return Literal(x.value or x.iri) return Literal(x.value or x.iri)
def from_value(x): def from_value(x: Any) -> Any:
"""Convert Uri, Literal, string, or Term to schema Term.""" """Convert Uri, Literal, string, or Term to schema Term."""
if x is None: if x is None:
return None return None

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
Triples query service. Input is a (s, p, o) triple, some values may be Triples query service. Input is a (s, p, o) triple, some values may be
@ -108,7 +111,7 @@ class TriplesQueryService(FlowProcessor):
yield [], True yield [], True
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
FlowProcessor.add_args(parser) FlowProcessor.add_args(parser)
@ -119,7 +122,7 @@ class TriplesQueryService(FlowProcessor):
help=f'Number of concurrent requests (default: {default_concurrency})' help=f'Number of concurrent requests (default: {default_concurrency})'
) )
def run(): def run() -> None:
Processor.launch(default_ident, __doc__) Processor.launch(default_ident, __doc__)

View file

@ -1,3 +1,6 @@
from __future__ import annotations
from argparse import ArgumentParser
""" """
Triples store base class Triples store base class
@ -53,7 +56,7 @@ class TriplesStoreService(FlowProcessor):
raise e raise e
@staticmethod @staticmethod
def add_args(parser): def add_args(parser: ArgumentParser) -> None:
FlowProcessor.add_args(parser) FlowProcessor.add_args(parser)