# Last updated: 2025-09-26
"""
|
|
|
|
|
Unit tests for Flow Parameter Specification functionality
|
|
|
|
|
Testing parameter specification registration and handling in flow processors
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
import pytest
|
|
|
|
|
from unittest.mock import AsyncMock, MagicMock, patch
|
|
|
|
|
from unittest import IsolatedAsyncioTestCase
|
|
|
|
|
|
|
|
|
|
from trustgraph.base.flow_processor import FlowProcessor
|
|
|
|
|
from trustgraph.base import ParameterSpec, ConsumerSpec, ProducerSpec
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class MockAsyncProcessor:
    """Lightweight stand-in for AsyncProcessor.

    Carries only the attributes the FlowProcessor tests inspect; no real
    messaging or task-group machinery is set up.
    """

    def __init__(self, **params):
        # Service identity defaults to a fixed test id when not supplied.
        self.id = params.get('id', 'test-service')
        # Registered configuration handlers and specifications start empty.
        self.config_handlers = []
        self.specifications = []
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TestFlowParameterSpecs(IsolatedAsyncioTestCase):
    """Test flow processor parameter specification functionality"""

    def _build_processor(self):
        """Construct a FlowProcessor with the standard test configuration."""
        return FlowProcessor(
            id='test-flow-processor',
            concurrency=1,
            taskgroup=AsyncMock(),
        )

    @patch('trustgraph.base.async_processor.AsyncProcessor', MockAsyncProcessor)
    def test_parameter_spec_registration(self):
        """Test that parameter specs can be registered with flow processors"""
        processor = self._build_processor()

        # Register two distinct parameter specifications.
        for spec in (ParameterSpec(name="model"), ParameterSpec(name="temperature")):
            processor.register_specification(spec)

        # Both must be retrievable from the processor's specification list.
        assert len(processor.specifications) >= 2

        registered = [
            s for s in processor.specifications if isinstance(s, ParameterSpec)
        ]
        assert len(registered) >= 2

        names = [s.name for s in registered]
        assert "model" in names
        assert "temperature" in names

    @patch('trustgraph.base.async_processor.AsyncProcessor', MockAsyncProcessor)
    def test_mixed_specification_types(self):
        """Test registration of mixed specification types (parameters, consumers, producers)"""
        processor = self._build_processor()

        # One spec of each kind: parameter, consumer, producer.
        mixed = [
            ParameterSpec(name="model"),
            ConsumerSpec(name="input", schema=MagicMock(), handler=MagicMock()),
            ProducerSpec(name="output", schema=MagicMock()),
        ]
        for spec in mixed:
            processor.register_specification(spec)

        assert len(processor.specifications) == 3

        # Exactly one specification of each type should have been stored.
        by_type = {
            kind: [s for s in processor.specifications if isinstance(s, kind)]
            for kind in (ParameterSpec, ConsumerSpec, ProducerSpec)
        }
        assert len(by_type[ParameterSpec]) == 1
        assert len(by_type[ConsumerSpec]) == 1
        assert len(by_type[ProducerSpec]) == 1

    @patch('trustgraph.base.async_processor.AsyncProcessor', MockAsyncProcessor)
    def test_parameter_spec_metadata(self):
        """Test parameter specification metadata handling"""
        processor = self._build_processor()

        # Register parameter specs whose names we look up afterwards.
        processor.register_specification(ParameterSpec(name="model"))
        processor.register_specification(ParameterSpec(name="temperature"))

        registered = [
            s for s in processor.specifications if isinstance(s, ParameterSpec)
        ]
        by_name = {s.name: s for s in registered}

        # Each registered spec should be findable by name, with the
        # name attribute preserved intact.
        assert by_name.get("model") is not None
        assert by_name.get("temperature") is not None
        assert by_name["model"].name == "model"
        assert by_name["temperature"].name == "temperature"

    @patch('trustgraph.base.async_processor.AsyncProcessor', MockAsyncProcessor)
    def test_duplicate_parameter_spec_handling(self):
        """Test handling of duplicate parameter spec registration"""
        processor = self._build_processor()

        # Register two specs that share the same name.
        processor.register_specification(ParameterSpec(name="model"))
        processor.register_specification(ParameterSpec(name="model"))

        model_specs = [
            s for s in processor.specifications
            if isinstance(s, ParameterSpec) and s.name == "model"
        ]

        # The processor may keep duplicates or deduplicate; either way
        # at least one "model" spec must remain registered.
        assert len(model_specs) >= 1

    @patch('trustgraph.base.async_processor.AsyncProcessor', MockAsyncProcessor)
    @patch('trustgraph.base.flow_processor.Flow')
    async def test_parameter_specs_available_to_flows(self, mock_flow_class):
        """Test that parameter specs are available when flows are created"""
        processor = self._build_processor()
        processor.id = 'test-processor'

        # Register parameter specs ahead of flow creation.
        for name in ("model", "temperature"):
            processor.register_specification(ParameterSpec(name=name))

        mock_flow_class.return_value = AsyncMock()

        flow_name = 'test-flow'
        flow_defn = {'config': 'test-config'}

        await processor.start_flow("default", flow_name, flow_defn)

        # The Flow is handed the processor itself, giving it access to
        # the registered specifications.
        mock_flow_class.assert_called_once_with(
            'test-processor', flow_name, "default", processor, flow_defn
        )
        assert len(processor.specifications) >= 2
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TestParameterSpecValidation(IsolatedAsyncioTestCase):
    """Test parameter specification validation functionality"""

    @patch('trustgraph.base.async_processor.AsyncProcessor', MockAsyncProcessor)
    def test_parameter_spec_name_validation(self):
        """Test parameter spec name validation"""
        processor = FlowProcessor(
            id='test-flow-processor',
            concurrency=1,
            taskgroup=AsyncMock(),
        )

        # Every one of these names is expected to register without raising.
        for name in ("model", "temperature", "max_tokens", "api_key"):
            processor.register_specification(ParameterSpec(name=name))

        registered = [
            s for s in processor.specifications if isinstance(s, ParameterSpec)
        ]
        assert len(registered) >= 4

    def test_parameter_spec_creation_validation(self):
        """Test parameter spec creation with various inputs"""
        # Specs built from valid names must expose a non-None string name.
        for name in ("model", "temperature", "max_output"):
            spec = ParameterSpec(name=name)
            assert spec.name is not None
            assert isinstance(spec.name, str)

        # Edge case: an empty name may or may not be rejected, depending
        # on whether ParameterSpec performs validation; both outcomes are
        # tolerated here.
        try:
            ParameterSpec(name="")
        except Exception:
            # If validation exists, it should catch invalid names.
            pass
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Allow running this module directly by delegating to pytest's CLI.
if __name__ == '__main__':
    pytest.main([__file__])
|