Per-workspace queue routing for workspace-scoped services (#862)

Workspace identity is now determined by queue infrastructure instead of
message body fields, closing a privilege-escalation vector where a caller
could spoof workspace in the request payload.

- Add WorkspaceProcessor base class: discovers workspaces from config at
  startup, creates per-workspace consumers (queue:workspace), and manages
  consumer lifecycle on workspace create/delete events
- Roll out to librarian, flow-svc, knowledge cores, and config-svc
- Config service gets a dual-queue regime: a system queue for
  cross-workspace ops (getvalues-all-ws, bootstrapper writes to
  __workspaces__) and per-workspace queues for tenant-scoped ops, with
  workspace discovery from its own Cassandra store
- Remove workspace field from request schemas (FlowRequest,
  LibrarianRequest, KnowledgeRequest, CollectionManagementRequest) and
  from DocumentMetadata / ProcessingMetadata — table stores now accept
  workspace as an explicit parameter
- Strip workspace encode/decode from all message translators and gateway
  serializers
- Gateway enforces workspace existence: reject requests targeting
  non-existent workspaces instead of routing to queues with no consumer
- Config service provisions new workspaces from __template__ on creation
- Add workspace lifecycle hooks to AsyncProcessor so any processor can
  react to workspace create/delete without subclassing WorkspaceProcessor

This commit is contained in:
cybermaggedon 2026-05-04 10:30:03 +01:00 committed by GitHub
parent 9be257ceee
commit 9f2bfbce0c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
53 changed files with 1565 additions and 677 deletions

View file

@ -233,7 +233,7 @@ class TestFlowProcessorSimple(IsolatedAsyncioTestCase):
mock_flow2.start.assert_called_once()
@with_async_processor_patches
@patch('trustgraph.base.async_processor.AsyncProcessor.start')
@patch('trustgraph.base.workspace_processor.WorkspaceProcessor.start')
async def test_start_calls_parent(self, mock_parent_start, *mocks):
"""Test that start() calls parent start method"""
mock_parent_start.return_value = None

View file

@ -45,7 +45,6 @@ def mock_flow_config():
def mock_request():
"""Mock knowledge load request."""
request = Mock()
request.workspace = "test-user"
request.id = "test-doc-id"
request.collection = "test-collection"
request.flow = "test-flow"
@ -131,17 +130,17 @@ class TestKnowledgeManagerLoadCore:
# Start the core loader background task
knowledge_manager.background_task = None
await knowledge_manager.load_kg_core(mock_request, mock_respond)
await knowledge_manager.load_kg_core(mock_request, mock_respond, "test-user")
# Wait for background processing
import asyncio
await asyncio.sleep(0.1)
# Verify publishers were created and started
assert mock_publisher_class.call_count == 2
mock_triples_pub.start.assert_called_once()
mock_ge_pub.start.assert_called_once()
# Verify triples were sent with correct collection
mock_triples_pub.send.assert_called_once()
sent_triples = mock_triples_pub.send.call_args[0][1]
@ -174,12 +173,12 @@ class TestKnowledgeManagerLoadCore:
# Start the core loader background task
knowledge_manager.background_task = None
await knowledge_manager.load_kg_core(mock_request, mock_respond)
await knowledge_manager.load_kg_core(mock_request, mock_respond, "test-user")
# Wait for background processing
import asyncio
await asyncio.sleep(0.1)
# Verify graph embeddings were sent with correct collection
mock_ge_pub.send.assert_called_once()
sent_ge = mock_ge_pub.send.call_args[0][1]
@ -191,7 +190,6 @@ class TestKnowledgeManagerLoadCore:
"""Test that load_kg_core falls back to 'default' when request.collection is None."""
# Create request with None collection
mock_request = Mock()
mock_request.workspace = "test-user"
mock_request.id = "test-doc-id"
mock_request.collection = None # Should fall back to "default"
mock_request.flow = "test-flow"
@ -213,12 +211,12 @@ class TestKnowledgeManagerLoadCore:
# Start the core loader background task
knowledge_manager.background_task = None
await knowledge_manager.load_kg_core(mock_request, mock_respond)
await knowledge_manager.load_kg_core(mock_request, mock_respond, "test-user")
# Wait for background processing
import asyncio
await asyncio.sleep(0.1)
# Verify triples were sent with default collection
mock_triples_pub.send.assert_called_once()
sent_triples = mock_triples_pub.send.call_args[0][1]
@ -246,13 +244,13 @@ class TestKnowledgeManagerLoadCore:
mock_publisher_class.side_effect = [mock_triples_pub, mock_ge_pub]
# Start the core loader background task
knowledge_manager.background_task = None
await knowledge_manager.load_kg_core(mock_request, mock_respond)
knowledge_manager.background_task = None
await knowledge_manager.load_kg_core(mock_request, mock_respond, "test-user")
# Wait for background processing
import asyncio
await asyncio.sleep(0.1)
# Verify both publishers were used with correct collection
mock_triples_pub.send.assert_called_once()
sent_triples = mock_triples_pub.send.call_args[0][1]
@ -267,7 +265,6 @@ class TestKnowledgeManagerLoadCore:
"""Test that load_kg_core validates flow configuration before processing."""
# Request with invalid flow
mock_request = Mock()
mock_request.workspace = "test-user"
mock_request.id = "test-doc-id"
mock_request.collection = "test-collection"
mock_request.flow = "invalid-flow" # Not in mock_flow_config.flows
@ -276,12 +273,12 @@ class TestKnowledgeManagerLoadCore:
# Start the core loader background task
knowledge_manager.background_task = None
await knowledge_manager.load_kg_core(mock_request, mock_respond)
await knowledge_manager.load_kg_core(mock_request, mock_respond, "test-user")
# Wait for background processing
import asyncio
await asyncio.sleep(0.1)
# Should have responded with error
mock_respond.assert_called()
response = mock_respond.call_args[0][0]
@ -295,18 +292,17 @@ class TestKnowledgeManagerLoadCore:
# Test missing ID
mock_request = Mock()
mock_request.workspace = "test-user"
mock_request.id = None # Missing
mock_request.collection = "test-collection"
mock_request.flow = "test-flow"
knowledge_manager.background_task = None
await knowledge_manager.load_kg_core(mock_request, mock_respond)
await knowledge_manager.load_kg_core(mock_request, mock_respond, "test-user")
# Wait for background processing
import asyncio
await asyncio.sleep(0.1)
# Should respond with error
mock_respond.assert_called()
response = mock_respond.call_args[0][0]
@ -321,18 +317,17 @@ class TestKnowledgeManagerOtherMethods:
async def test_get_kg_core_preserves_collection_from_store(self, knowledge_manager, sample_triples):
"""Test that get_kg_core preserves collection field from stored data."""
mock_request = Mock()
mock_request.workspace = "test-user"
mock_request.id = "test-doc-id"
mock_respond = AsyncMock()
async def mock_get_triples(user, doc_id, receiver):
await receiver(sample_triples)
knowledge_manager.table_store.get_triples = mock_get_triples
knowledge_manager.table_store.get_graph_embeddings = AsyncMock()
await knowledge_manager.get_kg_core(mock_request, mock_respond)
await knowledge_manager.get_kg_core(mock_request, mock_respond, "test-user")
# Should have called respond for triples and final EOS
assert mock_respond.call_count >= 2
@ -352,14 +347,13 @@ class TestKnowledgeManagerOtherMethods:
async def test_list_kg_cores(self, knowledge_manager):
"""Test listing knowledge cores."""
mock_request = Mock()
mock_request.workspace = "test-user"
mock_respond = AsyncMock()
# Mock return value
knowledge_manager.table_store.list_kg_cores.return_value = ["doc1", "doc2", "doc3"]
await knowledge_manager.list_kg_cores(mock_request, mock_respond)
await knowledge_manager.list_kg_cores(mock_request, mock_respond, "test-user")
# Verify table store was called correctly
knowledge_manager.table_store.list_kg_cores.assert_called_once_with("test-user")
@ -374,12 +368,11 @@ class TestKnowledgeManagerOtherMethods:
async def test_delete_kg_core(self, knowledge_manager):
"""Test deleting knowledge cores."""
mock_request = Mock()
mock_request.workspace = "test-user"
mock_request.id = "test-doc-id"
mock_respond = AsyncMock()
await knowledge_manager.delete_kg_core(mock_request, mock_respond)
await knowledge_manager.delete_kg_core(mock_request, mock_respond, "test-user")
# Verify table store was called correctly
knowledge_manager.table_store.delete_kg_core.assert_called_once_with("test-user", "test-doc-id")

View file

@ -34,7 +34,7 @@ class _Identity:
self.source = "api-key"
def _allow_auth(identity=None):
def _allow_auth(identity=None, workspaces=None):
"""Build an Auth double that authenticates to ``identity`` and
allows every authorise() call."""
auth = MagicMock()
@ -42,16 +42,18 @@ def _allow_auth(identity=None):
return_value=identity or _Identity(),
)
auth.authorise = AsyncMock(return_value=None)
auth.known_workspaces = workspaces or {"default", "acme"}
return auth
def _deny_auth(identity=None):
def _deny_auth(identity=None, workspaces=None):
"""Build an Auth double that authenticates but denies authorise."""
auth = MagicMock()
auth.authenticate = AsyncMock(
return_value=identity or _Identity(),
)
auth.authorise = AsyncMock(side_effect=access_denied())
auth.known_workspaces = workspaces or {"default", "acme"}
return auth

View file

@ -176,7 +176,7 @@ class TestDispatcherManager:
params = {"kind": "test_kind"}
result = await manager.process_global_service("data", "responder", params)
manager.invoke_global_service.assert_called_once_with("data", "responder", "test_kind")
manager.invoke_global_service.assert_called_once_with("data", "responder", "test_kind", workspace=None)
assert result == "global_result"
@pytest.mark.asyncio
@ -185,24 +185,24 @@ class TestDispatcherManager:
mock_backend = Mock()
mock_config_receiver = Mock()
manager = DispatcherManager(mock_backend, mock_config_receiver, auth=Mock())
# Pre-populate with existing dispatcher
mock_dispatcher = Mock()
mock_dispatcher.process = AsyncMock(return_value="cached_result")
manager.dispatchers[(None, "config")] = mock_dispatcher
result = await manager.invoke_global_service("data", "responder", "config")
manager.dispatchers[(None, "iam")] = mock_dispatcher
result = await manager.invoke_global_service("data", "responder", "iam")
mock_dispatcher.process.assert_called_once_with("data", "responder")
assert result == "cached_result"
@pytest.mark.asyncio
async def test_invoke_global_service_creates_new_dispatcher(self):
"""Test invoke_global_service creates new dispatcher"""
"""Test invoke_global_service creates new dispatcher for system service"""
mock_backend = Mock()
mock_config_receiver = Mock()
manager = DispatcherManager(mock_backend, mock_config_receiver, auth=Mock())
with patch('trustgraph.gateway.dispatch.manager.global_dispatchers') as mock_dispatchers:
mock_dispatcher_class = Mock()
mock_dispatcher = Mock()
@ -210,25 +210,51 @@ class TestDispatcherManager:
mock_dispatcher.process = AsyncMock(return_value="new_result")
mock_dispatcher_class.return_value = mock_dispatcher
mock_dispatchers.__getitem__.return_value = mock_dispatcher_class
result = await manager.invoke_global_service("data", "responder", "config")
# Verify dispatcher was created with correct parameters
result = await manager.invoke_global_service("data", "responder", "iam")
mock_dispatcher_class.assert_called_once_with(
backend=mock_backend,
timeout=120,
consumer="api-gateway-config-request",
subscriber="api-gateway-config-request",
consumer="api-gateway-iam-request",
subscriber="api-gateway-iam-request",
request_queue=None,
response_queue=None
)
mock_dispatcher.start.assert_called_once()
mock_dispatcher.process.assert_called_once_with("data", "responder")
# Verify dispatcher was cached
assert manager.dispatchers[(None, "config")] == mock_dispatcher
assert manager.dispatchers[(None, "iam")] == mock_dispatcher
assert result == "new_result"
@pytest.mark.asyncio
async def test_invoke_global_service_workspace_required_for_workspace_dispatchers(self):
"""Workspace dispatchers (config, flow, etc.) require a workspace"""
mock_backend = Mock()
mock_config_receiver = Mock()
manager = DispatcherManager(mock_backend, mock_config_receiver, auth=Mock())
with pytest.raises(RuntimeError, match="Workspace is required for config"):
await manager.invoke_global_service("data", "responder", "config")
@pytest.mark.asyncio
async def test_invoke_global_service_workspace_dispatcher_with_workspace(self):
"""Workspace dispatchers work when workspace is provided"""
mock_backend = Mock()
mock_config_receiver = Mock()
manager = DispatcherManager(mock_backend, mock_config_receiver, auth=Mock())
mock_dispatcher = Mock()
mock_dispatcher.process = AsyncMock(return_value="ws_result")
manager.dispatchers[("alice", "config")] = mock_dispatcher
result = await manager.invoke_global_service(
"data", "responder", "config", workspace="alice",
)
mock_dispatcher.process.assert_called_once_with("data", "responder")
assert result == "ws_result"
def test_dispatch_flow_import_returns_method(self):
"""Test dispatch_flow_import returns correct method"""
mock_backend = Mock()
@ -610,7 +636,7 @@ class TestDispatcherManager:
mock_dispatchers.__getitem__.return_value = mock_dispatcher_class
results = await asyncio.gather(*[
manager.invoke_global_service("data", "responder", "config")
manager.invoke_global_service("data", "responder", "iam")
for _ in range(5)
])
@ -618,7 +644,7 @@ class TestDispatcherManager:
"Dispatcher class instantiated more than once — duplicate consumer bug"
)
assert mock_dispatcher.start.call_count == 1
assert manager.dispatchers[(None, "config")] is mock_dispatcher
assert manager.dispatchers[(None, "iam")] is mock_dispatcher
assert all(r == "result" for r in results)
@pytest.mark.asyncio

View file

@ -33,12 +33,11 @@ def _make_librarian(min_chunk_size=1):
def _make_doc_metadata(
doc_id="doc-1", kind="application/pdf", workspace="alice", title="Test Doc"
doc_id="doc-1", kind="application/pdf", title="Test Doc"
):
meta = MagicMock()
meta.id = doc_id
meta.kind = kind
meta.workspace = workspace
meta.title = title
meta.time = 1700000000
meta.comments = ""
@ -47,21 +46,20 @@ def _make_doc_metadata(
def _make_begin_request(
doc_id="doc-1", kind="application/pdf", workspace="alice",
doc_id="doc-1", kind="application/pdf",
total_size=10_000_000, chunk_size=0
):
req = MagicMock()
req.document_metadata = _make_doc_metadata(doc_id=doc_id, kind=kind, workspace=workspace)
req.document_metadata = _make_doc_metadata(doc_id=doc_id, kind=kind)
req.total_size = total_size
req.chunk_size = chunk_size
return req
def _make_upload_chunk_request(upload_id="up-1", chunk_index=0, workspace="alice", content=b"data"):
def _make_upload_chunk_request(upload_id="up-1", chunk_index=0, content=b"data"):
req = MagicMock()
req.upload_id = upload_id
req.chunk_index = chunk_index
req.workspace = workspace
req.content = base64.b64encode(content)
return req
@ -76,7 +74,7 @@ def _make_session(
if document_metadata is None:
document_metadata = json.dumps({
"id": document_id, "kind": "application/pdf",
"workspace": workspace, "title": "Test", "time": 1700000000,
"title": "Test", "time": 1700000000,
"comments": "", "tags": [],
})
return {
@ -105,7 +103,7 @@ class TestBeginUpload:
lib.blob_store.create_multipart_upload.return_value = "s3-upload-id"
req = _make_begin_request(total_size=10_000_000)
resp = await lib.begin_upload(req)
resp = await lib.begin_upload(req, "alice")
assert resp.error is None
assert resp.upload_id is not None
@ -119,7 +117,7 @@ class TestBeginUpload:
lib.blob_store.create_multipart_upload.return_value = "s3-id"
req = _make_begin_request(total_size=10_000, chunk_size=3000)
resp = await lib.begin_upload(req)
resp = await lib.begin_upload(req, "alice")
assert resp.chunk_size == 3000
assert resp.total_chunks == math.ceil(10_000 / 3000)
@ -130,7 +128,7 @@ class TestBeginUpload:
req = _make_begin_request(kind="")
with pytest.raises(RequestError, match="MIME type.*required"):
await lib.begin_upload(req)
await lib.begin_upload(req, "alice")
@pytest.mark.asyncio
async def test_rejects_duplicate_document(self):
@ -139,7 +137,7 @@ class TestBeginUpload:
req = _make_begin_request()
with pytest.raises(RequestError, match="already exists"):
await lib.begin_upload(req)
await lib.begin_upload(req, "alice")
@pytest.mark.asyncio
async def test_rejects_zero_size(self):
@ -148,7 +146,7 @@ class TestBeginUpload:
req = _make_begin_request(total_size=0)
with pytest.raises(RequestError, match="positive"):
await lib.begin_upload(req)
await lib.begin_upload(req, "alice")
@pytest.mark.asyncio
async def test_rejects_chunk_below_minimum(self):
@ -157,7 +155,7 @@ class TestBeginUpload:
req = _make_begin_request(total_size=10_000, chunk_size=512)
with pytest.raises(RequestError, match="below minimum"):
await lib.begin_upload(req)
await lib.begin_upload(req, "alice")
@pytest.mark.asyncio
async def test_calls_s3_create_multipart(self):
@ -166,7 +164,7 @@ class TestBeginUpload:
lib.blob_store.create_multipart_upload.return_value = "s3-id"
req = _make_begin_request(kind="application/pdf")
await lib.begin_upload(req)
await lib.begin_upload(req, "alice")
lib.blob_store.create_multipart_upload.assert_called_once()
# create_multipart_upload(object_id, kind) — positional args
@ -180,7 +178,7 @@ class TestBeginUpload:
lib.blob_store.create_multipart_upload.return_value = "s3-id"
req = _make_begin_request(total_size=5_000_000)
resp = await lib.begin_upload(req)
resp = await lib.begin_upload(req, "alice")
lib.table_store.create_upload_session.assert_called_once()
kwargs = lib.table_store.create_upload_session.call_args[1]
@ -195,7 +193,7 @@ class TestBeginUpload:
lib.blob_store.create_multipart_upload.return_value = "s3-id"
req = _make_begin_request(kind="text/plain", total_size=1000)
resp = await lib.begin_upload(req)
resp = await lib.begin_upload(req, "alice")
assert resp.error is None
@ -213,7 +211,7 @@ class TestUploadChunk:
lib.blob_store.upload_part.return_value = "etag-1"
req = _make_upload_chunk_request(chunk_index=0, content=b"chunk data")
resp = await lib.upload_chunk(req)
resp = await lib.upload_chunk(req, "alice")
assert resp.error is None
assert resp.chunk_index == 0
@ -229,7 +227,7 @@ class TestUploadChunk:
lib.blob_store.upload_part.return_value = "etag"
req = _make_upload_chunk_request(chunk_index=0)
await lib.upload_chunk(req)
await lib.upload_chunk(req, "alice")
kwargs = lib.blob_store.upload_part.call_args[1]
assert kwargs["part_number"] == 1 # 0-indexed chunk → 1-indexed part
@ -242,7 +240,7 @@ class TestUploadChunk:
lib.blob_store.upload_part.return_value = "etag"
req = _make_upload_chunk_request(chunk_index=3)
await lib.upload_chunk(req)
await lib.upload_chunk(req, "alice")
kwargs = lib.blob_store.upload_part.call_args[1]
assert kwargs["part_number"] == 4
@ -254,7 +252,7 @@ class TestUploadChunk:
req = _make_upload_chunk_request()
with pytest.raises(RequestError, match="not found"):
await lib.upload_chunk(req)
await lib.upload_chunk(req, "alice")
@pytest.mark.asyncio
async def test_rejects_wrong_user(self):
@ -262,9 +260,9 @@ class TestUploadChunk:
session = _make_session(workspace="alice")
lib.table_store.get_upload_session.return_value = session
req = _make_upload_chunk_request(workspace="bob")
req = _make_upload_chunk_request()
with pytest.raises(RequestError, match="Not authorized"):
await lib.upload_chunk(req)
await lib.upload_chunk(req, "bob")
@pytest.mark.asyncio
async def test_rejects_negative_chunk_index(self):
@ -274,7 +272,7 @@ class TestUploadChunk:
req = _make_upload_chunk_request(chunk_index=-1)
with pytest.raises(RequestError, match="Invalid chunk index"):
await lib.upload_chunk(req)
await lib.upload_chunk(req, "alice")
@pytest.mark.asyncio
async def test_rejects_out_of_range_chunk_index(self):
@ -284,7 +282,7 @@ class TestUploadChunk:
req = _make_upload_chunk_request(chunk_index=5)
with pytest.raises(RequestError, match="Invalid chunk index"):
await lib.upload_chunk(req)
await lib.upload_chunk(req, "alice")
@pytest.mark.asyncio
async def test_progress_tracking(self):
@ -297,7 +295,7 @@ class TestUploadChunk:
lib.blob_store.upload_part.return_value = "e3"
req = _make_upload_chunk_request(chunk_index=2)
resp = await lib.upload_chunk(req)
resp = await lib.upload_chunk(req, "alice")
# Dict gets chunk 2 added (len=3), then +1 => 4
assert resp.chunks_received == 4
@ -316,7 +314,7 @@ class TestUploadChunk:
lib.blob_store.upload_part.return_value = "e2"
req = _make_upload_chunk_request(chunk_index=1)
resp = await lib.upload_chunk(req)
resp = await lib.upload_chunk(req, "alice")
# 3 chunks × 3000 = 9000 > 5000, so capped
assert resp.bytes_received <= 5000
@ -330,7 +328,7 @@ class TestUploadChunk:
raw = b"hello world binary data"
req = _make_upload_chunk_request(content=raw)
await lib.upload_chunk(req)
await lib.upload_chunk(req, "alice")
kwargs = lib.blob_store.upload_part.call_args[1]
assert kwargs["data"] == raw
@ -353,9 +351,8 @@ class TestCompleteUpload:
req = MagicMock()
req.upload_id = "up-1"
req.workspace = "alice"
resp = await lib.complete_upload(req)
resp = await lib.complete_upload(req, "alice")
assert resp.error is None
assert resp.document_id == "doc-1"
@ -375,9 +372,8 @@ class TestCompleteUpload:
req = MagicMock()
req.upload_id = "up-1"
req.workspace = "alice"
await lib.complete_upload(req)
await lib.complete_upload(req, "alice")
parts = lib.blob_store.complete_multipart_upload.call_args[1]["parts"]
part_numbers = [p[0] for p in parts]
@ -394,10 +390,9 @@ class TestCompleteUpload:
req = MagicMock()
req.upload_id = "up-1"
req.workspace = "alice"
with pytest.raises(RequestError, match="Missing chunks"):
await lib.complete_upload(req)
await lib.complete_upload(req, "alice")
@pytest.mark.asyncio
async def test_rejects_expired_session(self):
@ -406,10 +401,9 @@ class TestCompleteUpload:
req = MagicMock()
req.upload_id = "up-gone"
req.workspace = "alice"
with pytest.raises(RequestError, match="not found"):
await lib.complete_upload(req)
await lib.complete_upload(req, "alice")
@pytest.mark.asyncio
async def test_rejects_wrong_user(self):
@ -419,10 +413,9 @@ class TestCompleteUpload:
req = MagicMock()
req.upload_id = "up-1"
req.workspace = "bob"
with pytest.raises(RequestError, match="Not authorized"):
await lib.complete_upload(req)
await lib.complete_upload(req, "bob")
# ---------------------------------------------------------------------------
@ -439,9 +432,8 @@ class TestAbortUpload:
req = MagicMock()
req.upload_id = "up-1"
req.workspace = "alice"
resp = await lib.abort_upload(req)
resp = await lib.abort_upload(req, "alice")
assert resp.error is None
lib.blob_store.abort_multipart_upload.assert_called_once_with(
@ -456,10 +448,9 @@ class TestAbortUpload:
req = MagicMock()
req.upload_id = "up-gone"
req.workspace = "alice"
with pytest.raises(RequestError, match="not found"):
await lib.abort_upload(req)
await lib.abort_upload(req, "alice")
@pytest.mark.asyncio
async def test_rejects_wrong_user(self):
@ -469,10 +460,9 @@ class TestAbortUpload:
req = MagicMock()
req.upload_id = "up-1"
req.workspace = "bob"
with pytest.raises(RequestError, match="Not authorized"):
await lib.abort_upload(req)
await lib.abort_upload(req, "bob")
# ---------------------------------------------------------------------------
@ -492,9 +482,8 @@ class TestGetUploadStatus:
req = MagicMock()
req.upload_id = "up-1"
req.workspace = "alice"
resp = await lib.get_upload_status(req)
resp = await lib.get_upload_status(req, "alice")
assert resp.upload_state == "in-progress"
assert resp.chunks_received == 3
@ -510,9 +499,8 @@ class TestGetUploadStatus:
req = MagicMock()
req.upload_id = "up-expired"
req.workspace = "alice"
resp = await lib.get_upload_status(req)
resp = await lib.get_upload_status(req, "alice")
assert resp.upload_state == "expired"
@ -527,9 +515,8 @@ class TestGetUploadStatus:
req = MagicMock()
req.upload_id = "up-1"
req.workspace = "alice"
resp = await lib.get_upload_status(req)
resp = await lib.get_upload_status(req, "alice")
assert resp.missing_chunks == []
assert resp.chunks_received == 3
@ -544,10 +531,9 @@ class TestGetUploadStatus:
req = MagicMock()
req.upload_id = "up-1"
req.workspace = "bob"
with pytest.raises(RequestError, match="Not authorized"):
await lib.get_upload_status(req)
await lib.get_upload_status(req, "bob")
# ---------------------------------------------------------------------------
@ -564,12 +550,11 @@ class TestStreamDocument:
lib.blob_store.get_range = AsyncMock(return_value=b"x" * 2000)
req = MagicMock()
req.workspace = "alice"
req.document_id = "doc-1"
req.chunk_size = 2000
chunks = []
async for resp in lib.stream_document(req):
async for resp in lib.stream_document(req, "alice"):
chunks.append(resp)
assert len(chunks) == 3 # ceil(5000/2000)
@ -587,12 +572,11 @@ class TestStreamDocument:
lib.blob_store.get_range = AsyncMock(return_value=b"x" * 500)
req = MagicMock()
req.workspace = "alice"
req.document_id = "doc-1"
req.chunk_size = 2000
chunks = []
async for resp in lib.stream_document(req):
async for resp in lib.stream_document(req, "alice"):
chunks.append(resp)
assert len(chunks) == 1
@ -608,12 +592,11 @@ class TestStreamDocument:
lib.blob_store.get_range = AsyncMock(return_value=b"x" * 100)
req = MagicMock()
req.workspace = "alice"
req.document_id = "doc-1"
req.chunk_size = 2000
chunks = []
async for resp in lib.stream_document(req):
async for resp in lib.stream_document(req, "alice"):
chunks.append(resp)
# Verify the byte ranges passed to get_range
@ -630,12 +613,11 @@ class TestStreamDocument:
lib.blob_store.get_range = AsyncMock(return_value=b"x")
req = MagicMock()
req.workspace = "alice"
req.document_id = "doc-1"
req.chunk_size = 0 # Should use default 1MB
chunks = []
async for resp in lib.stream_document(req):
async for resp in lib.stream_document(req, "alice"):
chunks.append(resp)
assert len(chunks) == 2 # ceil(2MB / 1MB)
@ -649,12 +631,11 @@ class TestStreamDocument:
lib.blob_store.get_range = AsyncMock(return_value=raw)
req = MagicMock()
req.workspace = "alice"
req.document_id = "doc-1"
req.chunk_size = 1000
chunks = []
async for resp in lib.stream_document(req):
async for resp in lib.stream_document(req, "alice"):
chunks.append(resp)
assert chunks[0].content == base64.b64encode(raw)
@ -666,12 +647,11 @@ class TestStreamDocument:
lib.blob_store.get_size = AsyncMock(return_value=5000)
req = MagicMock()
req.workspace = "alice"
req.document_id = "doc-1"
req.chunk_size = 512
with pytest.raises(RequestError, match="below minimum"):
async for _ in lib.stream_document(req):
async for _ in lib.stream_document(req, "alice"):
pass
@ -698,9 +678,8 @@ class TestListUploads:
]
req = MagicMock()
req.workspace = "alice"
resp = await lib.list_uploads(req)
resp = await lib.list_uploads(req, "alice")
assert resp.error is None
assert len(resp.upload_sessions) == 1
@ -713,8 +692,7 @@ class TestListUploads:
lib.table_store.list_upload_sessions.return_value = []
req = MagicMock()
req.workspace = "alice"
resp = await lib.list_uploads(req)
resp = await lib.list_uploads(req, "alice")
assert resp.upload_sessions == []

View file

@ -30,7 +30,6 @@ class TestDocumentMetadataTranslator:
"title": "Test Document",
"comments": "No comments",
"metadata": [],
"workspace": "alice",
"tags": ["finance", "q4"],
"parent-id": "doc-100",
"document-type": "page",
@ -40,14 +39,12 @@ class TestDocumentMetadataTranslator:
assert obj.time == 1710000000
assert obj.kind == "application/pdf"
assert obj.title == "Test Document"
assert obj.workspace == "alice"
assert obj.tags == ["finance", "q4"]
assert obj.parent_id == "doc-100"
assert obj.document_type == "page"
wire = self.tx.encode(obj)
assert wire["id"] == "doc-123"
assert wire["workspace"] == "alice"
assert wire["parent-id"] == "doc-100"
assert wire["document-type"] == "page"
@ -80,10 +77,9 @@ class TestDocumentMetadataTranslator:
def test_falsy_fields_omitted_from_wire(self):
"""Empty string fields should be omitted from wire format."""
obj = DocumentMetadata(id="", time=0, workspace="")
obj = DocumentMetadata(id="", time=0)
wire = self.tx.encode(obj)
assert "id" not in wire
assert "workspace" not in wire
# ---------------------------------------------------------------------------
@ -101,7 +97,6 @@ class TestProcessingMetadataTranslator:
"document-id": "doc-123",
"time": 1710000000,
"flow": "default",
"workspace": "alice",
"collection": "my-collection",
"tags": ["tag1"],
}
@ -109,20 +104,17 @@ class TestProcessingMetadataTranslator:
assert obj.id == "proc-1"
assert obj.document_id == "doc-123"
assert obj.flow == "default"
assert obj.workspace == "alice"
assert obj.collection == "my-collection"
assert obj.tags == ["tag1"]
wire = self.tx.encode(obj)
assert wire["id"] == "proc-1"
assert wire["document-id"] == "doc-123"
assert wire["workspace"] == "alice"
assert wire["collection"] == "my-collection"
def test_missing_fields_use_defaults(self):
obj = self.tx.decode({})
assert obj.id is None
assert obj.workspace is None
assert obj.collection is None
def test_tags_none_omitted(self):
@ -135,10 +127,9 @@ class TestProcessingMetadataTranslator:
wire = self.tx.encode(obj)
assert wire["tags"] == []
def test_workspace_and_collection_preserved(self):
def test_collection_preserved(self):
"""Core pipeline routing fields must survive round-trip."""
data = {"workspace": "bob", "collection": "research"}
data = {"collection": "research"}
obj = self.tx.decode(data)
wire = self.tx.encode(obj)
assert wire["workspace"] == "bob"
assert wire["collection"] == "research"

View file

@ -41,7 +41,6 @@ def translator():
def graph_embeddings_request():
return KnowledgeRequest(
operation="put-kg-core",
workspace="alice",
id="doc-1",
flow="default",
collection="testcoll",
@ -110,7 +109,7 @@ class TestKnowledgeRequestTranslatorGraphEmbeddings:
assert isinstance(decoded, KnowledgeRequest)
assert decoded.operation == "put-kg-core"
assert decoded.workspace == "alice"
assert decoded.id == "doc-1"
assert decoded.id == "doc-1"
assert decoded.flow == "default"
assert decoded.collection == "testcoll"