The id field in pipeline Metadata was being overwritten at each processing stage (#686)

The id field in pipeline Metadata was being overwritten at each processing
stage (document → page → chunk), causing knowledge storage to create
separate cores per chunk instead of grouping by document.

Add a root field that:
- Is set by librarian to the original document ID
- Is copied unchanged through PDF decoder, chunkers, and extractors
- Is used by knowledge storage for document_id grouping (with fallback to id)

Changes:
- Add root field to Metadata schema with empty string default
- Set root=document.id in librarian when initiating document processing
- Copy root through PDF decoder, recursive chunker, and all extractors
- Update knowledge storage to use root (or id as fallback) for grouping
- Add root handling to translators and gateway serialization
- Update test mock Metadata class to include root parameter
This commit is contained in:
cybermaggedon 2026-03-11 12:16:39 +00:00 committed by GitHub
parent aa4f5c6c00
commit 286f762369
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
15 changed files with 48 additions and 4 deletions

View file

@@ -178,6 +178,7 @@ class Processor(ChunkingService):
await flow("triples").send(Triples(
metadata=Metadata(
id=chunk_uri,
root=v.metadata.root,
user=v.metadata.user,
collection=v.metadata.collection,
),
@@ -188,6 +189,7 @@ class Processor(ChunkingService):
r = Chunk(
metadata=Metadata(
id=chunk_uri,
root=v.metadata.root,
user=v.metadata.user,
collection=v.metadata.collection,
),

View file

@@ -302,6 +302,7 @@ class Processor(FlowProcessor):
await flow("triples").send(Triples(
metadata=Metadata(
id=pg_uri,
root=v.metadata.root,
user=v.metadata.user,
collection=v.metadata.collection,
),
@@ -313,6 +314,7 @@ class Processor(FlowProcessor):
r = TextDocument(
metadata=Metadata(
id=pg_uri,
root=v.metadata.root,
user=v.metadata.user,
collection=v.metadata.collection,
),

View file

@@ -104,6 +104,7 @@ class Processor(FlowProcessor):
tpls = Triples(
metadata = Metadata(
id = metadata.id,
root = metadata.root,
user = metadata.user,
collection = metadata.collection,
),
@@ -116,6 +117,7 @@ class Processor(FlowProcessor):
ecs = EntityContexts(
metadata = Metadata(
id = metadata.id,
root = metadata.root,
user = metadata.user,
collection = metadata.collection,
),

View file

@@ -218,6 +218,7 @@ class Processor(FlowProcessor):
flow("triples"),
Metadata(
id=v.metadata.id,
root=v.metadata.root,
user=v.metadata.user,
collection=v.metadata.collection,
),
@@ -231,6 +232,7 @@ class Processor(FlowProcessor):
flow("entity-contexts"),
Metadata(
id=v.metadata.id,
root=v.metadata.root,
user=v.metadata.user,
collection=v.metadata.collection,
),

View file

@@ -554,6 +554,7 @@ class Processor(FlowProcessor):
t = Triples(
metadata=Metadata(
id=metadata.id,
root=metadata.root,
user=metadata.user,
collection=metadata.collection,
),
@@ -566,6 +567,7 @@ class Processor(FlowProcessor):
ec = EntityContexts(
metadata=Metadata(
id=metadata.id,
root=metadata.root,
user=metadata.user,
collection=metadata.collection,
),

View file

@@ -219,6 +219,7 @@ class Processor(FlowProcessor):
flow("triples"),
Metadata(
id=v.metadata.id,
root=v.metadata.root,
user=v.metadata.user,
collection=v.metadata.collection,
),

View file

@@ -272,6 +272,7 @@ class Processor(FlowProcessor):
extracted = ExtractedObject(
metadata=Metadata(
id=f"{v.metadata.id}:{schema_name}",
root=v.metadata.root,
user=v.metadata.user,
collection=v.metadata.collection,
),

View file

@@ -37,6 +37,7 @@ def serialize_triples(message):
return {
"metadata": {
"id": message.metadata.id,
"root": message.metadata.root,
"user": message.metadata.user,
"collection": message.metadata.collection,
},
@@ -48,6 +49,7 @@ def serialize_graph_embeddings(message):
return {
"metadata": {
"id": message.metadata.id,
"root": message.metadata.root,
"user": message.metadata.user,
"collection": message.metadata.collection,
},
@@ -65,6 +67,7 @@ def serialize_entity_contexts(message):
return {
"metadata": {
"id": message.metadata.id,
"root": message.metadata.root,
"user": message.metadata.user,
"collection": message.metadata.collection,
},
@@ -82,6 +85,7 @@ def serialize_document_embeddings(message):
return {
"metadata": {
"id": message.metadata.id,
"root": message.metadata.root,
"user": message.metadata.user,
"collection": message.metadata.collection,
},

View file

@@ -48,6 +48,7 @@ class TriplesImport:
elt = Triples(
metadata=Metadata(
id=data["metadata"]["id"],
root=data["metadata"].get("root", ""),
user=data["metadata"]["user"],
collection=data["metadata"]["collection"],
),

View file

@@ -334,6 +334,7 @@ class Processor(AsyncProcessor):
triples_msg = Triples(
metadata=Metadata(
id=doc_uri,
root=document.id,
user=processing.user,
collection=processing.collection,
),
@@ -380,6 +381,7 @@ class Processor(AsyncProcessor):
doc = TextDocument(
metadata = Metadata(
id = document.id,
root = document.id,
user = processing.user,
collection = processing.collection
),
@@ -390,6 +392,7 @@ class Processor(AsyncProcessor):
doc = TextDocument(
metadata = Metadata(
id = document.id,
root = document.id,
user = processing.user,
collection = processing.collection
),
@@ -405,6 +408,7 @@ class Processor(AsyncProcessor):
doc = Document(
metadata = Metadata(
id = document.id,
root = document.id,
user = processing.user,
collection = processing.collection
),
@@ -415,6 +419,7 @@ class Processor(AsyncProcessor):
doc = Document(
metadata = Metadata(
id = document.id,
root = document.id,
user = processing.user,
collection = processing.collection
),

View file

@@ -233,7 +233,7 @@ class KnowledgeTableStore:
self.insert_triples_stmt,
(
uuid.uuid4(), m.metadata.user,
m.metadata.id, when,
m.metadata.root or m.metadata.id, when,
[], triples,
)
)
@@ -265,7 +265,7 @@ class KnowledgeTableStore:
self.insert_graph_embeddings_stmt,
(
uuid.uuid4(), m.metadata.user,
m.metadata.id, when,
m.metadata.root or m.metadata.id, when,
[], entities,
)
)
@@ -297,7 +297,7 @@ class KnowledgeTableStore:
self.insert_document_embeddings_stmt,
(
uuid.uuid4(), m.metadata.user,
m.metadata.id, when,
m.metadata.root or m.metadata.id, when,
[], chunks,
)
)