"""
Accepts entity/vector pairs and writes them to a Qdrant store.
"""

from qdrant_client import QdrantClient
from qdrant_client.models import PointStruct
from qdrant_client.models import Distance, VectorParams

import uuid
import logging

from .... base import DocumentEmbeddingsStoreService, CollectionConfigHandler
from .... base import AsyncProcessor, Consumer, Producer
from .... base import ConsumerMetrics, ProducerMetrics

# Module logger
logger = logging.getLogger(__name__)

default_ident = "doc-embeddings-write"
default_store_uri = 'http://localhost:6333'


class Processor(CollectionConfigHandler, DocumentEmbeddingsStoreService):

    def __init__(self, **params):

        store_uri = params.get("store_uri", default_store_uri)
        api_key = params.get("api_key", None)

        super(Processor, self).__init__(
            **params | {
                "store_uri": store_uri,
                "api_key": api_key,
            }
        )

        self.qdrant = QdrantClient(url=store_uri, api_key=api_key)

        # Register for config push notifications
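        # The types filter restricts this handler to pushes that touch
        # "collection" config entries, so unrelated config changes are ignored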
        self.register_config_handler(self.on_collection_config, types=["collection"])
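
    # Message shape consumed by store_document_embeddings below, inferred
    # from the attribute accesses in this module (the schema itself is
    # defined elsewhere):
    #
    #   message.metadata.user        owning user
    #   message.metadata.collection  logical collection name
    #   message.chunks[i].chunk_id   id linking a vector back to its source chunk
    #   message.chunks[i].vector     the embedding, a list of floats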

    async def store_document_embeddings(self, message):

        # Validate collection exists in config before processing
        if not self.collection_exists(message.metadata.user, message.metadata.collection):
            logger.warning(
                f"Collection {message.metadata.collection} for user {message.metadata.user} "
                f"does not exist in config (likely deleted while data was in-flight). "
                "Dropping message."
            )
            return
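
        # Dropping rather than raising is deliberate: the config is
        # authoritative, so embeddings for a deleted collection can never be
        # stored and a retry would not help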

        for emb in message.chunks:

            chunk_id = emb.chunk_id
            if chunk_id == "":
                continue

            vec = emb.vector
            if not vec:
                continue

            # Create collection name with dimension suffix for lazy creation
            dim = len(vec)
            collection = (
                f"d_{message.metadata.user}_{message.metadata.collection}_{dim}"
            )
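
            # e.g. user "alice", collection "docs", and a 384-dim vector
            # (hypothetical values) yield the name "d_alice_docs_384"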

            # Lazily create the collection if it doesn't exist; the config
            # check at the top of this method ensures the write is authorized
            if not self.qdrant.collection_exists(collection):
                logger.info(f"Lazily creating Qdrant collection {collection} with dimension {dim}")
                self.qdrant.create_collection(
                    collection_name=collection,
                    vectors_config=VectorParams(
                        size=dim,
                        distance=Distance.COSINE
                    )
                )
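
            # Qdrant fixes the vector size per collection at creation time,
            # which is why each embedding dimension gets a collection of its own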

            self.qdrant.upsert(
                collection_name=collection,
                points=[
                    PointStruct(
                        id=str(uuid.uuid4()),
                        vector=vec,
                        payload={
                            "chunk_id": chunk_id,
                        }
                    )
                ]
            )
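
            # Point ids are fresh random UUIDs, so redelivery of the same
            # chunk adds a new point rather than overwriting an existing one;
            # the chunk_id payload is what ties a point back to its source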

    @staticmethod
    def add_args(parser):

        DocumentEmbeddingsStoreService.add_args(parser)

        parser.add_argument(
            '-t', '--store-uri',
            default=default_store_uri,
            help=f'Qdrant URI (default: {default_store_uri})'
        )

        parser.add_argument(
            '-k', '--api-key',
            default=None,
            help='Qdrant API key (default: None)'
        )
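
    # Example invocation, assuming the default ident above also serves as
    # the console-script name (flag values are placeholders):
    #
    #   doc-embeddings-write --store-uri http://localhost:6333 --api-key my-key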

    async def create_collection(self, user: str, collection: str, metadata: dict):
        """
        Handle a collection-create request delivered via config push.
        Collections are created lazily on first write, when the correct
        dimension can be read off the actual embeddings, so nothing is
        created here beyond logging the request.
        """
        logger.info(
            f"Collection create request for {user}/{collection} - "
            "will be created lazily on first write"
        )

    async def delete_collection(self, user: str, collection: str):
        """Delete all Qdrant collections backing this document-embeddings collection, via config push"""
        try:
            prefix = f"d_{user}_{collection}_"

            # Get all collections and filter for matches
            all_collections = self.qdrant.get_collections().collections
            matching_collections = [
                coll.name for coll in all_collections
                if coll.name.startswith(prefix)
            ]
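
            # A logical collection can span several Qdrant collections (one
            # per embedding dimension), hence the prefix scan rather than a
            # single named delete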

            if not matching_collections:
                logger.info(f"No collections found matching prefix {prefix}")
            else:
                for collection_name in matching_collections:
                    self.qdrant.delete_collection(collection_name)
                    logger.info(f"Deleted Qdrant collection: {collection_name}")
                logger.info(f"Deleted {len(matching_collections)} collection(s) for {user}/{collection}")

        except Exception as e:
            logger.error(f"Failed to delete collection {user}/{collection}: {e}", exc_info=True)
            raise


def run():

    Processor.launch(default_ident, __doc__)
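

# A minimal sketch for running the module directly, assuming nothing beyond
# Processor.launch() is needed; deployments would normally reach run()
# through a console-script entry point
if __name__ == "__main__":
    run()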