Fix hard coded vector size (#555)

* Fixed hard-coded embeddings store size

* Vector store lazy-creates collections, with a separate collection for
  each embedding dimension.

* Added tech spec for vector store lifecycle

* Fixed some tests for the new spec
cybermaggedon 2025-11-10 16:56:51 +00:00 committed by GitHub
parent 05b9063fea
commit 6129bb68c1
22 changed files with 793 additions and 572 deletions
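
The core of the change is a dimension-suffixed naming scheme: every collection or index name now ends in the embedding dimension, so vectors of different sizes land in different collections rather than colliding with one hard-coded size. A minimal sketch of the convention (the base names and dimensions below are illustrative; only the suffix rule and the prefix-matching idiom are taken from the diff):

    def collection_name_for(base_name, dimension):
        # e.g. "d_alice_docs" + 384 -> "d_alice_docs_384"
        return f"{base_name}_{dimension}"

    def dimension_variants(base_name, all_collections):
        # Existence checks and deletion both work by matching this prefix
        # against the backend's full collection list.
        prefix = f"{base_name}_"
        return [c for c in all_collections if c.startswith(prefix)]

    assert collection_name_for("d_alice_docs", 384) == "d_alice_docs_384"
    assert dimension_variants(
        "d_alice_docs",
        ["d_alice_docs_384", "d_alice_docs_768", "d_bob_docs_384"],
    ) == ["d_alice_docs_384", "d_alice_docs_768"]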

[Changed file: Milvus document-embeddings vector store (class DocVectors); file paths were not captured]

@@ -50,24 +50,26 @@ class DocVectors:
         logger.debug(f"Reload at {self.next_reload}")

     def collection_exists(self, user, collection):
-        """Check if collection exists (dimension-independent check)"""
-        collection_name = make_safe_collection_name(user, collection, self.prefix)
-        return self.client.has_collection(collection_name)
+        """
+        Check if any collection exists for this user/collection combination.
+        Since collections are dimension-specific, this checks if ANY dimension variant exists.
+        """
+        base_name = make_safe_collection_name(user, collection, self.prefix)
+        prefix = f"{base_name}_"
+        all_collections = self.client.list_collections()
+        return any(coll.startswith(prefix) for coll in all_collections)

     def create_collection(self, user, collection, dimension=384):
-        """Create collection with default dimension"""
-        collection_name = make_safe_collection_name(user, collection, self.prefix)
-        if self.client.has_collection(collection_name):
-            logger.info(f"Collection {collection_name} already exists")
-            return
-        self.init_collection(dimension, user, collection)
-        logger.info(f"Created Milvus collection {collection_name} with dimension {dimension}")
+        """
+        No-op for explicit collection creation.
+        Collections are created lazily on first insert with actual dimension.
+        """
+        logger.info(f"Collection creation requested for {user}/{collection} - will be created lazily on first insert")

     def init_collection(self, dimension, user, collection):
-        collection_name = make_safe_collection_name(user, collection, self.prefix)
+        base_name = make_safe_collection_name(user, collection, self.prefix)
+        collection_name = f"{base_name}_{dimension}"

         pkey_field = FieldSchema(
             name="id",
@@ -115,6 +117,7 @@ class DocVectors:
         )

         self.collections[(dimension, user, collection)] = collection_name
+        logger.info(f"Created Milvus collection {collection_name} with dimension {dimension}")

     def insert(self, embeds, doc, user, collection):
@@ -139,8 +142,15 @@ class DocVectors:
         dim = len(embeds)

+        # Check if collection exists - return empty if not
         if (dim, user, collection) not in self.collections:
-            self.init_collection(dim, user, collection)
+            base_name = make_safe_collection_name(user, collection, self.prefix)
+            collection_name = f"{base_name}_{dim}"
+            if not self.client.has_collection(collection_name):
+                logger.info(f"Collection {collection_name} does not exist, returning empty results")
+                return []
+            # Collection exists but not in cache, add it
+            self.collections[(dim, user, collection)] = collection_name

         coll = self.collections[(dim, user, collection)]
@@ -172,19 +182,27 @@ class DocVectors:
         return res

     def delete_collection(self, user, collection):
-        """Delete a collection for the given user and collection"""
-        collection_name = make_safe_collection_name(user, collection, self.prefix)
-
-        # Check if collection exists
-        if self.client.has_collection(collection_name):
-            # Drop the collection
-            self.client.drop_collection(collection_name)
-            logger.info(f"Deleted Milvus collection: {collection_name}")
-
-            # Remove from our local cache
-            keys_to_remove = [key for key in self.collections.keys() if key[1] == user and key[2] == collection]
-            for key in keys_to_remove:
-                del self.collections[key]
-        else:
-            logger.info(f"Collection {collection_name} does not exist, nothing to delete")
+        """
+        Delete all dimension variants of the collection for the given user/collection.
+        Since collections are created with dimension suffixes, we need to find and delete all.
+        """
+        base_name = make_safe_collection_name(user, collection, self.prefix)
+        prefix = f"{base_name}_"
+
+        # Get all collections and filter for matches
+        all_collections = self.client.list_collections()
+        matching_collections = [coll for coll in all_collections if coll.startswith(prefix)]
+
+        if not matching_collections:
+            logger.info(f"No collections found matching prefix {prefix}")
+        else:
+            for collection_name in matching_collections:
+                self.client.drop_collection(collection_name)
+                logger.info(f"Deleted Milvus collection: {collection_name}")
+            logger.info(f"Deleted {len(matching_collections)} collection(s) for {user}/{collection}")
+
+        # Remove from our local cache
+        keys_to_remove = [key for key in self.collections.keys() if key[1] == user and key[2] == collection]
+        for key in keys_to_remove:
+            del self.collections[key]
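
From a caller's point of view, the store's lifecycle is now fully lazy. A hypothetical walk-through (the "store" variable and the query step are assumed; method names and behaviour follow the diff above):

    store.create_collection("alice", "docs")    # no-op: only logs the request
    store.collection_exists("alice", "docs")    # False: no dimension variant yet

    store.insert(embeds, doc, "alice", "docs")  # first insert creates <base>_<dim>
    store.collection_exists("alice", "docs")    # True: a variant now exists

    # Querying a dimension that was never written returns [] instead of
    # implicitly creating an empty collection, as the old code did.

    store.delete_collection("alice", "docs")    # drops every dimension variant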

[Changed file: Milvus graph/entity-embeddings vector store (class EntityVectors)]

@@ -50,24 +50,26 @@ class EntityVectors:
         logger.debug(f"Reload at {self.next_reload}")

     def collection_exists(self, user, collection):
-        """Check if collection exists (dimension-independent check)"""
-        collection_name = make_safe_collection_name(user, collection, self.prefix)
-        return self.client.has_collection(collection_name)
+        """
+        Check if any collection exists for this user/collection combination.
+        Since collections are dimension-specific, this checks if ANY dimension variant exists.
+        """
+        base_name = make_safe_collection_name(user, collection, self.prefix)
+        prefix = f"{base_name}_"
+        all_collections = self.client.list_collections()
+        return any(coll.startswith(prefix) for coll in all_collections)

     def create_collection(self, user, collection, dimension=384):
-        """Create collection with default dimension"""
-        collection_name = make_safe_collection_name(user, collection, self.prefix)
-        if self.client.has_collection(collection_name):
-            logger.info(f"Collection {collection_name} already exists")
-            return
-        self.init_collection(dimension, user, collection)
-        logger.info(f"Created Milvus collection {collection_name} with dimension {dimension}")
+        """
+        No-op for explicit collection creation.
+        Collections are created lazily on first insert with actual dimension.
+        """
+        logger.info(f"Collection creation requested for {user}/{collection} - will be created lazily on first insert")

     def init_collection(self, dimension, user, collection):
-        collection_name = make_safe_collection_name(user, collection, self.prefix)
+        base_name = make_safe_collection_name(user, collection, self.prefix)
+        collection_name = f"{base_name}_{dimension}"

         pkey_field = FieldSchema(
             name="id",
@@ -115,6 +117,7 @@ class EntityVectors:
         )

         self.collections[(dimension, user, collection)] = collection_name
+        logger.info(f"Created Milvus collection {collection_name} with dimension {dimension}")

     def insert(self, embeds, entity, user, collection):
@@ -139,8 +142,15 @@ class EntityVectors:
         dim = len(embeds)

+        # Check if collection exists - return empty if not
         if (dim, user, collection) not in self.collections:
-            self.init_collection(dim, user, collection)
+            base_name = make_safe_collection_name(user, collection, self.prefix)
+            collection_name = f"{base_name}_{dim}"
+            if not self.client.has_collection(collection_name):
+                logger.info(f"Collection {collection_name} does not exist, returning empty results")
+                return []
+            # Collection exists but not in cache, add it
+            self.collections[(dim, user, collection)] = collection_name

         coll = self.collections[(dim, user, collection)]
@@ -172,19 +182,27 @@ class EntityVectors:
         return res

     def delete_collection(self, user, collection):
-        """Delete a collection for the given user and collection"""
-        collection_name = make_safe_collection_name(user, collection, self.prefix)
-
-        # Check if collection exists
-        if self.client.has_collection(collection_name):
-            # Drop the collection
-            self.client.drop_collection(collection_name)
-            logger.info(f"Deleted Milvus collection: {collection_name}")
-
-            # Remove from our local cache
-            keys_to_remove = [key for key in self.collections.keys() if key[1] == user and key[2] == collection]
-            for key in keys_to_remove:
-                del self.collections[key]
-        else:
-            logger.info(f"Collection {collection_name} does not exist, nothing to delete")
+        """
+        Delete all dimension variants of the collection for the given user/collection.
+        Since collections are created with dimension suffixes, we need to find and delete all.
+        """
+        base_name = make_safe_collection_name(user, collection, self.prefix)
+        prefix = f"{base_name}_"
+
+        # Get all collections and filter for matches
+        all_collections = self.client.list_collections()
+        matching_collections = [coll for coll in all_collections if coll.startswith(prefix)]
+
+        if not matching_collections:
+            logger.info(f"No collections found matching prefix {prefix}")
+        else:
+            for collection_name in matching_collections:
+                self.client.drop_collection(collection_name)
+                logger.info(f"Deleted Milvus collection: {collection_name}")
+            logger.info(f"Deleted {len(matching_collections)} collection(s) for {user}/{collection}")
+
+        # Remove from our local cache
+        keys_to_remove = [key for key in self.collections.keys() if key[1] == user and key[2] == collection]
+        for key in keys_to_remove:
+            del self.collections[key]

[Changed file: Pinecone document-embeddings query processor]

@@ -47,39 +47,6 @@ class Processor(DocumentEmbeddingsQueryService):
             }
         )

         self.last_index_name = None

-    def ensure_index_exists(self, index_name, dim):
-        """Ensure index exists, create if it doesn't"""
-        if index_name != self.last_index_name:
-            if not self.pinecone.has_index(index_name):
-                try:
-                    self.pinecone.create_index(
-                        name=index_name,
-                        dimension=dim,
-                        metric="cosine",
-                        spec=ServerlessSpec(
-                            cloud="aws",
-                            region="us-east-1",
-                        )
-                    )
-                    logger.info(f"Created index: {index_name}")
-
-                    # Wait for index to be ready
-                    import time
-                    for i in range(0, 1000):
-                        if self.pinecone.describe_index(index_name).status["ready"]:
-                            break
-                        time.sleep(1)
-
-                    if not self.pinecone.describe_index(index_name).status["ready"]:
-                        raise RuntimeError("Gave up waiting for index creation")
-
-                except Exception as e:
-                    logger.error(f"Pinecone index creation failed: {e}")
-                    raise e
-
-            self.last_index_name = index_name
-
     async def query_document_embeddings(self, msg):

         try:
@@ -94,11 +61,13 @@ class Processor(DocumentEmbeddingsQueryService):
                 dim = len(vec)

-                index_name = (
-                    "d-" + msg.user + "-" + msg.collection
-                )
+                # Use dimension suffix in index name
+                index_name = f"d-{msg.user}-{msg.collection}-{dim}"

-                self.ensure_index_exists(index_name, dim)
+                # Check if index exists - skip if not
+                if not self.pinecone.has_index(index_name):
+                    logger.info(f"Index {index_name} does not exist, skipping this vector")
+                    continue

                 index = self.pinecone.Index(index_name)

[Changed file: Qdrant document-embeddings query processor]

@@ -38,28 +38,6 @@ class Processor(DocumentEmbeddingsQueryService):
         )

         self.qdrant = QdrantClient(url=store_uri, api_key=api_key)
         self.last_collection = None

-    def ensure_collection_exists(self, collection, dim):
-        """Ensure collection exists, create if it doesn't"""
-        if collection != self.last_collection:
-            if not self.qdrant.collection_exists(collection):
-                try:
-                    self.qdrant.create_collection(
-                        collection_name=collection,
-                        vectors_config=VectorParams(
-                            size=dim, distance=Distance.COSINE
-                        ),
-                    )
-                    logger.info(f"Created collection: {collection}")
-                except Exception as e:
-                    logger.error(f"Qdrant collection creation failed: {e}")
-                    raise e
-
-            self.last_collection = collection
-
     def collection_exists(self, collection):
         """Check if collection exists (no implicit creation)"""
         return self.qdrant.collection_exists(collection)
@@ -71,16 +49,17 @@ class Processor(DocumentEmbeddingsQueryService):
             chunks = []

-            collection = (
-                "d_" + msg.user + "_" + msg.collection
-            )
-
-            # Check if collection exists - return empty if not
-            if not self.collection_exists(collection):
-                logger.info(f"Collection {collection} does not exist, returning empty results")
-                return []
-
             for vec in msg.vectors:
+                # Use dimension suffix in collection name
+                dim = len(vec)
+                collection = f"d_{msg.user}_{msg.collection}_{dim}"
+
+                # Check if collection exists - return empty if not
+                if not self.collection_exists(collection):
+                    logger.info(f"Collection {collection} does not exist, returning empty results")
+                    continue

                 search_result = self.qdrant.query_points(
                     collection_name=collection,
                     query=vec,

[Changed file: Pinecone graph-embeddings query processor]

@@ -49,39 +49,6 @@ class Processor(GraphEmbeddingsQueryService):
             }
         )

         self.last_index_name = None

-    def ensure_index_exists(self, index_name, dim):
-        """Ensure index exists, create if it doesn't"""
-        if index_name != self.last_index_name:
-            if not self.pinecone.has_index(index_name):
-                try:
-                    self.pinecone.create_index(
-                        name=index_name,
-                        dimension=dim,
-                        metric="cosine",
-                        spec=ServerlessSpec(
-                            cloud="aws",
-                            region="us-east-1",
-                        )
-                    )
-                    logger.info(f"Created index: {index_name}")
-
-                    # Wait for index to be ready
-                    import time
-                    for i in range(0, 1000):
-                        if self.pinecone.describe_index(index_name).status["ready"]:
-                            break
-                        time.sleep(1)
-
-                    if not self.pinecone.describe_index(index_name).status["ready"]:
-                        raise RuntimeError("Gave up waiting for index creation")
-
-                except Exception as e:
-                    logger.error(f"Pinecone index creation failed: {e}")
-                    raise e
-
-            self.last_index_name = index_name
-
     def create_value(self, ent):
         if ent.startswith("http://") or ent.startswith("https://"):
             return Value(value=ent, is_uri=True)
@@ -103,11 +70,13 @@ class Processor(GraphEmbeddingsQueryService):
                 dim = len(vec)

-                index_name = (
-                    "t-" + msg.user + "-" + msg.collection
-                )
+                # Use dimension suffix in index name
+                index_name = f"t-{msg.user}-{msg.collection}-{dim}"

-                self.ensure_index_exists(index_name, dim)
+                # Check if index exists - skip if not
+                if not self.pinecone.has_index(index_name):
+                    logger.info(f"Index {index_name} does not exist, skipping this vector")
+                    continue

                 index = self.pinecone.Index(index_name)

[Changed file: Qdrant graph-embeddings query processor]

@@ -38,28 +38,6 @@ class Processor(GraphEmbeddingsQueryService):
         )

         self.qdrant = QdrantClient(url=store_uri, api_key=api_key)
         self.last_collection = None

-    def ensure_collection_exists(self, collection, dim):
-        """Ensure collection exists, create if it doesn't"""
-        if collection != self.last_collection:
-            if not self.qdrant.collection_exists(collection):
-                try:
-                    self.qdrant.create_collection(
-                        collection_name=collection,
-                        vectors_config=VectorParams(
-                            size=dim, distance=Distance.COSINE
-                        ),
-                    )
-                    logger.info(f"Created collection: {collection}")
-                except Exception as e:
-                    logger.error(f"Qdrant collection creation failed: {e}")
-                    raise e
-
-            self.last_collection = collection
-
     def collection_exists(self, collection):
         """Check if collection exists (no implicit creation)"""
         return self.qdrant.collection_exists(collection)
@@ -78,17 +56,17 @@ class Processor(GraphEmbeddingsQueryService):
             entity_set = set()
             entities = []

-            collection = (
-                "t_" + msg.user + "_" + msg.collection
-            )
-
-            # Check if collection exists - return empty if not
-            if not self.collection_exists(collection):
-                logger.info(f"Collection {collection} does not exist, returning empty results")
-                return []
-
             for vec in msg.vectors:
+                # Use dimension suffix in collection name
+                dim = len(vec)
+                collection = f"t_{msg.user}_{msg.collection}_{dim}"
+
+                # Check if collection exists - skip if not
+                if not self.collection_exists(collection):
+                    logger.info(f"Collection {collection} does not exist, skipping this vector")
+                    continue

                 # Heuristic hack, get (2*limit), so that we have more chance
                 # of getting (limit) entities
                 search_result = self.qdrant.query_points(

[Changed file: Milvus document-embeddings store processor]

@@ -132,20 +132,20 @@ class Processor(DocumentEmbeddingsStoreService):
         await self.storage_response_producer.send(response)

     async def handle_create_collection(self, request):
-        """Create a Milvus collection for document embeddings"""
+        """
+        No-op for collection creation - collections are created lazily on first write
+        with the correct dimension determined from the actual embeddings.
+        """
         try:
-            if self.vecstore.collection_exists(request.user, request.collection):
-                logger.info(f"Collection {request.user}/{request.collection} already exists")
-            else:
-                self.vecstore.create_collection(request.user, request.collection)
-                logger.info(f"Created collection {request.user}/{request.collection}")
+            logger.info(f"Collection create request for {request.user}/{request.collection} - will be created lazily on first write")
+            self.vecstore.create_collection(request.user, request.collection)

             # Send success response
             response = StorageManagementResponse(error=None)
             await self.storage_response_producer.send(response)

         except Exception as e:
-            logger.error(f"Failed to create collection: {e}", exc_info=True)
+            logger.error(f"Failed to handle create collection request: {e}", exc_info=True)
             response = StorageManagementResponse(
                 error=Error(
                     type="creation_error",

[Changed file: Pinecone document-embeddings store processor]

@@ -123,19 +123,6 @@ class Processor(DocumentEmbeddingsStoreService):
     async def store_document_embeddings(self, message):

-        index_name = (
-            "d-" + message.metadata.user + "-" + message.metadata.collection
-        )
-
-        # Validate collection exists before accepting writes
-        if not self.pinecone.has_index(index_name):
-            error_msg = (
-                f"Collection {message.metadata.collection} does not exist. "
-                f"Create it first with tg-set-collection."
-            )
-            logger.error(error_msg)
-            raise ValueError(error_msg)
-
         for emb in message.chunks:

             if emb.chunk is None or emb.chunk == b"": continue
@@ -145,6 +132,17 @@ class Processor(DocumentEmbeddingsStoreService):
             for vec in emb.vectors:

+                # Create index name with dimension suffix for lazy creation
+                dim = len(vec)
+                index_name = (
+                    f"d-{message.metadata.user}-{message.metadata.collection}-{dim}"
+                )
+
+                # Lazily create index if it doesn't exist
+                if not self.pinecone.has_index(index_name):
+                    logger.info(f"Lazily creating Pinecone index {index_name} with dimension {dim}")
+                    self.create_index(index_name, dim)
+
                 index = self.pinecone.Index(index_name)

                 # Generate unique ID for each vector
@@ -220,23 +218,19 @@ class Processor(DocumentEmbeddingsStoreService):
         await self.storage_response_producer.send(response)

     async def handle_create_collection(self, request):
-        """Create a Pinecone index for document embeddings"""
+        """
+        No-op for collection creation - indexes are created lazily on first write
+        with the correct dimension determined from the actual embeddings.
+        """
         try:
-            index_name = f"d-{request.user}-{request.collection}"
-
-            if self.pinecone.has_index(index_name):
-                logger.info(f"Pinecone index {index_name} already exists")
-            else:
-                # Create with default dimension - will need to be recreated if dimension doesn't match
-                self.create_index(index_name, dim=384)
-                logger.info(f"Created Pinecone index: {index_name}")
+            logger.info(f"Collection create request for {request.user}/{request.collection} - will be created lazily on first write")

             # Send success response
             response = StorageManagementResponse(error=None)
             await self.storage_response_producer.send(response)

         except Exception as e:
-            logger.error(f"Failed to create collection: {e}", exc_info=True)
+            logger.error(f"Failed to handle create collection request: {e}", exc_info=True)
             response = StorageManagementResponse(
                 error=Error(
                     type="creation_error",
@@ -246,22 +240,34 @@ class Processor(DocumentEmbeddingsStoreService):
         await self.storage_response_producer.send(response)

     async def handle_delete_collection(self, request):
-        """Delete the collection for document embeddings"""
+        """
+        Delete all dimension variants of the index for document embeddings.
+        Since indexes are created with dimension suffixes (e.g., d-user-coll-384),
+        we need to find and delete all matching indexes.
+        """
         try:
-            index_name = f"d-{request.user}-{request.collection}"
+            prefix = f"d-{request.user}-{request.collection}-"

-            if self.pinecone.has_index(index_name):
-                self.pinecone.delete_index(index_name)
-                logger.info(f"Deleted Pinecone index: {index_name}")
-            else:
-                logger.info(f"Index {index_name} does not exist, nothing to delete")
+            # Get all indexes and filter for matches
+            all_indexes = self.pinecone.list_indexes()
+            matching_indexes = [
+                idx.name for idx in all_indexes
+                if idx.name.startswith(prefix)
+            ]
+
+            if not matching_indexes:
+                logger.info(f"No indexes found matching prefix {prefix}")
+            else:
+                for index_name in matching_indexes:
+                    self.pinecone.delete_index(index_name)
+                    logger.info(f"Deleted Pinecone index: {index_name}")
+                logger.info(f"Deleted {len(matching_indexes)} index(es) for {request.user}/{request.collection}")

             # Send success response
             response = StorageManagementResponse(
                 error=None # No error means success
             )
             await self.storage_response_producer.send(response)
             logger.info(f"Successfully deleted collection {request.user}/{request.collection}")

         except Exception as e:
             logger.error(f"Failed to delete collection: {e}")
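
The delete handler's prefix scan is easy to exercise on its own. A minimal sketch against the Pinecone client, using the same list_indexes/delete_index calls as the handler (the API key and the alice/docs names are placeholders):

    from pinecone import Pinecone

    pc = Pinecone(api_key="...")  # placeholder credentials

    # Remove every dimension variant of the "alice"/"docs" document index.
    prefix = "d-alice-docs-"
    for idx in pc.list_indexes():
        if idx.name.startswith(prefix):
            pc.delete_index(idx.name)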

[Changed file: Qdrant document-embeddings store processor]

@@ -79,20 +79,6 @@ class Processor(DocumentEmbeddingsStoreService):
     async def store_document_embeddings(self, message):

-        # Validate collection exists before accepting writes
-        collection = (
-            "d_" + message.metadata.user + "_" +
-            message.metadata.collection
-        )
-
-        if not self.qdrant.collection_exists(collection):
-            error_msg = (
-                f"Collection {message.metadata.collection} does not exist. "
-                f"Create it first with tg-set-collection."
-            )
-            logger.error(error_msg)
-            raise ValueError(error_msg)
-
         for emb in message.chunks:

             chunk = emb.chunk.decode("utf-8")
@@ -100,6 +86,23 @@ class Processor(DocumentEmbeddingsStoreService):
             for vec in emb.vectors:

+                # Create collection name with dimension suffix for lazy creation
+                dim = len(vec)
+                collection = (
+                    f"d_{message.metadata.user}_{message.metadata.collection}_{dim}"
+                )
+
+                # Lazily create collection if it doesn't exist
+                if not self.qdrant.collection_exists(collection):
+                    logger.info(f"Lazily creating Qdrant collection {collection} with dimension {dim}")
+                    self.qdrant.create_collection(
+                        collection_name=collection,
+                        vectors_config=VectorParams(
+                            size=dim,
+                            distance=Distance.COSINE
+                        )
+                    )
+
                 self.qdrant.upsert(
                     collection_name=collection,
                     points=[
@@ -160,30 +163,19 @@ class Processor(DocumentEmbeddingsStoreService):
         await self.storage_response_producer.send(response)

     async def handle_create_collection(self, request):
-        """Create a Qdrant collection for document embeddings"""
+        """
+        No-op for collection creation - collections are created lazily on first write
+        with the correct dimension determined from the actual embeddings.
+        """
         try:
-            collection_name = f"d_{request.user}_{request.collection}"
-
-            if self.qdrant.collection_exists(collection_name):
-                logger.info(f"Qdrant collection {collection_name} already exists")
-            else:
-                # Create collection with default dimension (will be recreated with correct dim on first write if needed)
-                # Using a placeholder dimension - actual dimension determined by first embedding
-                self.qdrant.create_collection(
-                    collection_name=collection_name,
-                    vectors_config=VectorParams(
-                        size=384, # Default dimension, common for many models
-                        distance=Distance.COSINE
-                    )
-                )
-                logger.info(f"Created Qdrant collection: {collection_name}")
+            logger.info(f"Collection create request for {request.user}/{request.collection} - will be created lazily on first write")

             # Send success response
             response = StorageManagementResponse(error=None)
             await self.storage_response_producer.send(response)

         except Exception as e:
-            logger.error(f"Failed to create collection: {e}", exc_info=True)
+            logger.error(f"Failed to handle create collection request: {e}", exc_info=True)
             response = StorageManagementResponse(
                 error=Error(
                     type="creation_error",
@@ -193,22 +185,34 @@ class Processor(DocumentEmbeddingsStoreService):
         await self.storage_response_producer.send(response)

     async def handle_delete_collection(self, request):
-        """Delete the collection for document embeddings"""
+        """
+        Delete all dimension variants of the collection for document embeddings.
+        Since collections are created with dimension suffixes (e.g., d_user_coll_384),
+        we need to find and delete all matching collections.
+        """
         try:
-            collection_name = f"d_{request.user}_{request.collection}"
+            prefix = f"d_{request.user}_{request.collection}_"

-            if self.qdrant.collection_exists(collection_name):
-                self.qdrant.delete_collection(collection_name)
-                logger.info(f"Deleted Qdrant collection: {collection_name}")
-            else:
-                logger.info(f"Collection {collection_name} does not exist, nothing to delete")
+            # Get all collections and filter for matches
+            all_collections = self.qdrant.get_collections().collections
+            matching_collections = [
+                coll.name for coll in all_collections
+                if coll.name.startswith(prefix)
+            ]
+
+            if not matching_collections:
+                logger.info(f"No collections found matching prefix {prefix}")
+            else:
+                for collection_name in matching_collections:
+                    self.qdrant.delete_collection(collection_name)
+                    logger.info(f"Deleted Qdrant collection: {collection_name}")
+                logger.info(f"Deleted {len(matching_collections)} collection(s) for {request.user}/{request.collection}")

             # Send success response
             response = StorageManagementResponse(
                 error=None # No error means success
             )
             await self.storage_response_producer.send(response)
             logger.info(f"Successfully deleted collection {request.user}/{request.collection}")

         except Exception as e:
             logger.error(f"Failed to delete collection: {e}")
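
In the same spirit as the updated tests, the lazy Qdrant lifecycle can be checked end to end against a local instance (a sketch; the URL, the alice/docs names, and the toy 3-dimensional vector are illustrative):

    from qdrant_client import QdrantClient
    from qdrant_client.models import Distance, PointStruct, VectorParams

    client = QdrantClient(url="http://localhost:6333")  # illustrative URL
    name = "d_alice_docs_3"  # dimension-suffixed, as this commit creates them

    # Lazy create on first write, as store_document_embeddings now does.
    if not client.collection_exists(name):
        client.create_collection(
            collection_name=name,
            vectors_config=VectorParams(size=3, distance=Distance.COSINE),
        )

    client.upsert(collection_name=name, points=[
        PointStruct(id=1, vector=[0.1, 0.2, 0.3], payload={"doc": "hello"}),
    ])

    # The delete handler's prefix scan finds the variant; drop it to clean up.
    variants = [c.name for c in client.get_collections().collections
                if c.name.startswith("d_alice_docs_")]
    assert "d_alice_docs_3" in variants
    client.delete_collection("d_alice_docs_3")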

[Changed file: Milvus graph-embeddings store processor]

@@ -128,20 +128,20 @@ class Processor(GraphEmbeddingsStoreService):
         await self.storage_response_producer.send(response)

     async def handle_create_collection(self, request):
-        """Create a Milvus collection for graph embeddings"""
+        """
+        No-op for collection creation - collections are created lazily on first write
+        with the correct dimension determined from the actual embeddings.
+        """
         try:
-            if self.vecstore.collection_exists(request.user, request.collection):
-                logger.info(f"Collection {request.user}/{request.collection} already exists")
-            else:
-                self.vecstore.create_collection(request.user, request.collection)
-                logger.info(f"Created collection {request.user}/{request.collection}")
+            logger.info(f"Collection create request for {request.user}/{request.collection} - will be created lazily on first write")
+            self.vecstore.create_collection(request.user, request.collection)

             # Send success response
             response = StorageManagementResponse(error=None)
             await self.storage_response_producer.send(response)

         except Exception as e:
-            logger.error(f"Failed to create collection: {e}", exc_info=True)
+            logger.error(f"Failed to handle create collection request: {e}", exc_info=True)
             response = StorageManagementResponse(
                 error=Error(
                     type="creation_error",

[Changed file: Pinecone graph-embeddings store processor]

@@ -123,19 +123,6 @@ class Processor(GraphEmbeddingsStoreService):
     async def store_graph_embeddings(self, message):

-        index_name = (
-            "t-" + message.metadata.user + "-" + message.metadata.collection
-        )
-
-        # Validate collection exists before accepting writes
-        if not self.pinecone.has_index(index_name):
-            error_msg = (
-                f"Collection {message.metadata.collection} does not exist. "
-                f"Create it first with tg-set-collection."
-            )
-            logger.error(error_msg)
-            raise ValueError(error_msg)
-
         for entity in message.entities:

             if entity.entity.value == "" or entity.entity.value is None:
@@ -143,6 +130,17 @@ class Processor(GraphEmbeddingsStoreService):
             for vec in entity.vectors:

+                # Create index name with dimension suffix for lazy creation
+                dim = len(vec)
+                index_name = (
+                    f"t-{message.metadata.user}-{message.metadata.collection}-{dim}"
+                )
+
+                # Lazily create index if it doesn't exist
+                if not self.pinecone.has_index(index_name):
+                    logger.info(f"Lazily creating Pinecone index {index_name} with dimension {dim}")
+                    self.create_index(index_name, dim)
+
                 index = self.pinecone.Index(index_name)

                 # Generate unique ID for each vector
@@ -218,23 +216,19 @@ class Processor(GraphEmbeddingsStoreService):
         await self.storage_response_producer.send(response)

     async def handle_create_collection(self, request):
-        """Create a Pinecone index for graph embeddings"""
+        """
+        No-op for collection creation - indexes are created lazily on first write
+        with the correct dimension determined from the actual embeddings.
+        """
         try:
-            index_name = f"t-{request.user}-{request.collection}"
-
-            if self.pinecone.has_index(index_name):
-                logger.info(f"Pinecone index {index_name} already exists")
-            else:
-                # Create with default dimension - will need to be recreated if dimension doesn't match
-                self.create_index(index_name, dim=384)
-                logger.info(f"Created Pinecone index: {index_name}")
+            logger.info(f"Collection create request for {request.user}/{request.collection} - will be created lazily on first write")

             # Send success response
             response = StorageManagementResponse(error=None)
             await self.storage_response_producer.send(response)

         except Exception as e:
-            logger.error(f"Failed to create collection: {e}", exc_info=True)
+            logger.error(f"Failed to handle create collection request: {e}", exc_info=True)
             response = StorageManagementResponse(
                 error=Error(
                     type="creation_error",
@@ -244,22 +238,34 @@ class Processor(GraphEmbeddingsStoreService):
         await self.storage_response_producer.send(response)

     async def handle_delete_collection(self, request):
-        """Delete the collection for graph embeddings"""
+        """
+        Delete all dimension variants of the index for graph embeddings.
+        Since indexes are created with dimension suffixes (e.g., t-user-coll-384),
+        we need to find and delete all matching indexes.
+        """
         try:
-            index_name = f"t-{request.user}-{request.collection}"
+            prefix = f"t-{request.user}-{request.collection}-"

-            if self.pinecone.has_index(index_name):
-                self.pinecone.delete_index(index_name)
-                logger.info(f"Deleted Pinecone index: {index_name}")
-            else:
-                logger.info(f"Index {index_name} does not exist, nothing to delete")
+            # Get all indexes and filter for matches
+            all_indexes = self.pinecone.list_indexes()
+            matching_indexes = [
+                idx.name for idx in all_indexes
+                if idx.name.startswith(prefix)
+            ]
+
+            if not matching_indexes:
+                logger.info(f"No indexes found matching prefix {prefix}")
+            else:
+                for index_name in matching_indexes:
+                    self.pinecone.delete_index(index_name)
+                    logger.info(f"Deleted Pinecone index: {index_name}")
+                logger.info(f"Deleted {len(matching_indexes)} index(es) for {request.user}/{request.collection}")

             # Send success response
             response = StorageManagementResponse(
                 error=None # No error means success
             )
             await self.storage_response_producer.send(response)
             logger.info(f"Successfully deleted collection {request.user}/{request.collection}")

         except Exception as e:
             logger.error(f"Failed to delete collection: {e}")

[Changed file: Qdrant graph-embeddings store processor]

@@ -69,22 +69,6 @@ class Processor(GraphEmbeddingsStoreService):
             metrics=storage_response_metrics,
         )

-    def get_collection(self, user, collection):
-        """Get collection name and validate it exists"""
-        cname = (
-            "t_" + user + "_" + collection
-        )
-
-        if not self.qdrant.collection_exists(cname):
-            error_msg = (
-                f"Collection {collection} does not exist. "
-                f"Create it first with tg-set-collection."
-            )
-            logger.error(error_msg)
-            raise ValueError(error_msg)
-
-        return cname
-
     async def start(self):
         """Start the processor and its storage management consumer"""
         await super().start()
@@ -101,10 +85,23 @@ class Processor(GraphEmbeddingsStoreService):
             for vec in entity.vectors:

-                collection = self.get_collection(
-                    message.metadata.user, message.metadata.collection
-                )
+                # Create collection name with dimension suffix for lazy creation
+                dim = len(vec)
+                collection = (
+                    f"t_{message.metadata.user}_{message.metadata.collection}_{dim}"
+                )
+
+                # Lazily create collection if it doesn't exist
+                if not self.qdrant.collection_exists(collection):
+                    logger.info(f"Lazily creating Qdrant collection {collection} with dimension {dim}")
+                    self.qdrant.create_collection(
+                        collection_name=collection,
+                        vectors_config=VectorParams(
+                            size=dim,
+                            distance=Distance.COSINE
+                        )
+                    )

                 self.qdrant.upsert(
                     collection_name=collection,
                     points=[
@@ -165,30 +162,19 @@ class Processor(GraphEmbeddingsStoreService):
         await self.storage_response_producer.send(response)

     async def handle_create_collection(self, request):
-        """Create a Qdrant collection for graph embeddings"""
+        """
+        No-op for collection creation - collections are created lazily on first write
+        with the correct dimension determined from the actual embeddings.
+        """
         try:
-            collection_name = f"t_{request.user}_{request.collection}"
-
-            if self.qdrant.collection_exists(collection_name):
-                logger.info(f"Qdrant collection {collection_name} already exists")
-            else:
-                # Create collection with default dimension (will be recreated with correct dim on first write if needed)
-                # Using a placeholder dimension - actual dimension determined by first embedding
-                self.qdrant.create_collection(
-                    collection_name=collection_name,
-                    vectors_config=VectorParams(
-                        size=384, # Default dimension, common for many models
-                        distance=Distance.COSINE
-                    )
-                )
-                logger.info(f"Created Qdrant collection: {collection_name}")
+            logger.info(f"Collection create request for {request.user}/{request.collection} - will be created lazily on first write")

             # Send success response
             response = StorageManagementResponse(error=None)
             await self.storage_response_producer.send(response)

         except Exception as e:
-            logger.error(f"Failed to create collection: {e}", exc_info=True)
+            logger.error(f"Failed to handle create collection request: {e}", exc_info=True)
             response = StorageManagementResponse(
                 error=Error(
                     type="creation_error",
@@ -198,22 +184,34 @@ class Processor(GraphEmbeddingsStoreService):
         await self.storage_response_producer.send(response)

     async def handle_delete_collection(self, request):
-        """Delete the collection for graph embeddings"""
+        """
+        Delete all dimension variants of the collection for graph embeddings.
+        Since collections are created with dimension suffixes (e.g., t_user_coll_384),
+        we need to find and delete all matching collections.
+        """
         try:
-            collection_name = f"t_{request.user}_{request.collection}"
+            prefix = f"t_{request.user}_{request.collection}_"

-            if self.qdrant.collection_exists(collection_name):
-                self.qdrant.delete_collection(collection_name)
-                logger.info(f"Deleted Qdrant collection: {collection_name}")
-            else:
-                logger.info(f"Collection {collection_name} does not exist, nothing to delete")
+            # Get all collections and filter for matches
+            all_collections = self.qdrant.get_collections().collections
+            matching_collections = [
+                coll.name for coll in all_collections
+                if coll.name.startswith(prefix)
+            ]
+
+            if not matching_collections:
+                logger.info(f"No collections found matching prefix {prefix}")
+            else:
+                for collection_name in matching_collections:
+                    self.qdrant.delete_collection(collection_name)
+                    logger.info(f"Deleted Qdrant collection: {collection_name}")
+                logger.info(f"Deleted {len(matching_collections)} collection(s) for {request.user}/{request.collection}")

             # Send success response
             response = StorageManagementResponse(
                 error=None # No error means success
             )
             await self.storage_response_producer.send(response)
             logger.info(f"Successfully deleted collection {request.user}/{request.collection}")

         except Exception as e:
             logger.error(f"Failed to delete collection: {e}")