async-semantic-llm-cache/semantic_llm_cache/exceptions.py

26 lines
495 B
Python
Raw Permalink Normal View History

2026-03-06 15:54:47 +01:00
"""Custom exceptions for prompt-cache."""
class PromptCacheError(Exception):
    """Base exception for all prompt-cache errors.

    Catch this type to handle any error raised by this package; all
    package-specific exceptions derive from it.
    """
class CacheBackendError(PromptCacheError):
    """Raised when a cache backend operation (read/write/connect) fails.

    Wraps failures coming from the underlying storage layer so callers
    can distinguish backend problems from other cache errors.
    """
class CacheSerializationError(PromptCacheError):
    """Raised when serializing or deserializing a cache entry fails.

    Indicates the cached payload could not be encoded for storage or
    decoded back into a usable value.
    """
class CacheNotFoundError(PromptCacheError):
    """Raised when a requested cache entry does not exist.

    Signals a cache miss to callers that require a strict lookup rather
    than a fallback value.
    """