SurfSense/surfsense_backend/app/schemas/new_llm_config.py
Vonic 4eb6ed18d6 Epic 5 Complete: Billing, Subscriptions, and Admin Features
Resolve all 5 deferred items from Epic 5 adversarial code review:
- Migration 124: Add CASCADE to subscriptionstatus enum drop (prevent orphaned references)
- Stripe rate limiting: In-memory per-user limiter (20 calls/60s) on verify-checkout-session
- Subscription request cooldown: 24h cooldown before resubmitting rejected requests
- Token reset date: Initialize on first subscription activation
- Checkout URL validation: Confirmed HTTPS-only (Stripe always returns HTTPS)

Implement Story 5.4 (Usage Tracking & Rate Limit Enforcement):
- Page quota pre-check at HTTP upload layer
- Extend UserRead schema with token quota fields
- Frontend 402 error handling in document upload
- Quota indicator in dashboard sidebar

Story 5.5 (Admin Seed & Approval Flow):
- Seed admin user migration with default credentials warning
- Subscription approval/rejection routes with admin guard
- 24h rejection cooldown enforcement

Story 5.6 (Admin-Only Model Config):
- Global model config visible across all search spaces
- Per-search-space model configs with user access control
- Superuser CRUD for global configs

Additional fixes from code review:
- PageLimitService: PAST_DUE subscriptions enforce free-tier limits
- TokenQuotaService: PAST_DUE subscriptions enforce free-tier limits
- Config routes: Fixed user_id.is_(None) filter on mutation endpoints
- Stripe webhook: Added guard against silent plan downgrade on unrecognized price_id

All changes formatted with Ruff (Python) and Biome (TypeScript).

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-15 03:54:45 +07:00

221 lines
7 KiB
Python

"""
Pydantic schemas for the NewLLMConfig API.
NewLLMConfig combines model settings with prompt configuration:
- LLM provider, model, API key, etc.
- Configurable system instructions
- Citation toggle
"""
import uuid
from datetime import datetime
from typing import Any
from pydantic import BaseModel, ConfigDict, Field
from app.db import LiteLLMProvider
class NewLLMConfigBase(BaseModel):
    """Base schema with common fields for NewLLMConfig.

    Combines LiteLLM model settings (provider, model name, credentials,
    extra params) with prompt configuration (system instructions and a
    citation toggle). Shared by the create and read schemas.
    """

    # Human-readable identity of the configuration.
    name: str = Field(
        ..., max_length=100, description="User-friendly name for the configuration"
    )
    description: str | None = Field(
        None, max_length=500, description="Optional description"
    )
    # Model Configuration
    provider: LiteLLMProvider = Field(..., description="LiteLLM provider type")
    # Only meaningful when provider is the CUSTOM enum value.
    custom_provider: str | None = Field(
        None, max_length=100, description="Custom provider name when provider is CUSTOM"
    )
    # Bare model identifier; the provider prefix is applied elsewhere.
    model_name: str = Field(
        ..., max_length=100, description="Model name without provider prefix"
    )
    # NOTE: required and stored in plain form here; the public read schema
    # (NewLLMConfigPublic) deliberately omits this field.
    api_key: str = Field(..., description="API key for the provider")
    api_base: str | None = Field(
        None, max_length=500, description="Optional API base URL"
    )
    # Free-form passthrough options forwarded to LiteLLM.
    litellm_params: dict[str, Any] | None = Field(
        default=None, description="Additional LiteLLM parameters"
    )
    # Prompt Configuration
    # Empty string means "fall back to the default template" (see
    # use_default_system_instructions below).
    system_instructions: str = Field(
        default="",
        description="Custom system instructions. Empty string uses default SURFSENSE_SYSTEM_INSTRUCTIONS.",
    )
    use_default_system_instructions: bool = Field(
        default=True,
        description="Whether to use default instructions when system_instructions is empty",
    )
    citations_enabled: bool = Field(
        default=True,
        description="Whether to include citation instructions in the system prompt",
    )
class NewLLMConfigCreate(NewLLMConfigBase):
    """Schema for creating a new NewLLMConfig.

    Extends the base schema with the target search space; omitting it
    (None) creates a global admin config visible to all spaces.
    """

    search_space_id: int | None = Field(
        default=None,
        description="Search space ID. None = global admin config visible to all spaces",
    )
class NewLLMConfigUpdate(BaseModel):
    """Schema for updating an existing NewLLMConfig. All fields are optional."""

    # Identity — None means "leave the stored value unchanged".
    name: str | None = Field(default=None, max_length=100)
    description: str | None = Field(default=None, max_length=500)

    # Model Configuration
    provider: LiteLLMProvider | None = Field(default=None)
    custom_provider: str | None = Field(default=None, max_length=100)
    model_name: str | None = Field(default=None, max_length=100)
    api_key: str | None = Field(default=None)
    api_base: str | None = Field(default=None, max_length=500)
    litellm_params: dict[str, Any] | None = Field(default=None)

    # Prompt Configuration
    system_instructions: str | None = Field(default=None)
    use_default_system_instructions: bool | None = Field(default=None)
    citations_enabled: bool | None = Field(default=None)
class NewLLMConfigRead(NewLLMConfigBase):
    """Schema for reading a NewLLMConfig (includes id and timestamps)."""

    # Populate directly from ORM objects.
    model_config = ConfigDict(from_attributes=True)

    id: int
    created_at: datetime
    # None for global configs that are not tied to a space/user.
    search_space_id: int | None = None
    user_id: uuid.UUID | None = None
class NewLLMConfigPublic(BaseModel):
    """
    Public schema for NewLLMConfig that hides the API key.
    Used when returning configs in list views or to users who shouldn't see keys.
    """

    # Populate directly from ORM objects.
    model_config = ConfigDict(from_attributes=True)

    id: int
    name: str
    description: str | None = None

    # Model Configuration (no api_key)
    provider: LiteLLMProvider
    custom_provider: str | None = None
    model_name: str
    api_base: str | None = None
    litellm_params: dict[str, Any] | None = None

    # Prompt Configuration
    system_instructions: str
    use_default_system_instructions: bool
    citations_enabled: bool

    # Ownership / scoping metadata; None for global configs.
    created_at: datetime
    search_space_id: int | None = None
    user_id: uuid.UUID | None = None
class DefaultSystemInstructionsResponse(BaseModel):
    """Response schema for getting default system instructions."""

    # Required field (no default supplied).
    default_system_instructions: str = Field(
        description="The default SURFSENSE_SYSTEM_INSTRUCTIONS template"
    )
class GlobalNewLLMConfigRead(BaseModel):
    """
    Schema for reading global LLM configs from YAML.

    Global configs carry negative IDs and no search_space_id, and the API
    key is never exposed. ID 0 is reserved for Auto mode, which routes
    requests through the LiteLLM Router for load balancing.
    """

    id: int = Field(
        ...,
        description="Config ID: 0 for Auto mode, negative for global configs",
    )
    name: str
    description: str | None = None

    # Model Configuration (no api_key)
    # Plain string rather than the enum: YAML input is not enum-validated,
    # and Auto mode uses the sentinel value "AUTO".
    provider: str
    custom_provider: str | None = None
    model_name: str
    api_base: str | None = None
    litellm_params: dict[str, Any] | None = None

    # Prompt Configuration
    system_instructions: str = ""
    use_default_system_instructions: bool = True
    citations_enabled: bool = True

    # Discriminators for the frontend.
    is_global: bool = True  # Always true for global configs
    is_auto_mode: bool = False  # True only for Auto mode (ID 0)
# =============================================================================
# LLM Preferences Schemas (for role assignments)
# =============================================================================
class LLMPreferencesRead(BaseModel):
    """Schema for reading LLM preferences (role assignments) for a search space."""

    # Populate directly from ORM objects.
    model_config = ConfigDict(from_attributes=True)

    # Role -> config ID assignments (None = role unassigned).
    agent_llm_id: int | None = Field(
        default=None, description="ID of the LLM config to use for agent/chat tasks"
    )
    document_summary_llm_id: int | None = Field(
        default=None, description="ID of the LLM config to use for document summarization"
    )
    image_generation_config_id: int | None = Field(
        default=None, description="ID of the image generation config to use"
    )
    vision_llm_config_id: int | None = Field(
        default=None,
        description="ID of the vision LLM config to use for vision/screenshot analysis",
    )

    # Expanded config payloads matching the IDs above, when available.
    agent_llm: dict[str, Any] | None = Field(
        default=None, description="Full config for agent LLM"
    )
    document_summary_llm: dict[str, Any] | None = Field(
        default=None, description="Full config for document summary LLM"
    )
    image_generation_config: dict[str, Any] | None = Field(
        default=None, description="Full config for image generation"
    )
    vision_llm_config: dict[str, Any] | None = Field(
        default=None, description="Full config for vision LLM"
    )
class LLMPreferencesUpdate(BaseModel):
    """Schema for updating LLM preferences."""

    # Each role assignment is optional; None leaves it untouched.
    agent_llm_id: int | None = Field(
        default=None, description="ID of the LLM config to use for agent/chat tasks"
    )
    document_summary_llm_id: int | None = Field(
        default=None, description="ID of the LLM config to use for document summarization"
    )
    image_generation_config_id: int | None = Field(
        default=None, description="ID of the image generation config to use"
    )
    vision_llm_config_id: int | None = Field(
        default=None,
        description="ID of the vision LLM config to use for vision/screenshot analysis",
    )