mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-04-29 10:56:24 +02:00
feat: added periodic tasks in backend db and frontend hooks
- TODO: Add celery redbeat and create tasks dynamically in our redis
This commit is contained in:
parent
70808eb08b
commit
182f815bb7
8 changed files with 484 additions and 31 deletions
|
|
@ -55,30 +55,45 @@ def upgrade() -> None:
|
|||
|
||||
# ===== STEP 2: Populate search_space_id with user's first search space =====
|
||||
# This ensures existing LLM configs are assigned to a valid search space
|
||||
op.execute(
|
||||
"""
|
||||
UPDATE llm_configs lc
|
||||
SET search_space_id = (
|
||||
SELECT id
|
||||
FROM searchspaces ss
|
||||
WHERE ss.user_id = lc.user_id
|
||||
ORDER BY ss.created_at ASC
|
||||
LIMIT 1
|
||||
# Only run this if user_id column exists on llm_configs
|
||||
if "user_id" in llm_config_columns:
|
||||
op.execute(
|
||||
"""
|
||||
UPDATE llm_configs lc
|
||||
SET search_space_id = (
|
||||
SELECT id
|
||||
FROM searchspaces ss
|
||||
WHERE ss.user_id = lc.user_id
|
||||
ORDER BY ss.created_at ASC
|
||||
LIMIT 1
|
||||
)
|
||||
WHERE search_space_id IS NULL AND user_id IS NOT NULL
|
||||
"""
|
||||
)
|
||||
WHERE search_space_id IS NULL AND user_id IS NOT NULL
|
||||
"""
|
||||
)
|
||||
|
||||
# ===== STEP 3: Make search_space_id NOT NULL and add FK constraint =====
|
||||
op.alter_column(
|
||||
"llm_configs",
|
||||
"search_space_id",
|
||||
nullable=False,
|
||||
# Check if there are any rows with NULL search_space_id
|
||||
# If llm_configs table is empty or all rows have search_space_id, we can proceed
|
||||
result = conn.execute(
|
||||
sa.text("SELECT COUNT(*) FROM llm_configs WHERE search_space_id IS NULL")
|
||||
)
|
||||
null_count = result.scalar()
|
||||
|
||||
# Add foreign key constraint
|
||||
if null_count == 0 or "user_id" in llm_config_columns:
|
||||
# Safe to make NOT NULL
|
||||
op.alter_column(
|
||||
"llm_configs",
|
||||
"search_space_id",
|
||||
nullable=False,
|
||||
)
|
||||
else:
|
||||
# If there are NULL values and no user_id to migrate from, skip making it NOT NULL
|
||||
# This would happen if llm_configs already exists without user_id
|
||||
pass
|
||||
|
||||
# Add foreign key constraint only if search_space_id is NOT NULL
|
||||
foreign_keys = [fk["name"] for fk in inspector.get_foreign_keys("llm_configs")]
|
||||
if "fk_llm_configs_search_space_id" not in foreign_keys:
|
||||
if "fk_llm_configs_search_space_id" not in foreign_keys and null_count == 0:
|
||||
op.create_foreign_key(
|
||||
"fk_llm_configs_search_space_id",
|
||||
"llm_configs",
|
||||
|
|
|
|||
|
|
@ -0,0 +1,94 @@
|
|||
"""Add periodic indexing fields to search_source_connectors

Revision ID: 32
Revises: 31

Changes:
1. Add periodic_indexing_enabled column (Boolean, default False)
2. Add indexing_frequency_minutes column (Integer, nullable)
3. Add next_scheduled_at column (TIMESTAMP with timezone, nullable)
"""

from collections.abc import Sequence

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "32"
down_revision: str | None = "31"
# No branch labels or cross-revision dependencies for this migration.
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
||||
def upgrade() -> None:
    """Add periodic indexing fields to search_source_connectors table.

    Adds three columns, each guarded by an existence check so the
    migration is safe to re-run against a partially migrated database:
      - periodic_indexing_enabled: Boolean, NOT NULL, server default "false"
      - indexing_frequency_minutes: Integer, nullable
      - next_scheduled_at: TIMESTAMP with timezone, nullable
    """
    from sqlalchemy import inspect

    conn = op.get_bind()
    inspector = inspect(conn)

    # Columns already present on the table (set for O(1) membership tests).
    existing_columns = {
        col["name"] for col in inspector.get_columns("search_source_connectors")
    }

    # Data-driven column list replaces three copy-pasted add_column stanzas.
    new_columns = [
        sa.Column(
            "periodic_indexing_enabled",
            sa.Boolean(),
            nullable=False,
            # Server-side default so existing rows get a value for the
            # NOT NULL column without a separate backfill step.
            server_default="false",
        ),
        sa.Column("indexing_frequency_minutes", sa.Integer(), nullable=True),
        sa.Column("next_scheduled_at", sa.TIMESTAMP(timezone=True), nullable=True),
    ]

    for column in new_columns:
        if column.name not in existing_columns:
            op.add_column("search_source_connectors", column)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Remove periodic indexing fields from search_source_connectors table."""
    from sqlalchemy import inspect

    inspector = inspect(op.get_bind())

    # Snapshot the current column names so each drop can be guarded,
    # keeping the downgrade safe to re-run.
    present = {
        col["name"] for col in inspector.get_columns("search_source_connectors")
    }

    # Drop in reverse order of creation.
    for column_name in (
        "next_scheduled_at",
        "indexing_frequency_minutes",
        "periodic_indexing_enabled",
    ):
        if column_name in present:
            op.drop_column("search_source_connectors", column_name)
|
||||
Loading…
Add table
Add a link
Reference in a new issue