mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-01 11:56:25 +02:00
feat: added periodic tasks in backend db and frontend hooks
- TODO: Add Celery RedBeat and create tasks dynamically in our Redis
This commit is contained in:
parent
70808eb08b
commit
182f815bb7
8 changed files with 484 additions and 31 deletions
|
|
@ -11,7 +11,7 @@ Note: Each search space can have only one connector of each type per user (based
|
|||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
|
|
@ -124,8 +124,22 @@ async def create_search_source_connector(
|
|||
status_code=409,
|
||||
detail=f"A connector with type {connector.connector_type} already exists in this search space. Each search space can have only one connector of each type per user.",
|
||||
)
|
||||
|
||||
# Prepare connector data
|
||||
connector_data = connector.model_dump()
|
||||
|
||||
# Automatically set next_scheduled_at if periodic indexing is enabled
|
||||
if (
|
||||
connector.periodic_indexing_enabled
|
||||
and connector.indexing_frequency_minutes
|
||||
and connector.next_scheduled_at is None
|
||||
):
|
||||
connector_data["next_scheduled_at"] = datetime.now(UTC) + timedelta(
|
||||
minutes=connector.indexing_frequency_minutes
|
||||
)
|
||||
|
||||
db_connector = SearchSourceConnector(
|
||||
**connector.model_dump(), search_space_id=search_space_id, user_id=user.id
|
||||
**connector_data, search_space_id=search_space_id, user_id=user.id
|
||||
)
|
||||
session.add(db_connector)
|
||||
await session.commit()
|
||||
|
|
@ -224,6 +238,50 @@ async def update_search_source_connector(
|
|||
# Convert the sparse update data (only fields present in request) to a dict
|
||||
update_data = connector_update.model_dump(exclude_unset=True)
|
||||
|
||||
# Validate periodic indexing fields
|
||||
# Get the effective values after update
|
||||
effective_is_indexable = update_data.get("is_indexable", db_connector.is_indexable)
|
||||
effective_periodic_enabled = update_data.get(
|
||||
"periodic_indexing_enabled", db_connector.periodic_indexing_enabled
|
||||
)
|
||||
effective_frequency = update_data.get(
|
||||
"indexing_frequency_minutes", db_connector.indexing_frequency_minutes
|
||||
)
|
||||
|
||||
# Validate periodic indexing configuration
|
||||
if effective_periodic_enabled:
|
||||
if not effective_is_indexable:
|
||||
raise HTTPException(
|
||||
status_code=422,
|
||||
detail="periodic_indexing_enabled can only be True for indexable connectors",
|
||||
)
|
||||
if effective_frequency is None:
|
||||
raise HTTPException(
|
||||
status_code=422,
|
||||
detail="indexing_frequency_minutes is required when periodic_indexing_enabled is True",
|
||||
)
|
||||
if effective_frequency <= 0:
|
||||
raise HTTPException(
|
||||
status_code=422,
|
||||
detail="indexing_frequency_minutes must be greater than 0",
|
||||
)
|
||||
|
||||
# Automatically set next_scheduled_at if not provided and periodic indexing is being enabled
|
||||
if (
|
||||
"periodic_indexing_enabled" in update_data
|
||||
or "indexing_frequency_minutes" in update_data
|
||||
) and "next_scheduled_at" not in update_data:
|
||||
# Schedule the next indexing based on the frequency
|
||||
update_data["next_scheduled_at"] = datetime.now(UTC) + timedelta(
|
||||
minutes=effective_frequency
|
||||
)
|
||||
elif (
|
||||
effective_periodic_enabled is False
|
||||
and "periodic_indexing_enabled" in update_data
|
||||
):
|
||||
# If disabling periodic indexing, clear the next_scheduled_at
|
||||
update_data["next_scheduled_at"] = None
|
||||
|
||||
# Special handling for 'config' field
|
||||
if "config" in update_data:
|
||||
incoming_config = update_data["config"] # Config data from the request
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue