mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-04-26 17:26:23 +02:00
refactor: remove local folder indexing endpoints and update related API calls
This commit is contained in:
parent
5f5954e932
commit
6f9f69c3e8
3 changed files with 13 additions and 200 deletions
|
|
@ -1385,166 +1385,6 @@ async def restore_document_version(
|
|||
}
|
||||
|
||||
|
||||
# ===== Local folder indexing endpoints =====
|
||||
|
||||
|
||||
class FolderIndexRequest(PydanticBaseModel):
    """Request payload for a full folder scan of a local directory.

    Sent by the desktop app in self-hosted mode; the server creates or
    reuses the root Folder row and dispatches the indexing work to a
    Celery task (see the /documents/folder-index endpoint).
    """

    # Absolute path of the local folder to index.
    folder_path: str
    # Display name used for the root Folder row.
    folder_name: str
    # Search space the indexed documents will belong to.
    search_space_id: int
    # Patterns of entries to skip while scanning (semantics are defined by
    # the indexing task — presumably glob/name patterns; verify there).
    exclude_patterns: list[str] | None = None
    # Optional whitelist of file extensions to include in the scan.
    file_extensions: list[str] | None = None
    # Existing root Folder id to reuse; a fresh one is created when this is
    # absent or no longer exists.
    root_folder_id: int | None = None
    # Whether the indexing task should also generate document summaries.
    enable_summary: bool = False
|
||||
|
||||
|
||||
class FolderIndexFilesRequest(PydanticBaseModel):
    """Request payload for indexing specific files inside a watched folder.

    Sent as a batched trigger (e.g. from a chokidar file watcher); every
    entry in ``target_file_paths`` must lie under ``folder_path``.
    """

    # Absolute path of the watched folder.
    folder_path: str
    # Display name of the watched folder.
    folder_name: str
    # Search space the indexed documents will belong to.
    search_space_id: int
    # Files to (re)index; each must be located inside folder_path.
    target_file_paths: list[str]
    # Root Folder row the files belong to, if already known to the caller.
    root_folder_id: int | None = None
    # Whether the indexing task should also generate document summaries.
    enable_summary: bool = False
|
||||
|
||||
|
||||
@router.post("/documents/folder-index")
async def folder_index(
    request: FolderIndexRequest,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Full-scan index of a local folder.

    Creates (or reuses and updates) the root Folder row synchronously and
    dispatches the heavy indexing work to a Celery task.

    Returns:
        dict with ``message``, ``status`` and the ``root_folder_id`` so the
        desktop app can persist it for later incremental updates.

    Raises:
        HTTPException: 400 when not running in self-hosted mode; whatever
            ``check_permission`` raises when the user may not create
            documents in the search space.
    """
    from app.config import config as app_config

    # Local filesystem paths are only meaningful when the API runs on a
    # deployment the user controls.
    if not app_config.is_self_hosted():
        raise HTTPException(
            status_code=400,
            detail="Local folder indexing is only available in self-hosted mode",
        )

    await check_permission(
        session,
        user,
        request.search_space_id,
        Permission.DOCUMENTS_CREATE.value,
        "You don't have permission to create documents in this search space",
    )

    # Stored on the Folder row so later runs know this is a watched local
    # folder and how the scan was configured.
    watched_metadata = {
        "watched": True,
        "folder_path": request.folder_path,
        "exclude_patterns": request.exclude_patterns,
        "file_extensions": request.file_extensions,
    }

    root_folder_id = request.root_folder_id
    if root_folder_id:
        # Reuse the caller-supplied root folder, but only when it belongs to
        # the requested search space. Without the search_space_id filter a
        # caller could pass a folder id from a DIFFERENT search space and
        # overwrite its metadata — the permission check above only covers
        # request.search_space_id.
        existing = (
            await session.execute(
                select(Folder).where(
                    Folder.id == root_folder_id,
                    Folder.search_space_id == request.search_space_id,
                )
            )
        ).scalar_one_or_none()
        if not existing:
            # Stale or foreign id: fall through and create a fresh root folder.
            root_folder_id = None
        else:
            existing.folder_metadata = watched_metadata
            await session.commit()

    if not root_folder_id:
        root_folder = Folder(
            name=request.folder_name,
            search_space_id=request.search_space_id,
            created_by_id=str(user.id),
            position="a0",
            folder_metadata=watched_metadata,
        )
        session.add(root_folder)
        # Flush to obtain the generated primary key before committing.
        await session.flush()
        root_folder_id = root_folder.id
        await session.commit()

    from app.tasks.celery_tasks.document_tasks import index_local_folder_task

    # The heavy lifting (walking the tree, reading and indexing files)
    # happens in the background worker; this endpoint returns immediately.
    index_local_folder_task.delay(
        search_space_id=request.search_space_id,
        user_id=str(user.id),
        folder_path=request.folder_path,
        folder_name=request.folder_name,
        exclude_patterns=request.exclude_patterns,
        file_extensions=request.file_extensions,
        root_folder_id=root_folder_id,
        enable_summary=request.enable_summary,
    )

    return {
        "message": "Folder indexing started",
        "status": "processing",
        "root_folder_id": root_folder_id,
    }
|
||||
|
||||
|
||||
@router.post("/documents/folder-index-files")
async def folder_index_files(
    request: FolderIndexFilesRequest,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Index multiple files within a watched folder (batched chokidar trigger).

    Validates that all target_file_paths resolve to locations under
    folder_path, then dispatches a single Celery task that processes them
    in parallel.

    Returns:
        dict with ``message``, ``status`` and the number of files queued.

    Raises:
        HTTPException: 400 when not self-hosted, when target_file_paths is
            empty, or when a path falls outside folder_path; whatever
            ``check_permission`` raises on missing permission.
    """
    from app.config import config as app_config

    if not app_config.is_self_hosted():
        raise HTTPException(
            status_code=400,
            detail="Local folder indexing is only available in self-hosted mode",
        )

    if not request.target_file_paths:
        raise HTTPException(
            status_code=400, detail="target_file_paths must not be empty"
        )

    await check_permission(
        session,
        user,
        request.search_space_id,
        Permission.DOCUMENTS_CREATE.value,
        "You don't have permission to create documents in this search space",
    )

    from pathlib import Path

    # Resolve both sides before the containment check. A bare
    # Path(fp).relative_to(folder_path) does NOT normalize "..", so e.g.
    # "/base/../etc/passwd" has "/base" as a literal prefix and would pass
    # while actually pointing outside the watched folder (path traversal).
    base = Path(request.folder_path).resolve()
    for fp in request.target_file_paths:
        try:
            Path(fp).resolve().relative_to(base)
        except ValueError as err:
            raise HTTPException(
                status_code=400,
                detail=f"target_file_path {fp} must be inside folder_path",
            ) from err

    from app.tasks.celery_tasks.document_tasks import index_local_folder_task

    # One task for the whole batch; the worker processes the files.
    index_local_folder_task.delay(
        search_space_id=request.search_space_id,
        user_id=str(user.id),
        folder_path=request.folder_path,
        folder_name=request.folder_name,
        target_file_paths=request.target_file_paths,
        root_folder_id=request.root_folder_id,
        enable_summary=request.enable_summary,
    )

    return {
        "message": f"Batch indexing started for {len(request.target_file_paths)} file(s)",
        "status": "processing",
        "file_count": len(request.target_file_paths),
    }
|
||||
|
||||
|
||||
# ===== Upload-based local folder indexing endpoints =====
|
||||
# These work for ALL deployment modes (cloud, self-hosted remote, self-hosted local).
|
||||
# The desktop app reads files locally and uploads them here.
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue