Merge pull request #1185 from AnishSarkar22/fix/folder-watch
Some checks are pending
Build and Push Docker Images / tag_release (push) Waiting to run
Build and Push Docker Images / build (./surfsense_backend, ./surfsense_backend/Dockerfile, backend, surfsense-backend, ubuntu-24.04-arm, linux/arm64, arm64) (push) Blocked by required conditions
Build and Push Docker Images / build (./surfsense_backend, ./surfsense_backend/Dockerfile, backend, surfsense-backend, ubuntu-latest, linux/amd64, amd64) (push) Blocked by required conditions
Build and Push Docker Images / build (./surfsense_web, ./surfsense_web/Dockerfile, web, surfsense-web, ubuntu-24.04-arm, linux/arm64, arm64) (push) Blocked by required conditions
Build and Push Docker Images / build (./surfsense_web, ./surfsense_web/Dockerfile, web, surfsense-web, ubuntu-latest, linux/amd64, amd64) (push) Blocked by required conditions
Build and Push Docker Images / create_manifest (backend, surfsense-backend) (push) Blocked by required conditions
Build and Push Docker Images / create_manifest (web, surfsense-web) (push) Blocked by required conditions

fix: harden folder watch feature with file hash dedup, mtime seeding, and stable spinner
This commit is contained in:
Rohan Verma 2026-04-08 14:00:05 -07:00 committed by GitHub
commit fe6f830eab
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
39 changed files with 1955 additions and 497 deletions

View file

@ -25,7 +25,7 @@ from sqlalchemy import (
)
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, declared_attr, relationship
from sqlalchemy.orm import DeclarativeBase, Mapped, backref, declared_attr, relationship
from app.config import config
@ -1086,7 +1086,9 @@ class DocumentVersion(BaseModel, TimestampMixin):
content_hash = Column(String, nullable=False)
title = Column(String, nullable=True)
document = relationship("Document", backref="versions")
document = relationship(
"Document", backref=backref("versions", passive_deletes=True)
)
class Chunk(BaseModel, TimestampMixin):

View file

@ -17,6 +17,7 @@ class ConnectorDocument(BaseModel):
metadata: dict = {}
connector_id: int | None = None
created_by_id: str
folder_id: int | None = None
@field_validator("title", "source_markdown", "unique_id", "created_by_id")
@classmethod

View file

@ -268,6 +268,8 @@ class IndexingPipelineService:
):
existing.status = DocumentStatus.pending()
existing.updated_at = datetime.now(UTC)
if connector_doc.folder_id is not None:
existing.folder_id = connector_doc.folder_id
documents.append(existing)
log_document_requeued(ctx)
continue
@ -294,6 +296,8 @@ class IndexingPipelineService:
existing.document_metadata = connector_doc.metadata
existing.updated_at = datetime.now(UTC)
existing.status = DocumentStatus.pending()
if connector_doc.folder_id is not None:
existing.folder_id = connector_doc.folder_id
documents.append(existing)
log_document_updated(ctx)
continue
@ -317,6 +321,7 @@ class IndexingPipelineService:
created_by_id=connector_doc.created_by_id,
updated_at=datetime.now(UTC),
status=DocumentStatus.pending(),
folder_id=connector_doc.folder_id,
)
self.session.add(document)
documents.append(document)

View file

@ -1385,45 +1385,48 @@ async def restore_document_version(
}
# ===== Local folder indexing endpoints =====
# ===== Upload-based local folder indexing endpoints =====
# These work for ALL deployment modes (cloud, self-hosted remote, self-hosted local).
# The desktop app reads files locally and uploads them here.
class FolderIndexRequest(PydanticBaseModel):
folder_path: str
class FolderMtimeCheckFile(PydanticBaseModel):
    """One file entry for the pre-upload mtime check.

    The desktop client sends these so the server can report which files are
    new or changed without the client uploading anything yet.
    """

    # Path of the file relative to the watched folder root.
    relative_path: str
    # Last-modified timestamp (seconds since epoch) as seen on the client.
    mtime: float
class FolderMtimeCheckRequest(PydanticBaseModel):
folder_name: str
search_space_id: int
exclude_patterns: list[str] | None = None
file_extensions: list[str] | None = None
root_folder_id: int | None = None
enable_summary: bool = False
files: list[FolderMtimeCheckFile]
class FolderIndexFilesRequest(PydanticBaseModel):
folder_path: str
class FolderUnlinkRequest(PydanticBaseModel):
folder_name: str
search_space_id: int
target_file_paths: list[str]
root_folder_id: int | None = None
enable_summary: bool = False
relative_paths: list[str]
@router.post("/documents/folder-index")
async def folder_index(
request: FolderIndexRequest,
class FolderSyncFinalizeRequest(PydanticBaseModel):
    """Payload for finalizing a full folder scan (orphan-document cleanup)."""

    # Name of the watched root folder (used to build per-file unique ids).
    folder_name: str
    search_space_id: int
    # DB id of the root Folder row; None if the client has not persisted one.
    root_folder_id: int | None = None
    # Complete list of relative paths currently present in the folder; any
    # DB document for this folder NOT in this list is treated as orphaned.
    all_relative_paths: list[str]
@router.post("/documents/folder-mtime-check")
async def folder_mtime_check(
request: FolderMtimeCheckRequest,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Full-scan index of a local folder. Creates the root Folder row synchronously
and dispatches the heavy indexing work to a Celery task.
Returns the root_folder_id so the desktop can persist it.
"""
from app.config import config as app_config
"""Pre-upload optimization: check which files need uploading based on mtime.
if not app_config.is_self_hosted():
raise HTTPException(
status_code=400,
detail="Local folder indexing is only available in self-hosted mode",
)
Returns the subset of relative paths where the file is new or has a
different mtime, so the client can skip reading/uploading unchanged files.
"""
from app.indexing_pipeline.document_hashing import compute_identifier_hash
await check_permission(
session,
@ -1433,113 +1436,309 @@ async def folder_index(
"You don't have permission to create documents in this search space",
)
watched_metadata = {
"watched": True,
"folder_path": request.folder_path,
"exclude_patterns": request.exclude_patterns,
"file_extensions": request.file_extensions,
}
uid_hashes = {}
for f in request.files:
uid = f"{request.folder_name}:{f.relative_path}"
uid_hash = compute_identifier_hash(
DocumentType.LOCAL_FOLDER_FILE.value, uid, request.search_space_id
)
uid_hashes[uid_hash] = f
root_folder_id = request.root_folder_id
if root_folder_id:
existing = (
await session.execute(select(Folder).where(Folder.id == root_folder_id))
).scalar_one_or_none()
if not existing:
root_folder_id = None
else:
existing.folder_metadata = watched_metadata
await session.commit()
existing_docs = (
(
await session.execute(
select(Document).where(
Document.unique_identifier_hash.in_(list(uid_hashes.keys())),
Document.document_type == DocumentType.LOCAL_FOLDER_FILE,
)
)
)
.scalars()
.all()
)
existing_by_hash = {doc.unique_identifier_hash: doc for doc in existing_docs}
mtime_tolerance = 1.0
files_to_upload: list[str] = []
for uid_hash, file_info in uid_hashes.items():
doc = existing_by_hash.get(uid_hash)
if doc is None:
files_to_upload.append(file_info.relative_path)
continue
stored_mtime = (doc.document_metadata or {}).get("mtime")
if stored_mtime is None:
files_to_upload.append(file_info.relative_path)
continue
if abs(file_info.mtime - stored_mtime) >= mtime_tolerance:
files_to_upload.append(file_info.relative_path)
return {"files_to_upload": files_to_upload}
@router.post("/documents/folder-upload")
async def folder_upload(
files: list[UploadFile],
folder_name: str = Form(...),
search_space_id: int = Form(...),
relative_paths: str = Form(...),
root_folder_id: int | None = Form(None),
enable_summary: bool = Form(False),
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Upload files from the desktop app for folder indexing.
Files are written to temp storage and dispatched to a Celery task.
Works for all deployment modes (no is_self_hosted guard).
"""
import json
import tempfile
await check_permission(
session,
user,
search_space_id,
Permission.DOCUMENTS_CREATE.value,
"You don't have permission to create documents in this search space",
)
if not files:
raise HTTPException(status_code=400, detail="No files provided")
try:
rel_paths: list[str] = json.loads(relative_paths)
except (json.JSONDecodeError, TypeError) as e:
raise HTTPException(
status_code=400, detail=f"Invalid relative_paths JSON: {e}"
) from e
if len(rel_paths) != len(files):
raise HTTPException(
status_code=400,
detail=f"Mismatch: {len(files)} files but {len(rel_paths)} relative_paths",
)
for file in files:
file_size = file.size or 0
if file_size > MAX_FILE_SIZE_BYTES:
raise HTTPException(
status_code=413,
detail=f"File '{file.filename}' ({file_size / (1024 * 1024):.1f} MB) "
f"exceeds the {MAX_FILE_SIZE_BYTES // (1024 * 1024)} MB per-file limit.",
)
if not root_folder_id:
root_folder = Folder(
name=request.folder_name,
search_space_id=request.search_space_id,
created_by_id=str(user.id),
position="a0",
folder_metadata=watched_metadata,
)
session.add(root_folder)
await session.flush()
root_folder_id = root_folder.id
watched_metadata = {
"watched": True,
"folder_path": folder_name,
}
existing_root = (
await session.execute(
select(Folder).where(
Folder.name == folder_name,
Folder.parent_id.is_(None),
Folder.search_space_id == search_space_id,
)
)
).scalar_one_or_none()
if existing_root:
root_folder_id = existing_root.id
existing_root.folder_metadata = watched_metadata
else:
root_folder = Folder(
name=folder_name,
search_space_id=search_space_id,
created_by_id=str(user.id),
position="a0",
folder_metadata=watched_metadata,
)
session.add(root_folder)
await session.flush()
root_folder_id = root_folder.id
await session.commit()
from app.tasks.celery_tasks.document_tasks import index_local_folder_task
async def _read_and_save(file: UploadFile, idx: int) -> dict:
content = await file.read()
filename = file.filename or rel_paths[idx].split("/")[-1]
index_local_folder_task.delay(
search_space_id=request.search_space_id,
def _write_temp() -> str:
with tempfile.NamedTemporaryFile(
delete=False, suffix=os.path.splitext(filename)[1]
) as tmp:
tmp.write(content)
return tmp.name
temp_path = await asyncio.to_thread(_write_temp)
return {
"temp_path": temp_path,
"relative_path": rel_paths[idx],
"filename": filename,
}
file_mappings = await asyncio.gather(
*(_read_and_save(f, i) for i, f in enumerate(files))
)
from app.tasks.celery_tasks.document_tasks import (
index_uploaded_folder_files_task,
)
index_uploaded_folder_files_task.delay(
search_space_id=search_space_id,
user_id=str(user.id),
folder_path=request.folder_path,
folder_name=request.folder_name,
exclude_patterns=request.exclude_patterns,
file_extensions=request.file_extensions,
folder_name=folder_name,
root_folder_id=root_folder_id,
enable_summary=request.enable_summary,
enable_summary=enable_summary,
file_mappings=list(file_mappings),
)
return {
"message": "Folder indexing started",
"message": f"Folder upload started for {len(files)} file(s)",
"status": "processing",
"root_folder_id": root_folder_id,
"file_count": len(files),
}
@router.post("/documents/folder-index-files")
async def folder_index_files(
request: FolderIndexFilesRequest,
@router.post("/documents/folder-unlink")
async def folder_unlink(
request: FolderUnlinkRequest,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Index multiple files within a watched folder (batched chokidar trigger).
Validates that all target_file_paths are under folder_path.
Dispatches a single Celery task that processes them in parallel.
"""Handle file deletion events from the desktop watcher.
For each relative path, find the matching document and delete it.
"""
from app.config import config as app_config
if not app_config.is_self_hosted():
raise HTTPException(
status_code=400,
detail="Local folder indexing is only available in self-hosted mode",
)
if not request.target_file_paths:
raise HTTPException(
status_code=400, detail="target_file_paths must not be empty"
)
from app.indexing_pipeline.document_hashing import compute_identifier_hash
from app.tasks.connector_indexers.local_folder_indexer import (
_cleanup_empty_folder_chain,
)
await check_permission(
session,
user,
request.search_space_id,
Permission.DOCUMENTS_CREATE.value,
"You don't have permission to create documents in this search space",
Permission.DOCUMENTS_DELETE.value,
"You don't have permission to delete documents in this search space",
)
from pathlib import Path
deleted_count = 0
for fp in request.target_file_paths:
try:
Path(fp).relative_to(request.folder_path)
except ValueError as err:
raise HTTPException(
status_code=400,
detail=f"target_file_path {fp} must be inside folder_path",
) from err
for rel_path in request.relative_paths:
unique_id = f"{request.folder_name}:{rel_path}"
uid_hash = compute_identifier_hash(
DocumentType.LOCAL_FOLDER_FILE.value,
unique_id,
request.search_space_id,
)
from app.tasks.celery_tasks.document_tasks import index_local_folder_task
existing = (
await session.execute(
select(Document).where(Document.unique_identifier_hash == uid_hash)
)
).scalar_one_or_none()
index_local_folder_task.delay(
search_space_id=request.search_space_id,
user_id=str(user.id),
folder_path=request.folder_path,
folder_name=request.folder_name,
target_file_paths=request.target_file_paths,
root_folder_id=request.root_folder_id,
enable_summary=request.enable_summary,
if existing:
deleted_folder_id = existing.folder_id
await session.delete(existing)
await session.flush()
if deleted_folder_id and request.root_folder_id:
await _cleanup_empty_folder_chain(
session, deleted_folder_id, request.root_folder_id
)
deleted_count += 1
await session.commit()
return {"deleted_count": deleted_count}
@router.post("/documents/folder-sync-finalize")
async def folder_sync_finalize(
request: FolderSyncFinalizeRequest,
session: AsyncSession = Depends(get_async_session),
user: User = Depends(current_active_user),
):
"""Finalize a full folder scan by deleting orphaned documents.
The client sends the complete list of relative paths currently in the
folder. Any document in the DB for this folder that is NOT in the list
gets deleted.
"""
from app.indexing_pipeline.document_hashing import compute_identifier_hash
from app.services.folder_service import get_folder_subtree_ids
from app.tasks.connector_indexers.local_folder_indexer import (
_cleanup_empty_folders,
)
return {
"message": f"Batch indexing started for {len(request.target_file_paths)} file(s)",
"status": "processing",
"file_count": len(request.target_file_paths),
}
await check_permission(
session,
user,
request.search_space_id,
Permission.DOCUMENTS_DELETE.value,
"You don't have permission to delete documents in this search space",
)
if not request.root_folder_id:
return {"deleted_count": 0}
subtree_ids = await get_folder_subtree_ids(session, request.root_folder_id)
seen_hashes: set[str] = set()
for rel_path in request.all_relative_paths:
unique_id = f"{request.folder_name}:{rel_path}"
uid_hash = compute_identifier_hash(
DocumentType.LOCAL_FOLDER_FILE.value,
unique_id,
request.search_space_id,
)
seen_hashes.add(uid_hash)
all_folder_docs = (
(
await session.execute(
select(Document).where(
Document.document_type == DocumentType.LOCAL_FOLDER_FILE,
Document.search_space_id == request.search_space_id,
Document.folder_id.in_(subtree_ids),
)
)
)
.scalars()
.all()
)
deleted_count = 0
for doc in all_folder_docs:
if doc.unique_identifier_hash not in seen_hashes:
await session.delete(doc)
deleted_count += 1
await session.flush()
existing_dirs: set[str] = set()
for rel_path in request.all_relative_paths:
parent = str(os.path.dirname(rel_path))
if parent and parent != ".":
existing_dirs.add(parent)
folder_mapping: dict[str, int] = {"": request.root_folder_id}
await _cleanup_empty_folders(
session,
request.root_folder_id,
request.search_space_id,
existing_dirs,
folder_mapping,
subtree_ids=subtree_ids,
)
await session.commit()
return {"deleted_count": deleted_count}

View file

@ -11,7 +11,10 @@ from app.config import config
from app.services.notification_service import NotificationService
from app.services.task_logging_service import TaskLoggingService
from app.tasks.celery_tasks import get_celery_session_maker
from app.tasks.connector_indexers.local_folder_indexer import index_local_folder
from app.tasks.connector_indexers.local_folder_indexer import (
index_local_folder,
index_uploaded_files,
)
from app.tasks.document_processors import (
add_extension_received_document,
add_youtube_video_document,
@ -1411,3 +1414,132 @@ async def _index_local_folder_async(
heartbeat_task.cancel()
if notification_id is not None:
_stop_heartbeat(notification_id)
# ===== Upload-based folder indexing task =====


@celery_app.task(name="index_uploaded_folder_files", bind=True)
def index_uploaded_folder_files_task(
    self,
    search_space_id: int,
    user_id: str,
    folder_name: str,
    root_folder_id: int,
    enable_summary: bool,
    file_mappings: list[dict],
):
    """Celery task to index files uploaded from the desktop app.

    Args:
        search_space_id: Search space the documents belong to.
        user_id: Id (stringified UUID) of the uploading user.
        folder_name: Display name of the watched root folder.
        root_folder_id: DB id of the root Folder row created by the endpoint.
        enable_summary: Whether to generate LLM summaries for the documents.
        file_mappings: One dict per uploaded file; NOTE(review): presumably
            carries temp_path/relative_path/filename as built by the upload
            endpoint — confirm against the producer.
    """
    # Celery workers are synchronous: run the async pipeline on a private
    # event loop and always close it so worker processes don't leak loops.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(
            _index_uploaded_folder_files_async(
                search_space_id=search_space_id,
                user_id=user_id,
                folder_name=folder_name,
                root_folder_id=root_folder_id,
                enable_summary=enable_summary,
                file_mappings=file_mappings,
            )
        )
    finally:
        loop.close()
async def _index_uploaded_folder_files_async(
    search_space_id: int,
    user_id: str,
    folder_name: str,
    root_folder_id: int,
    enable_summary: bool,
    file_mappings: list[dict],
):
    """Run upload-based folder indexing with notification + heartbeat.

    Creates a "processing started" notification (best-effort), starts a
    heartbeat so the UI spinner stays alive, runs the actual indexing via
    ``index_uploaded_files``, then marks the notification completed (with or
    without an error message). Notification failures never abort indexing;
    indexing failures are re-raised after the notification is updated.
    """
    file_count = len(file_mappings)
    doc_name = f"{folder_name} ({file_count} file{'s' if file_count != 1 else ''})"
    notification = None
    notification_id: int | None = None
    heartbeat_task = None

    async with get_celery_session_maker()() as session:
        # Best-effort notification setup: a failure here must not stop indexing.
        try:
            notification = (
                await NotificationService.document_processing.notify_processing_started(
                    session=session,
                    user_id=UUID(user_id),
                    document_type="LOCAL_FOLDER_FILE",
                    document_name=doc_name,
                    search_space_id=search_space_id,
                )
            )
            notification_id = notification.id
            _start_heartbeat(notification_id)
            heartbeat_task = asyncio.create_task(_run_heartbeat_loop(notification_id))
        except Exception:
            logger.warning(
                "Failed to create notification for uploaded folder indexing",
                exc_info=True,
            )

        async def _heartbeat_progress(completed_count: int) -> None:
            # Progress callback passed into the indexer; closes over
            # `session`/`notification`. Errors are suppressed so a flaky
            # progress update can't fail the indexing run.
            if notification:
                with contextlib.suppress(Exception):
                    await NotificationService.document_processing.notify_processing_progress(
                        session=session,
                        notification=notification,
                        stage="indexing",
                        stage_message=f"Syncing files ({completed_count}/{file_count})",
                    )

        try:
            _indexed, _failed, err = await index_uploaded_files(
                session=session,
                search_space_id=search_space_id,
                user_id=user_id,
                folder_name=folder_name,
                root_folder_id=root_folder_id,
                enable_summary=enable_summary,
                file_mappings=file_mappings,
                on_heartbeat_callback=_heartbeat_progress,
            )
            if notification:
                try:
                    # Refresh first: the heartbeat loop may have mutated the
                    # row concurrently.
                    await session.refresh(notification)
                    if err:
                        await NotificationService.document_processing.notify_processing_completed(
                            session=session,
                            notification=notification,
                            error_message=err,
                        )
                    else:
                        await NotificationService.document_processing.notify_processing_completed(
                            session=session,
                            notification=notification,
                        )
                except Exception:
                    logger.warning(
                        "Failed to update notification after uploaded folder indexing",
                        exc_info=True,
                    )
        except Exception as e:
            logger.exception(f"Uploaded folder indexing failed: {e}")
            if notification:
                try:
                    await session.refresh(notification)
                    await NotificationService.document_processing.notify_processing_completed(
                        session=session,
                        notification=notification,
                        # Truncate so oversized tracebacks don't bloat the row.
                        error_message=str(e)[:200],
                    )
                except Exception:
                    pass
            # Re-raise so Celery records the task as failed.
            raise
        finally:
            # Always stop the heartbeat, success or failure.
            if heartbeat_task:
                heartbeat_task.cancel()
            if notification_id is not None:
                _stop_heartbeat(notification_id)

View file

@ -14,13 +14,14 @@ no connector row is read.
"""
import asyncio
import contextlib
import os
from collections.abc import Awaitable, Callable
from datetime import UTC, datetime
from pathlib import Path
from sqlalchemy import select
from sqlalchemy.exc import IntegrityError, SQLAlchemyError
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import (
@ -178,6 +179,22 @@ def _content_hash(content: str, search_space_id: int) -> str:
return hashlib.sha256(f"{search_space_id}:{content}".encode()).hexdigest()
def _compute_raw_file_hash(file_path: str) -> str:
"""SHA-256 hash of the raw file bytes.
Much cheaper than ETL/OCR extraction -- only performs sequential I/O.
Used as a pre-filter to skip expensive content extraction when the
underlying file hasn't changed at all.
"""
import hashlib
h = hashlib.sha256()
with open(file_path, "rb") as f:
for chunk in iter(lambda: f.read(8192), b""):
h.update(chunk)
return h.hexdigest()
async def _compute_file_content_hash(
file_path: str,
filename: str,
@ -328,6 +345,27 @@ async def _resolve_folder_for_file(
return current_parent_id
async def _set_indexing_flag(session: AsyncSession, folder_id: int) -> None:
    """Mark a folder's metadata with ``indexing_in_progress = True``.

    Silently does nothing when the folder row no longer exists.
    """
    target = await session.get(Folder, folder_id)
    if target is None:
        return
    # Build a fresh dict so the JSON column change is detected by the ORM.
    updated_meta = {**(target.folder_metadata or {}), "indexing_in_progress": True}
    target.folder_metadata = updated_meta
    await session.commit()
async def _clear_indexing_flag(session: AsyncSession, folder_id: int) -> None:
    """Best-effort removal of the ``indexing_in_progress`` metadata flag.

    All errors are swallowed: clearing the spinner flag must never mask
    the outcome of the indexing run itself.
    """
    with contextlib.suppress(Exception):
        target = await session.get(Folder, folder_id)
        if target is None:
            return
        # Copy-then-pop so the ORM sees a new dict assigned to the column.
        updated_meta = dict(target.folder_metadata or {})
        updated_meta.pop("indexing_in_progress", None)
        target.folder_metadata = updated_meta
        await session.commit()
async def _cleanup_empty_folder_chain(
session: AsyncSession,
folder_id: int,
@ -371,24 +409,21 @@ async def _cleanup_empty_folders(
search_space_id: int,
existing_dirs_on_disk: set[str],
folder_mapping: dict[str, int],
subtree_ids: list[int] | None = None,
) -> None:
"""Delete Folder rows that are empty (no docs, no children) and no longer on disk."""
from sqlalchemy import delete as sa_delete
id_to_rel: dict[int, str] = {fid: rel for rel, fid in folder_mapping.items() if rel}
all_folders = (
(
await session.execute(
select(Folder).where(
Folder.search_space_id == search_space_id,
Folder.id != root_folder_id,
)
)
)
.scalars()
.all()
query = select(Folder).where(
Folder.search_space_id == search_space_id,
Folder.id != root_folder_id,
)
if subtree_ids is not None:
query = query.where(Folder.id.in_(subtree_ids))
all_folders = (await session.execute(query)).scalars().all()
candidates: list[Folder] = []
for folder in all_folders:
@ -518,44 +553,50 @@ async def index_local_folder(
# BATCH MODE (1..N files)
# ====================================================================
if target_file_paths:
if len(target_file_paths) == 1:
indexed, skipped, err = await _index_single_file(
session=session,
if root_folder_id:
await _set_indexing_flag(session, root_folder_id)
try:
if len(target_file_paths) == 1:
indexed, skipped, err = await _index_single_file(
session=session,
search_space_id=search_space_id,
user_id=user_id,
folder_path=folder_path,
folder_name=folder_name,
target_file_path=target_file_paths[0],
enable_summary=enable_summary,
root_folder_id=root_folder_id,
task_logger=task_logger,
log_entry=log_entry,
)
return indexed, skipped, root_folder_id, err
indexed, failed, err = await _index_batch_files(
search_space_id=search_space_id,
user_id=user_id,
folder_path=folder_path,
folder_name=folder_name,
target_file_path=target_file_paths[0],
target_file_paths=target_file_paths,
enable_summary=enable_summary,
root_folder_id=root_folder_id,
task_logger=task_logger,
log_entry=log_entry,
on_progress_callback=on_heartbeat_callback,
)
return indexed, skipped, root_folder_id, err
indexed, failed, err = await _index_batch_files(
search_space_id=search_space_id,
user_id=user_id,
folder_path=folder_path,
folder_name=folder_name,
target_file_paths=target_file_paths,
enable_summary=enable_summary,
root_folder_id=root_folder_id,
on_progress_callback=on_heartbeat_callback,
)
if err:
await task_logger.log_task_success(
log_entry,
f"Batch indexing: {indexed} indexed, {failed} failed",
{"indexed": indexed, "failed": failed},
)
else:
await task_logger.log_task_success(
log_entry,
f"Batch indexing complete: {indexed} indexed",
{"indexed": indexed, "failed": failed},
)
return indexed, failed, root_folder_id, err
if err:
await task_logger.log_task_success(
log_entry,
f"Batch indexing: {indexed} indexed, {failed} failed",
{"indexed": indexed, "failed": failed},
)
else:
await task_logger.log_task_success(
log_entry,
f"Batch indexing complete: {indexed} indexed",
{"indexed": indexed, "failed": failed},
)
return indexed, failed, root_folder_id, err
finally:
if root_folder_id:
await _clear_indexing_flag(session, root_folder_id)
# ====================================================================
# FULL-SCAN MODE
@ -575,6 +616,7 @@ async def index_local_folder(
exclude_patterns=exclude_patterns,
)
await session.flush()
await _set_indexing_flag(session, root_folder_id)
try:
files = scan_folder(folder_path, file_extensions, exclude_patterns)
@ -582,6 +624,7 @@ async def index_local_folder(
await task_logger.log_task_failure(
log_entry, f"Failed to scan folder: {e}", "Scan error", {}
)
await _clear_indexing_flag(session, root_folder_id)
return 0, 0, root_folder_id, f"Failed to scan folder: {e}"
logger.info(f"Found {len(files)} files in folder")
@ -630,6 +673,24 @@ async def index_local_folder(
skipped_count += 1
continue
raw_hash = await asyncio.to_thread(
_compute_raw_file_hash, file_path_abs
)
stored_raw_hash = (existing_document.document_metadata or {}).get(
"raw_file_hash"
)
if stored_raw_hash and stored_raw_hash == raw_hash:
meta = dict(existing_document.document_metadata or {})
meta["mtime"] = current_mtime
existing_document.document_metadata = meta
if not DocumentStatus.is_state(
existing_document.status, DocumentStatus.READY
):
existing_document.status = DocumentStatus.ready()
skipped_count += 1
continue
try:
estimated_pages = await _check_page_limit_or_skip(
page_limit_service, user_id, file_path_abs
@ -653,6 +714,7 @@ async def index_local_folder(
if existing_document.content_hash == content_hash:
meta = dict(existing_document.document_metadata or {})
meta["mtime"] = current_mtime
meta["raw_file_hash"] = raw_hash
existing_document.document_metadata = meta
if not DocumentStatus.is_state(
existing_document.status, DocumentStatus.READY
@ -687,6 +749,10 @@ async def index_local_folder(
skipped_count += 1
continue
raw_hash = await asyncio.to_thread(
_compute_raw_file_hash, file_path_abs
)
doc = _build_connector_doc(
title=file_info["name"],
content=content,
@ -702,6 +768,7 @@ async def index_local_folder(
"mtime": file_info["modified_at"].timestamp(),
"estimated_pages": estimated_pages,
"content_length": len(content),
"raw_file_hash": raw_hash,
}
except Exception as e:
@ -753,29 +820,16 @@ async def index_local_folder(
compute_unique_identifier_hash,
)
pipeline = IndexingPipelineService(session)
doc_map = {compute_unique_identifier_hash(cd): cd for cd in connector_docs}
documents = await pipeline.prepare_for_indexing(connector_docs)
# Assign folder_id immediately so docs appear in the correct
# folder while still pending/processing (visible via Zero sync).
for document in documents:
cd = doc_map.get(document.unique_identifier_hash)
if cd is None:
continue
for cd in connector_docs:
rel_path = (cd.metadata or {}).get("file_path", "")
parent_dir = str(Path(rel_path).parent) if rel_path else ""
if parent_dir == ".":
parent_dir = ""
document.folder_id = folder_mapping.get(
parent_dir, folder_mapping.get("")
)
try:
await session.commit()
except IntegrityError:
await session.rollback()
for document in documents:
await session.refresh(document)
cd.folder_id = folder_mapping.get(parent_dir, folder_mapping.get(""))
pipeline = IndexingPipelineService(session)
doc_map = {compute_unique_identifier_hash(cd): cd for cd in connector_docs}
documents = await pipeline.prepare_for_indexing(connector_docs)
llm = await get_user_long_context_llm(session, user_id, search_space_id)
@ -795,6 +849,7 @@ async def index_local_folder(
doc_meta = dict(result.document_metadata or {})
doc_meta["mtime"] = mtime_info.get("mtime")
doc_meta["raw_file_hash"] = mtime_info.get("raw_file_hash")
result.document_metadata = doc_meta
est = mtime_info.get("estimated_pages", 1)
@ -823,8 +878,16 @@ async def index_local_folder(
root_fid = folder_mapping.get("")
if root_fid:
from app.services.folder_service import get_folder_subtree_ids
subtree_ids = await get_folder_subtree_ids(session, root_fid)
await _cleanup_empty_folders(
session, root_fid, search_space_id, existing_dirs, folder_mapping
session,
root_fid,
search_space_id,
existing_dirs,
folder_mapping,
subtree_ids=subtree_ids,
)
try:
@ -851,6 +914,7 @@ async def index_local_folder(
},
)
await _clear_indexing_flag(session, root_folder_id)
return indexed_count, skipped_count, root_folder_id, warning_message
except SQLAlchemyError as e:
@ -859,6 +923,8 @@ async def index_local_folder(
await task_logger.log_task_failure(
log_entry, f"DB error: {e}", "Database error", {}
)
if root_folder_id:
await _clear_indexing_flag(session, root_folder_id)
return 0, 0, root_folder_id, f"Database error: {e}"
except Exception as e:
@ -866,6 +932,8 @@ async def index_local_folder(
await task_logger.log_task_failure(
log_entry, f"Error: {e}", "Unexpected error", {}
)
if root_folder_id:
await _clear_indexing_flag(session, root_folder_id)
return 0, 0, root_folder_id, str(e)
@ -988,6 +1056,22 @@ async def _index_single_file(
DocumentType.LOCAL_FOLDER_FILE.value, unique_id, search_space_id
)
raw_hash = await asyncio.to_thread(_compute_raw_file_hash, str(full_path))
existing = await check_document_by_unique_identifier(session, uid_hash)
if existing:
stored_raw_hash = (existing.document_metadata or {}).get("raw_file_hash")
if stored_raw_hash and stored_raw_hash == raw_hash:
mtime = full_path.stat().st_mtime
meta = dict(existing.document_metadata or {})
meta["mtime"] = mtime
existing.document_metadata = meta
if not DocumentStatus.is_state(existing.status, DocumentStatus.READY):
existing.status = DocumentStatus.ready()
await session.commit()
return 0, 0, None
page_limit_service = PageLimitService(session)
try:
estimated_pages = await _check_page_limit_or_skip(
@ -1006,13 +1090,12 @@ async def _index_single_file(
if not content.strip():
return 0, 1, None
existing = await check_document_by_unique_identifier(session, uid_hash)
if existing:
if existing.content_hash == content_hash:
mtime = full_path.stat().st_mtime
meta = dict(existing.document_metadata or {})
meta["mtime"] = mtime
meta["raw_file_hash"] = raw_hash
existing.document_metadata = meta
await session.commit()
return 0, 1, None
@ -1031,6 +1114,11 @@ async def _index_single_file(
enable_summary=enable_summary,
)
if root_folder_id:
connector_doc.folder_id = await _resolve_folder_for_file(
session, rel_path, root_folder_id, search_space_id, user_id
)
pipeline = IndexingPipelineService(session)
llm = await get_user_long_context_llm(session, user_id, search_space_id)
documents = await pipeline.prepare_for_indexing([connector_doc])
@ -1040,21 +1128,12 @@ async def _index_single_file(
db_doc = documents[0]
if root_folder_id:
try:
db_doc.folder_id = await _resolve_folder_for_file(
session, rel_path, root_folder_id, search_space_id, user_id
)
await session.commit()
except IntegrityError:
await session.rollback()
await session.refresh(db_doc)
await pipeline.index(db_doc, connector_doc, llm)
await session.refresh(db_doc)
doc_meta = dict(db_doc.document_metadata or {})
doc_meta["mtime"] = mtime
doc_meta["raw_file_hash"] = raw_hash
db_doc.document_metadata = doc_meta
await session.commit()
@ -1081,3 +1160,305 @@ async def _index_single_file(
logger.exception(f"Error indexing single file {target_file_path}: {e}")
await session.rollback()
return 0, 0, str(e)
# ========================================================================
# Upload-based folder indexing (works for all deployment modes)
# ========================================================================
async def _mirror_folder_structure_from_paths(
    session: AsyncSession,
    relative_paths: list[str],
    folder_name: str,
    search_space_id: int,
    user_id: str,
    root_folder_id: int | None = None,
) -> tuple[dict[str, int], int]:
    """Create DB ``Folder`` rows mirroring the tree implied by *relative_paths*.

    Unlike ``_mirror_folder_structure`` this never touches the filesystem:
    the directory tree is reconstructed purely from the client-supplied
    relative file paths.

    Returns ``(mapping, root_folder_id)`` where *mapping* maps each relative
    directory path to its ``Folder.id``; the empty-string key is the root.
    """
    # Collect every ancestor directory of every file path.
    ancestor_dirs: set[str] = set()
    for rel_file in relative_paths:
        parent_dir = str(Path(rel_file).parent)
        if parent_dir == ".":
            # File sits directly under the root — nothing to mirror.
            continue
        segments = Path(parent_dir).parts
        for depth in range(1, len(segments) + 1):
            ancestor_dirs.add(str(Path(*segments[:depth])))

    # Shallowest directories first so a parent always exists (and is mapped)
    # before any of its children are processed.
    ordered_dirs = sorted(ancestor_dirs, key=lambda d: d.count(os.sep))

    folder_ids: dict[str, int] = {}

    # Reuse the caller-supplied root folder when it still exists in the DB.
    if root_folder_id:
        root_row = (
            await session.execute(select(Folder).where(Folder.id == root_folder_id))
        ).scalar_one_or_none()
        if root_row is None:
            root_folder_id = None
        else:
            folder_ids[""] = root_row.id

    if not root_folder_id:
        new_root = Folder(
            name=folder_name,
            search_space_id=search_space_id,
            created_by_id=user_id,
            position="a0",
        )
        session.add(new_root)
        await session.flush()  # flush so new_root.id is populated
        folder_ids[""] = new_root.id
        root_folder_id = new_root.id

    for rel_dir in ordered_dirs:
        segments = Path(rel_dir).parts
        leaf_name = segments[-1]
        parent_key = str(Path(*segments[:-1])) if len(segments) > 1 else ""
        # Fall back to the root if the parent was somehow not mapped.
        parent_id = folder_ids.get(parent_key, folder_ids[""])

        found = (
            await session.execute(
                select(Folder).where(
                    Folder.name == leaf_name,
                    Folder.parent_id == parent_id,
                    Folder.search_space_id == search_space_id,
                )
            )
        ).scalar_one_or_none()
        if found is not None:
            folder_ids[rel_dir] = found.id
        else:
            child = Folder(
                name=leaf_name,
                parent_id=parent_id,
                search_space_id=search_space_id,
                created_by_id=user_id,
                position="a0",
            )
            session.add(child)
            await session.flush()  # flush so child.id is populated
            folder_ids[rel_dir] = child.id

    await session.flush()
    return folder_ids, root_folder_id
UPLOAD_BATCH_CONCURRENCY = 5
async def index_uploaded_files(
    session: AsyncSession,
    search_space_id: int,
    user_id: str,
    folder_name: str,
    root_folder_id: int,
    enable_summary: bool,
    file_mappings: list[dict],
    on_heartbeat_callback: HeartbeatCallbackType | None = None,
) -> tuple[int, int, str | None]:
    """Index files uploaded from the desktop app via temp paths.

    Each entry in *file_mappings* is ``{temp_path, relative_path, filename}``.
    This function mirrors the folder structure from the provided relative
    paths, then indexes each file exactly like ``_index_single_file`` but
    reads from the temp path. Temp files are cleaned up after processing.

    Returns ``(indexed_count, failed_count, error_summary_or_none)``.
    """
    task_logger = TaskLoggingService(session, search_space_id)
    log_entry = await task_logger.log_task_start(
        task_name="local_folder_indexing",
        source="uploaded_folder_indexing",
        message=f"Indexing {len(file_mappings)} uploaded file(s) for {folder_name}",
        metadata={"file_count": len(file_mappings)},
    )
    try:
        # Mirror the client-side directory tree into Folder rows first so
        # every file can be attached to its folder during indexing.
        all_relative_paths = [m["relative_path"] for m in file_mappings]
        _folder_mapping, root_folder_id = await _mirror_folder_structure_from_paths(
            session=session,
            relative_paths=all_relative_paths,
            folder_name=folder_name,
            search_space_id=search_space_id,
            user_id=user_id,
            root_folder_id=root_folder_id,
        )
        await session.flush()
        # Mark the root folder as "indexing" so the UI can show a spinner.
        await _set_indexing_flag(session, root_folder_id)

        page_limit_service = PageLimitService(session)
        pipeline = IndexingPipelineService(session)
        llm = await get_user_long_context_llm(session, user_id, search_space_id)

        indexed_count = 0
        failed_count = 0
        errors: list[str] = []

        for i, mapping in enumerate(file_mappings):
            temp_path = mapping["temp_path"]
            relative_path = mapping["relative_path"]
            filename = mapping["filename"]
            try:
                # Identity is derived from folder name + relative path so the
                # same file re-uploaded maps onto the same document.
                unique_id = f"{folder_name}:{relative_path}"
                uid_hash = compute_identifier_hash(
                    DocumentType.LOCAL_FOLDER_FILE.value,
                    unique_id,
                    search_space_id,
                )
                # Raw-bytes hash: cheap pre-check to skip unchanged files
                # before any (expensive) content extraction.
                raw_hash = await asyncio.to_thread(_compute_raw_file_hash, temp_path)
                existing = await check_document_by_unique_identifier(session, uid_hash)
                if existing:
                    stored_raw_hash = (existing.document_metadata or {}).get(
                        "raw_file_hash"
                    )
                    if stored_raw_hash and stored_raw_hash == raw_hash:
                        # Unchanged on disk: just refresh mtime and ensure the
                        # document isn't stuck in a non-READY state.
                        meta = dict(existing.document_metadata or {})
                        meta["mtime"] = datetime.now(UTC).timestamp()
                        existing.document_metadata = meta
                        if not DocumentStatus.is_state(
                            existing.status, DocumentStatus.READY
                        ):
                            existing.status = DocumentStatus.ready()
                        await session.commit()
                        continue
                try:
                    estimated_pages = await _check_page_limit_or_skip(
                        page_limit_service, user_id, temp_path
                    )
                except PageLimitExceededError:
                    logger.warning(f"Page limit exceeded, skipping: {relative_path}")
                    failed_count += 1
                    continue
                try:
                    content, content_hash = await _compute_file_content_hash(
                        temp_path, filename, search_space_id
                    )
                except Exception as e:
                    logger.warning(f"Could not read {relative_path}: {e}")
                    failed_count += 1
                    errors.append(f"{relative_path}: {e}")
                    continue
                if not content.strip():
                    # Nothing to index (empty or whitespace-only extraction).
                    failed_count += 1
                    continue
                if existing:
                    if existing.content_hash == content_hash:
                        # Raw bytes changed but extracted content didn't
                        # (e.g. metadata-only change): record the new raw
                        # hash so future raw-hash checks short-circuit.
                        meta = dict(existing.document_metadata or {})
                        meta["mtime"] = datetime.now(UTC).timestamp()
                        meta["raw_file_hash"] = raw_hash
                        existing.document_metadata = meta
                        if not DocumentStatus.is_state(
                            existing.status, DocumentStatus.READY
                        ):
                            existing.status = DocumentStatus.ready()
                        await session.commit()
                        continue
                    # Content changed: snapshot the old version before reindex.
                    await create_version_snapshot(session, existing)
                connector_doc = _build_connector_doc(
                    title=filename,
                    content=content,
                    relative_path=relative_path,
                    folder_name=folder_name,
                    search_space_id=search_space_id,
                    user_id=user_id,
                    enable_summary=enable_summary,
                )
                connector_doc.folder_id = await _resolve_folder_for_file(
                    session,
                    relative_path,
                    root_folder_id,
                    search_space_id,
                    user_id,
                )
                documents = await pipeline.prepare_for_indexing([connector_doc])
                if not documents:
                    failed_count += 1
                    continue
                db_doc = documents[0]
                await pipeline.index(db_doc, connector_doc, llm)
                await session.refresh(db_doc)
                doc_meta = dict(db_doc.document_metadata or {})
                doc_meta["mtime"] = datetime.now(UTC).timestamp()
                doc_meta["raw_file_hash"] = raw_hash
                db_doc.document_metadata = doc_meta
                await session.commit()
                if DocumentStatus.is_state(db_doc.status, DocumentStatus.READY):
                    indexed_count += 1
                    final_pages = _compute_final_pages(
                        page_limit_service, estimated_pages, len(content)
                    )
                    await page_limit_service.update_page_usage(
                        user_id, final_pages, allow_exceed=True
                    )
                else:
                    failed_count += 1
                # Heartbeat every 5 files so long uploads aren't reaped.
                if on_heartbeat_callback and (i + 1) % 5 == 0:
                    await on_heartbeat_callback(i + 1)
            except Exception as e:
                logger.exception(f"Error indexing uploaded file {relative_path}: {e}")
                await session.rollback()
                failed_count += 1
                errors.append(f"{relative_path}: {e}")
            finally:
                # Temp files are one-shot; always clean up, even on failure.
                with contextlib.suppress(OSError):
                    os.unlink(temp_path)

        error_summary = None
        if errors:
            error_summary = f"{failed_count} file(s) failed: " + "; ".join(errors[:5])
            if len(errors) > 5:
                error_summary += f" ... and {len(errors) - 5} more"
        await task_logger.log_task_success(
            log_entry,
            f"Upload indexing complete: {indexed_count} indexed, {failed_count} failed",
            {"indexed": indexed_count, "failed": failed_count},
        )
        return indexed_count, failed_count, error_summary
    except SQLAlchemyError as e:
        logger.exception(f"Database error during uploaded file indexing: {e}")
        await session.rollback()
        await task_logger.log_task_failure(
            log_entry, f"DB error: {e}", "Database error", {}
        )
        return 0, 0, f"Database error: {e}"
    except Exception as e:
        logger.exception(f"Error during uploaded file indexing: {e}")
        await task_logger.log_task_failure(
            log_entry, f"Error: {e}", "Unexpected error", {}
        )
        return 0, 0, str(e)
    finally:
        # Always clear the spinner flag, even on failure paths.
        await _clear_indexing_flag(session, root_folder_id)

View file

@ -1,4 +1,4 @@
"""Integration tests for local folder indexer — Tier 3 (I1-I5), Tier 4 (F1-F7), Tier 5 (P1), Tier 6 (B1-B2)."""
"""Integration tests for local folder indexer — Tier 3 (I1-I5), Tier 4 (F1-F7), Tier 5 (P1), Tier 6 (B1-B2), Tier 7 (IP1-IP3)."""
import os
from contextlib import asynccontextmanager
@ -1178,3 +1178,131 @@ class TestPageLimits:
await db_session.refresh(db_user)
assert db_user.pages_used > 0
assert db_user.pages_used <= db_user.pages_limit + 1
# ====================================================================
# Tier 7: Indexing Progress Flag (IP1-IP3)
# ====================================================================
class TestIndexingProgressFlag:
    """Tier 7 (IP1-IP3): lifecycle of the ``indexing_in_progress`` flag.

    The flag lives in the root folder's ``folder_metadata``; it must be
    present (True) while indexing runs and absent once indexing finishes,
    in both full-scan and single-file (Chokidar) modes.
    """

    @pytest.mark.usefixtures(*UNIFIED_FIXTURES)
    async def test_ip1_full_scan_clears_flag(
        self,
        db_session: AsyncSession,
        db_user: User,
        db_search_space: SearchSpace,
        tmp_path: Path,
    ):
        """IP1: Full-scan mode clears indexing_in_progress after completion."""
        from app.tasks.connector_indexers.local_folder_indexer import index_local_folder

        (tmp_path / "note.md").write_text("# Hello\n\nContent.")
        _, _, root_folder_id, _ = await index_local_folder(
            session=db_session,
            search_space_id=db_search_space.id,
            user_id=str(db_user.id),
            folder_path=str(tmp_path),
            folder_name="test-folder",
        )
        assert root_folder_id is not None
        # Re-read the root folder and verify the flag was removed, not just
        # set to False.
        root_folder = (
            await db_session.execute(select(Folder).where(Folder.id == root_folder_id))
        ).scalar_one()
        meta = root_folder.folder_metadata or {}
        assert "indexing_in_progress" not in meta

    @pytest.mark.usefixtures(*UNIFIED_FIXTURES)
    async def test_ip2_single_file_clears_flag(
        self,
        db_session: AsyncSession,
        db_user: User,
        db_search_space: SearchSpace,
        tmp_path: Path,
    ):
        """IP2: Single-file (Chokidar) mode clears indexing_in_progress after completion."""
        from app.tasks.connector_indexers.local_folder_indexer import index_local_folder

        # First pass: full scan to create the root folder.
        (tmp_path / "root.md").write_text("root")
        _, _, root_folder_id, _ = await index_local_folder(
            session=db_session,
            search_space_id=db_search_space.id,
            user_id=str(db_user.id),
            folder_path=str(tmp_path),
            folder_name="test-folder",
        )
        # Second pass: targeted single-file indexing (the watcher path).
        (tmp_path / "new.md").write_text("new file content")
        await index_local_folder(
            session=db_session,
            search_space_id=db_search_space.id,
            user_id=str(db_user.id),
            folder_path=str(tmp_path),
            folder_name="test-folder",
            target_file_paths=[str(tmp_path / "new.md")],
            root_folder_id=root_folder_id,
        )
        root_folder = (
            await db_session.execute(select(Folder).where(Folder.id == root_folder_id))
        ).scalar_one()
        meta = root_folder.folder_metadata or {}
        assert "indexing_in_progress" not in meta

    @pytest.mark.usefixtures(*UNIFIED_FIXTURES)
    async def test_ip3_flag_set_during_indexing(
        self,
        db_session: AsyncSession,
        db_user: User,
        db_search_space: SearchSpace,
        tmp_path: Path,
    ):
        """IP3: indexing_in_progress is True on the root folder while indexing is running."""
        from app.tasks.connector_indexers.local_folder_indexer import index_local_folder

        (tmp_path / "note.md").write_text("# Check flag\n\nDuring indexing.")
        from app.indexing_pipeline.indexing_pipeline_service import (
            IndexingPipelineService,
        )

        # Patch the pipeline's index() so we can observe the flag mid-run;
        # the original method is restored in the finally block below.
        original_index = IndexingPipelineService.index
        flag_observed = []

        async def patched_index(self_pipe, document, connector_doc, llm):
            # Look up the root folder (parent_id IS NULL) at the moment
            # indexing is actually running and record the flag's value.
            folder = (
                await db_session.execute(
                    select(Folder).where(
                        Folder.search_space_id == db_search_space.id,
                        Folder.parent_id.is_(None),
                    )
                )
            ).scalar_one_or_none()
            if folder:
                meta = folder.folder_metadata or {}
                flag_observed.append(meta.get("indexing_in_progress", False))
            return await original_index(self_pipe, document, connector_doc, llm)

        IndexingPipelineService.index = patched_index
        try:
            _, _, root_folder_id, _ = await index_local_folder(
                session=db_session,
                search_space_id=db_search_space.id,
                user_id=str(db_user.id),
                folder_path=str(tmp_path),
                folder_name="test-folder",
            )
        finally:
            IndexingPipelineService.index = original_index
        assert len(flag_observed) > 0, "index() should have been called at least once"
        assert all(flag_observed), "indexing_in_progress should be True during indexing"
        # After completion the flag must be gone again.
        root_folder = (
            await db_session.execute(select(Folder).where(Folder.id == root_folder_id))
        ).scalar_one()
        meta = root_folder.folder_metadata or {}
        assert "indexing_in_progress" not in meta

View file

@ -30,6 +30,8 @@ export const IPC_CHANNELS = {
FOLDER_SYNC_RENDERER_READY: 'folder-sync:renderer-ready',
FOLDER_SYNC_GET_PENDING_EVENTS: 'folder-sync:get-pending-events',
FOLDER_SYNC_ACK_EVENTS: 'folder-sync:ack-events',
FOLDER_SYNC_LIST_FILES: 'folder-sync:list-files',
FOLDER_SYNC_SEED_MTIMES: 'folder-sync:seed-mtimes',
BROWSE_FILES: 'browse:files',
READ_LOCAL_FILES: 'browse:read-local-files',
// Auth token sync across windows

View file

@ -19,6 +19,9 @@ import {
markRendererReady,
browseFiles,
readLocalFiles,
listFolderFiles,
seedFolderMtimes,
type WatchedFolderConfig,
} from '../modules/folder-watcher';
import { getShortcuts, setShortcuts, type ShortcutConfig } from '../modules/shortcuts';
import { getActiveSearchSpaceId, setActiveSearchSpaceId } from '../modules/active-search-space';
@ -91,6 +94,16 @@ export function registerIpcHandlers(): void {
acknowledgeFileEvents(eventIds)
);
ipcMain.handle(IPC_CHANNELS.FOLDER_SYNC_LIST_FILES, (_event, config: WatchedFolderConfig) =>
listFolderFiles(config)
);
ipcMain.handle(
IPC_CHANNELS.FOLDER_SYNC_SEED_MTIMES,
(_event, folderPath: string, mtimes: Record<string, number>) =>
seedFolderMtimes(folderPath, mtimes),
);
ipcMain.handle(IPC_CHANNELS.BROWSE_FILES, () => browseFiles());
ipcMain.handle(IPC_CHANNELS.READ_LOCAL_FILES, (_event, paths: string[]) =>

View file

@ -188,6 +188,31 @@ function walkFolderMtimes(config: WatchedFolderConfig): MtimeMap {
return result;
}
/** Snapshot of one file inside a watched folder, as returned by `listFolderFiles`. */
export interface FolderFileEntry {
  /** Path relative to the watched folder root (key in the mtime map). */
  relativePath: string;
  /** Absolute path on disk. */
  fullPath: string;
  /** File size in bytes at the time of the stat call. */
  size: number;
  /** Last-modification time in milliseconds, taken from the walked mtime map. */
  mtimeMs: number;
}
/**
 * Enumerate the files currently present under a watched folder.
 *
 * Walks the folder's mtime map and stats each file for its current size.
 * Entries whose file disappeared between the walk and the stat are skipped.
 */
export function listFolderFiles(config: WatchedFolderConfig): FolderFileEntry[] {
  const collected: FolderFileEntry[] = [];
  const knownMtimes = walkFolderMtimes(config);
  for (const relativePath of Object.keys(knownMtimes)) {
    const fullPath = path.join(config.path, relativePath);
    try {
      const { size } = fs.statSync(fullPath);
      collected.push({ relativePath, fullPath, size, mtimeMs: knownMtimes[relativePath] });
    } catch {
      // Race: file removed between the walk and statSync — drop it.
    }
  }
  return collected;
}
function getMainWindow(): BrowserWindow | null {
const windows = BrowserWindow.getAllWindows();
return windows.length > 0 ? windows[0] : null;
@ -424,14 +449,30 @@ export async function acknowledgeFileEvents(eventIds: string[]): Promise<{ ackno
const ackSet = new Set(eventIds);
let acknowledged = 0;
const foldersToUpdate = new Set<string>();
for (const [key, event] of outboxEvents.entries()) {
if (ackSet.has(event.id)) {
if (event.action !== 'unlink') {
const map = mtimeMaps.get(event.folderPath);
if (map) {
try {
map[event.relativePath] = fs.statSync(event.fullPath).mtimeMs;
foldersToUpdate.add(event.folderPath);
} catch {
// File may have been removed
}
}
}
outboxEvents.delete(key);
acknowledged += 1;
}
}
for (const fp of foldersToUpdate) {
persistMtimeMap(fp);
}
if (acknowledged > 0) {
persistOutbox();
}
@ -439,6 +480,17 @@ export async function acknowledgeFileEvents(eventIds: string[]): Promise<{ ackno
return { acknowledged };
}
/**
 * Merge server-provided mtimes into the stored map for a watched folder.
 *
 * Incoming values win over existing entries; the merged map is written to
 * both the in-memory cache and the persistent mtime store.
 */
export async function seedFolderMtimes(
  folderPath: string,
  mtimes: Record<string, number>,
): Promise<void> {
  const store = await getMtimeStore();
  const current: MtimeMap = store.get(folderPath) ?? {};
  const combined: MtimeMap = Object.assign({}, current, mtimes);
  mtimeMaps.set(folderPath, combined);
  store.set(folderPath, combined);
}
export async function pauseWatcher(): Promise<void> {
for (const [, entry] of watchers) {
if (entry.watcher) {

View file

@ -64,6 +64,9 @@ contextBridge.exposeInMainWorld('electronAPI', {
signalRendererReady: () => ipcRenderer.invoke(IPC_CHANNELS.FOLDER_SYNC_RENDERER_READY),
getPendingFileEvents: () => ipcRenderer.invoke(IPC_CHANNELS.FOLDER_SYNC_GET_PENDING_EVENTS),
acknowledgeFileEvents: (eventIds: string[]) => ipcRenderer.invoke(IPC_CHANNELS.FOLDER_SYNC_ACK_EVENTS, eventIds),
listFolderFiles: (config: any) => ipcRenderer.invoke(IPC_CHANNELS.FOLDER_SYNC_LIST_FILES, config),
seedFolderMtimes: (folderPath: string, mtimes: Record<string, number>) =>
ipcRenderer.invoke(IPC_CHANNELS.FOLDER_SYNC_SEED_MTIMES, folderPath, mtimes),
// Browse files via native dialog
browseFiles: () => ipcRenderer.invoke(IPC_CHANNELS.BROWSE_FILES),

View file

@ -1,8 +1,7 @@
"use client";
import { AnimatePresence, motion } from "motion/react";
import { useRouter } from "next/navigation";
import { useSearchParams } from "next/navigation";
import { useRouter, useSearchParams } from "next/navigation";
import { useTranslations } from "next-intl";
import { Suspense, useEffect, useState } from "react";
import { toast } from "sonner";

View file

@ -46,7 +46,6 @@ import { useParams } from "next/navigation";
import { useTranslations } from "next-intl";
import React, { useCallback, useContext, useEffect, useId, useMemo, useRef, useState } from "react";
import { toast } from "sonner";
import { useDebouncedValue } from "@/hooks/use-debounced-value";
import {
createLogMutationAtom,
deleteLogMutationAtom,
@ -96,6 +95,7 @@ import {
TableRow,
} from "@/components/ui/table";
import type { CreateLogRequest, Log, UpdateLogRequest } from "@/contracts/types/log.types";
import { useDebouncedValue } from "@/hooks/use-debounced-value";
import { type LogLevel, type LogStatus, useLogs, useLogsSummary } from "@/hooks/use-logs";
import { cn } from "@/lib/utils";
@ -728,10 +728,7 @@ function LogsFilters({
<motion.div className="relative w-full sm:w-auto" variants={fadeInScale}>
<Input
ref={inputRef}
className={cn(
"peer w-full sm:min-w-60 ps-9",
Boolean(filterInput) && "pe-9"
)}
className={cn("peer w-full sm:min-w-60 ps-9", Boolean(filterInput) && "pe-9")}
value={filterInput}
onChange={(e) => setFilterInput(e.target.value)}
placeholder={t("filter_by_message")}

View file

@ -38,18 +38,29 @@ import { removeChatTabAtom, updateChatTabTitleAtom } from "@/atoms/tabs/tabs.ato
import { currentUserAtom } from "@/atoms/user/user-query.atoms";
import { ThinkingStepsDataUI } from "@/components/assistant-ui/thinking-steps";
import { Thread } from "@/components/assistant-ui/thread";
import { useChatSessionStateSync } from "@/hooks/use-chat-session-state";
import { useMessagesSync } from "@/hooks/use-messages-sync";
import Loading from "../loading";
const MobileEditorPanel = dynamic(
() => import("@/components/editor-panel/editor-panel").then((m) => ({ default: m.MobileEditorPanel })),
() =>
import("@/components/editor-panel/editor-panel").then((m) => ({
default: m.MobileEditorPanel,
})),
{ ssr: false }
);
const MobileHitlEditPanel = dynamic(
() => import("@/components/hitl-edit-panel/hitl-edit-panel").then((m) => ({ default: m.MobileHitlEditPanel })),
() =>
import("@/components/hitl-edit-panel/hitl-edit-panel").then((m) => ({
default: m.MobileHitlEditPanel,
})),
{ ssr: false }
);
const MobileReportPanel = dynamic(
() => import("@/components/report-panel/report-panel").then((m) => ({ default: m.MobileReportPanel })),
() =>
import("@/components/report-panel/report-panel").then((m) => ({
default: m.MobileReportPanel,
})),
{ ssr: false }
);

View file

@ -51,131 +51,172 @@ const IS_QUICK_ASSIST_WINDOW =
// Dynamically import tool UI components to avoid loading them in main bundle
const GenerateReportToolUI = dynamic(
() => import("@/components/tool-ui/generate-report").then(m => ({ default: m.GenerateReportToolUI })),
() =>
import("@/components/tool-ui/generate-report").then((m) => ({
default: m.GenerateReportToolUI,
})),
{ ssr: false }
);
const GeneratePodcastToolUI = dynamic(
() => import("@/components/tool-ui/generate-podcast").then(m => ({ default: m.GeneratePodcastToolUI })),
() =>
import("@/components/tool-ui/generate-podcast").then((m) => ({
default: m.GeneratePodcastToolUI,
})),
{ ssr: false }
);
const GenerateVideoPresentationToolUI = dynamic(
() => import("@/components/tool-ui/video-presentation").then(m => ({ default: m.GenerateVideoPresentationToolUI })),
() =>
import("@/components/tool-ui/video-presentation").then((m) => ({
default: m.GenerateVideoPresentationToolUI,
})),
{ ssr: false }
);
const GenerateImageToolUI = dynamic(
() => import("@/components/tool-ui/generate-image").then(m => ({ default: m.GenerateImageToolUI })),
() =>
import("@/components/tool-ui/generate-image").then((m) => ({ default: m.GenerateImageToolUI })),
{ ssr: false }
);
const SaveMemoryToolUI = dynamic(
() => import("@/components/tool-ui/user-memory").then(m => ({ default: m.SaveMemoryToolUI })),
() => import("@/components/tool-ui/user-memory").then((m) => ({ default: m.SaveMemoryToolUI })),
{ ssr: false }
);
const RecallMemoryToolUI = dynamic(
() => import("@/components/tool-ui/user-memory").then(m => ({ default: m.RecallMemoryToolUI })),
() => import("@/components/tool-ui/user-memory").then((m) => ({ default: m.RecallMemoryToolUI })),
{ ssr: false }
);
const SandboxExecuteToolUI = dynamic(
() => import("@/components/tool-ui/sandbox-execute").then(m => ({ default: m.SandboxExecuteToolUI })),
() =>
import("@/components/tool-ui/sandbox-execute").then((m) => ({
default: m.SandboxExecuteToolUI,
})),
{ ssr: false }
);
const CreateNotionPageToolUI = dynamic(
() => import("@/components/tool-ui/notion").then(m => ({ default: m.CreateNotionPageToolUI })),
() => import("@/components/tool-ui/notion").then((m) => ({ default: m.CreateNotionPageToolUI })),
{ ssr: false }
);
const UpdateNotionPageToolUI = dynamic(
() => import("@/components/tool-ui/notion").then(m => ({ default: m.UpdateNotionPageToolUI })),
() => import("@/components/tool-ui/notion").then((m) => ({ default: m.UpdateNotionPageToolUI })),
{ ssr: false }
);
const DeleteNotionPageToolUI = dynamic(
() => import("@/components/tool-ui/notion").then(m => ({ default: m.DeleteNotionPageToolUI })),
() => import("@/components/tool-ui/notion").then((m) => ({ default: m.DeleteNotionPageToolUI })),
{ ssr: false }
);
const CreateLinearIssueToolUI = dynamic(
() => import("@/components/tool-ui/linear").then(m => ({ default: m.CreateLinearIssueToolUI })),
() => import("@/components/tool-ui/linear").then((m) => ({ default: m.CreateLinearIssueToolUI })),
{ ssr: false }
);
const UpdateLinearIssueToolUI = dynamic(
() => import("@/components/tool-ui/linear").then(m => ({ default: m.UpdateLinearIssueToolUI })),
() => import("@/components/tool-ui/linear").then((m) => ({ default: m.UpdateLinearIssueToolUI })),
{ ssr: false }
);
const DeleteLinearIssueToolUI = dynamic(
() => import("@/components/tool-ui/linear").then(m => ({ default: m.DeleteLinearIssueToolUI })),
() => import("@/components/tool-ui/linear").then((m) => ({ default: m.DeleteLinearIssueToolUI })),
{ ssr: false }
);
const CreateGoogleDriveFileToolUI = dynamic(
() => import("@/components/tool-ui/google-drive").then(m => ({ default: m.CreateGoogleDriveFileToolUI })),
() =>
import("@/components/tool-ui/google-drive").then((m) => ({
default: m.CreateGoogleDriveFileToolUI,
})),
{ ssr: false }
);
const DeleteGoogleDriveFileToolUI = dynamic(
() => import("@/components/tool-ui/google-drive").then(m => ({ default: m.DeleteGoogleDriveFileToolUI })),
() =>
import("@/components/tool-ui/google-drive").then((m) => ({
default: m.DeleteGoogleDriveFileToolUI,
})),
{ ssr: false }
);
const CreateOneDriveFileToolUI = dynamic(
() => import("@/components/tool-ui/onedrive").then(m => ({ default: m.CreateOneDriveFileToolUI })),
() =>
import("@/components/tool-ui/onedrive").then((m) => ({ default: m.CreateOneDriveFileToolUI })),
{ ssr: false }
);
const DeleteOneDriveFileToolUI = dynamic(
() => import("@/components/tool-ui/onedrive").then(m => ({ default: m.DeleteOneDriveFileToolUI })),
() =>
import("@/components/tool-ui/onedrive").then((m) => ({ default: m.DeleteOneDriveFileToolUI })),
{ ssr: false }
);
const CreateDropboxFileToolUI = dynamic(
() => import("@/components/tool-ui/dropbox").then(m => ({ default: m.CreateDropboxFileToolUI })),
() =>
import("@/components/tool-ui/dropbox").then((m) => ({ default: m.CreateDropboxFileToolUI })),
{ ssr: false }
);
const DeleteDropboxFileToolUI = dynamic(
() => import("@/components/tool-ui/dropbox").then(m => ({ default: m.DeleteDropboxFileToolUI })),
() =>
import("@/components/tool-ui/dropbox").then((m) => ({ default: m.DeleteDropboxFileToolUI })),
{ ssr: false }
);
const CreateCalendarEventToolUI = dynamic(
() => import("@/components/tool-ui/google-calendar").then(m => ({ default: m.CreateCalendarEventToolUI })),
() =>
import("@/components/tool-ui/google-calendar").then((m) => ({
default: m.CreateCalendarEventToolUI,
})),
{ ssr: false }
);
const UpdateCalendarEventToolUI = dynamic(
() => import("@/components/tool-ui/google-calendar").then(m => ({ default: m.UpdateCalendarEventToolUI })),
() =>
import("@/components/tool-ui/google-calendar").then((m) => ({
default: m.UpdateCalendarEventToolUI,
})),
{ ssr: false }
);
const DeleteCalendarEventToolUI = dynamic(
() => import("@/components/tool-ui/google-calendar").then(m => ({ default: m.DeleteCalendarEventToolUI })),
() =>
import("@/components/tool-ui/google-calendar").then((m) => ({
default: m.DeleteCalendarEventToolUI,
})),
{ ssr: false }
);
const CreateGmailDraftToolUI = dynamic(
() => import("@/components/tool-ui/gmail").then(m => ({ default: m.CreateGmailDraftToolUI })),
() => import("@/components/tool-ui/gmail").then((m) => ({ default: m.CreateGmailDraftToolUI })),
{ ssr: false }
);
const UpdateGmailDraftToolUI = dynamic(
() => import("@/components/tool-ui/gmail").then(m => ({ default: m.UpdateGmailDraftToolUI })),
() => import("@/components/tool-ui/gmail").then((m) => ({ default: m.UpdateGmailDraftToolUI })),
{ ssr: false }
);
const SendGmailEmailToolUI = dynamic(
() => import("@/components/tool-ui/gmail").then(m => ({ default: m.SendGmailEmailToolUI })),
() => import("@/components/tool-ui/gmail").then((m) => ({ default: m.SendGmailEmailToolUI })),
{ ssr: false }
);
const TrashGmailEmailToolUI = dynamic(
() => import("@/components/tool-ui/gmail").then(m => ({ default: m.TrashGmailEmailToolUI })),
() => import("@/components/tool-ui/gmail").then((m) => ({ default: m.TrashGmailEmailToolUI })),
{ ssr: false }
);
const CreateJiraIssueToolUI = dynamic(
() => import("@/components/tool-ui/jira").then(m => ({ default: m.CreateJiraIssueToolUI })),
() => import("@/components/tool-ui/jira").then((m) => ({ default: m.CreateJiraIssueToolUI })),
{ ssr: false }
);
const UpdateJiraIssueToolUI = dynamic(
() => import("@/components/tool-ui/jira").then(m => ({ default: m.UpdateJiraIssueToolUI })),
() => import("@/components/tool-ui/jira").then((m) => ({ default: m.UpdateJiraIssueToolUI })),
{ ssr: false }
);
const DeleteJiraIssueToolUI = dynamic(
() => import("@/components/tool-ui/jira").then(m => ({ default: m.DeleteJiraIssueToolUI })),
() => import("@/components/tool-ui/jira").then((m) => ({ default: m.DeleteJiraIssueToolUI })),
{ ssr: false }
);
const CreateConfluencePageToolUI = dynamic(
() => import("@/components/tool-ui/confluence").then(m => ({ default: m.CreateConfluencePageToolUI })),
() =>
import("@/components/tool-ui/confluence").then((m) => ({
default: m.CreateConfluencePageToolUI,
})),
{ ssr: false }
);
const UpdateConfluencePageToolUI = dynamic(
() => import("@/components/tool-ui/confluence").then(m => ({ default: m.UpdateConfluencePageToolUI })),
() =>
import("@/components/tool-ui/confluence").then((m) => ({
default: m.UpdateConfluencePageToolUI,
})),
{ ssr: false }
);
const DeleteConfluencePageToolUI = dynamic(
() => import("@/components/tool-ui/confluence").then(m => ({ default: m.DeleteConfluencePageToolUI })),
() =>
import("@/components/tool-ui/confluence").then((m) => ({
default: m.DeleteConfluencePageToolUI,
})),
{ ssr: false }
);

View file

@ -25,16 +25,38 @@ export interface ConnectFormProps {
export type ConnectFormComponent = FC<ConnectFormProps>;
const formMap: Record<string, () => Promise<{ default: FC<ConnectFormProps> }>> = {
TAVILY_API: () => import("./components/tavily-api-connect-form").then(m => ({ default: m.TavilyApiConnectForm })),
LINKUP_API: () => import("./components/linkup-api-connect-form").then(m => ({ default: m.LinkupApiConnectForm })),
BAIDU_SEARCH_API: () => import("./components/baidu-search-api-connect-form").then(m => ({ default: m.BaiduSearchApiConnectForm })),
ELASTICSEARCH_CONNECTOR: () => import("./components/elasticsearch-connect-form").then(m => ({ default: m.ElasticsearchConnectForm })),
BOOKSTACK_CONNECTOR: () => import("./components/bookstack-connect-form").then(m => ({ default: m.BookStackConnectForm })),
GITHUB_CONNECTOR: () => import("./components/github-connect-form").then(m => ({ default: m.GithubConnectForm })),
LUMA_CONNECTOR: () => import("./components/luma-connect-form").then(m => ({ default: m.LumaConnectForm })),
CIRCLEBACK_CONNECTOR: () => import("./components/circleback-connect-form").then(m => ({ default: m.CirclebackConnectForm })),
MCP_CONNECTOR: () => import("./components/mcp-connect-form").then(m => ({ default: m.MCPConnectForm })),
OBSIDIAN_CONNECTOR: () => import("./components/obsidian-connect-form").then(m => ({ default: m.ObsidianConnectForm })),
TAVILY_API: () =>
import("./components/tavily-api-connect-form").then((m) => ({
default: m.TavilyApiConnectForm,
})),
LINKUP_API: () =>
import("./components/linkup-api-connect-form").then((m) => ({
default: m.LinkupApiConnectForm,
})),
BAIDU_SEARCH_API: () =>
import("./components/baidu-search-api-connect-form").then((m) => ({
default: m.BaiduSearchApiConnectForm,
})),
ELASTICSEARCH_CONNECTOR: () =>
import("./components/elasticsearch-connect-form").then((m) => ({
default: m.ElasticsearchConnectForm,
})),
BOOKSTACK_CONNECTOR: () =>
import("./components/bookstack-connect-form").then((m) => ({
default: m.BookStackConnectForm,
})),
GITHUB_CONNECTOR: () =>
import("./components/github-connect-form").then((m) => ({ default: m.GithubConnectForm })),
LUMA_CONNECTOR: () =>
import("./components/luma-connect-form").then((m) => ({ default: m.LumaConnectForm })),
CIRCLEBACK_CONNECTOR: () =>
import("./components/circleback-connect-form").then((m) => ({
default: m.CirclebackConnectForm,
})),
MCP_CONNECTOR: () =>
import("./components/mcp-connect-form").then((m) => ({ default: m.MCPConnectForm })),
OBSIDIAN_CONNECTOR: () =>
import("./components/obsidian-connect-form").then((m) => ({ default: m.ObsidianConnectForm })),
};
const componentCache = new Map<string, ConnectFormComponent>();

View file

@ -14,29 +14,53 @@ export interface ConnectorConfigProps {
export type ConnectorConfigComponent = FC<ConnectorConfigProps>;
const configMap: Record<string, () => Promise<{ default: FC<ConnectorConfigProps> }>> = {
GOOGLE_DRIVE_CONNECTOR: () => import("./components/google-drive-config").then(m => ({ default: m.GoogleDriveConfig })),
TAVILY_API: () => import("./components/tavily-api-config").then(m => ({ default: m.TavilyApiConfig })),
LINKUP_API: () => import("./components/linkup-api-config").then(m => ({ default: m.LinkupApiConfig })),
BAIDU_SEARCH_API: () => import("./components/baidu-search-api-config").then(m => ({ default: m.BaiduSearchApiConfig })),
WEBCRAWLER_CONNECTOR: () => import("./components/webcrawler-config").then(m => ({ default: m.WebcrawlerConfig })),
ELASTICSEARCH_CONNECTOR: () => import("./components/elasticsearch-config").then(m => ({ default: m.ElasticsearchConfig })),
SLACK_CONNECTOR: () => import("./components/slack-config").then(m => ({ default: m.SlackConfig })),
DISCORD_CONNECTOR: () => import("./components/discord-config").then(m => ({ default: m.DiscordConfig })),
TEAMS_CONNECTOR: () => import("./components/teams-config").then(m => ({ default: m.TeamsConfig })),
DROPBOX_CONNECTOR: () => import("./components/dropbox-config").then(m => ({ default: m.DropboxConfig })),
ONEDRIVE_CONNECTOR: () => import("./components/onedrive-config").then(m => ({ default: m.OneDriveConfig })),
CONFLUENCE_CONNECTOR: () => import("./components/confluence-config").then(m => ({ default: m.ConfluenceConfig })),
BOOKSTACK_CONNECTOR: () => import("./components/bookstack-config").then(m => ({ default: m.BookStackConfig })),
GITHUB_CONNECTOR: () => import("./components/github-config").then(m => ({ default: m.GithubConfig })),
JIRA_CONNECTOR: () => import("./components/jira-config").then(m => ({ default: m.JiraConfig })),
CLICKUP_CONNECTOR: () => import("./components/clickup-config").then(m => ({ default: m.ClickUpConfig })),
LUMA_CONNECTOR: () => import("./components/luma-config").then(m => ({ default: m.LumaConfig })),
CIRCLEBACK_CONNECTOR: () => import("./components/circleback-config").then(m => ({ default: m.CirclebackConfig })),
MCP_CONNECTOR: () => import("./components/mcp-config").then(m => ({ default: m.MCPConfig })),
OBSIDIAN_CONNECTOR: () => import("./components/obsidian-config").then(m => ({ default: m.ObsidianConfig })),
COMPOSIO_GOOGLE_DRIVE_CONNECTOR: () => import("./components/composio-drive-config").then(m => ({ default: m.ComposioDriveConfig })),
COMPOSIO_GMAIL_CONNECTOR: () => import("./components/composio-gmail-config").then(m => ({ default: m.ComposioGmailConfig })),
COMPOSIO_GOOGLE_CALENDAR_CONNECTOR: () => import("./components/composio-calendar-config").then(m => ({ default: m.ComposioCalendarConfig })),
GOOGLE_DRIVE_CONNECTOR: () =>
import("./components/google-drive-config").then((m) => ({ default: m.GoogleDriveConfig })),
TAVILY_API: () =>
import("./components/tavily-api-config").then((m) => ({ default: m.TavilyApiConfig })),
LINKUP_API: () =>
import("./components/linkup-api-config").then((m) => ({ default: m.LinkupApiConfig })),
BAIDU_SEARCH_API: () =>
import("./components/baidu-search-api-config").then((m) => ({
default: m.BaiduSearchApiConfig,
})),
WEBCRAWLER_CONNECTOR: () =>
import("./components/webcrawler-config").then((m) => ({ default: m.WebcrawlerConfig })),
ELASTICSEARCH_CONNECTOR: () =>
import("./components/elasticsearch-config").then((m) => ({ default: m.ElasticsearchConfig })),
SLACK_CONNECTOR: () =>
import("./components/slack-config").then((m) => ({ default: m.SlackConfig })),
DISCORD_CONNECTOR: () =>
import("./components/discord-config").then((m) => ({ default: m.DiscordConfig })),
TEAMS_CONNECTOR: () =>
import("./components/teams-config").then((m) => ({ default: m.TeamsConfig })),
DROPBOX_CONNECTOR: () =>
import("./components/dropbox-config").then((m) => ({ default: m.DropboxConfig })),
ONEDRIVE_CONNECTOR: () =>
import("./components/onedrive-config").then((m) => ({ default: m.OneDriveConfig })),
CONFLUENCE_CONNECTOR: () =>
import("./components/confluence-config").then((m) => ({ default: m.ConfluenceConfig })),
BOOKSTACK_CONNECTOR: () =>
import("./components/bookstack-config").then((m) => ({ default: m.BookStackConfig })),
GITHUB_CONNECTOR: () =>
import("./components/github-config").then((m) => ({ default: m.GithubConfig })),
JIRA_CONNECTOR: () => import("./components/jira-config").then((m) => ({ default: m.JiraConfig })),
CLICKUP_CONNECTOR: () =>
import("./components/clickup-config").then((m) => ({ default: m.ClickUpConfig })),
LUMA_CONNECTOR: () => import("./components/luma-config").then((m) => ({ default: m.LumaConfig })),
CIRCLEBACK_CONNECTOR: () =>
import("./components/circleback-config").then((m) => ({ default: m.CirclebackConfig })),
MCP_CONNECTOR: () => import("./components/mcp-config").then((m) => ({ default: m.MCPConfig })),
OBSIDIAN_CONNECTOR: () =>
import("./components/obsidian-config").then((m) => ({ default: m.ObsidianConfig })),
COMPOSIO_GOOGLE_DRIVE_CONNECTOR: () =>
import("./components/composio-drive-config").then((m) => ({ default: m.ComposioDriveConfig })),
COMPOSIO_GMAIL_CONNECTOR: () =>
import("./components/composio-gmail-config").then((m) => ({ default: m.ComposioGmailConfig })),
COMPOSIO_GOOGLE_CALENDAR_CONNECTOR: () =>
import("./components/composio-calendar-config").then((m) => ({
default: m.ComposioCalendarConfig,
})),
};
const componentCache = new Map<string, ConnectorConfigComponent>();

View file

@ -302,12 +302,12 @@ export const AllConnectorsTab: FC<AllConnectorsTabProps> = ({
return (
<div className="space-y-8">
{/* Document/Files Connectors */}
{/* File Storage Integrations */}
{hasDocumentFileConnectors && (
<section>
<div className="flex items-center gap-2 mb-4">
<h3 className="text-sm font-semibold text-muted-foreground">
Document/Files Connectors
File Storage Integrations
</h3>
</div>
<div className="grid grid-cols-1 sm:grid-cols-2 gap-3">

View file

@ -20,7 +20,13 @@ import { searchSpaceSettingsDialogAtom } from "@/atoms/settings/settings-dialog.
import { DocumentUploadTab } from "@/components/sources/DocumentUploadTab";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import { Button } from "@/components/ui/button";
import { Dialog, DialogContent, DialogTitle } from "@/components/ui/dialog";
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
// Context for opening the dialog from anywhere
interface DocumentUploadDialogContextType {
@ -127,17 +133,15 @@ const DocumentUploadPopupContent: FC<{
onEscapeKeyDown={(e) => e.preventDefault()}
className="select-none max-w-2xl w-[95vw] sm:w-[640px] h-[min(440px,75dvh)] sm:h-[min(520px,80vh)] flex flex-col p-0 gap-0 overflow-hidden border border-border ring-0 bg-muted dark:bg-muted text-foreground [&>button]:right-3 sm:[&>button]:right-6 [&>button]:top-5 sm:[&>button]:top-8 [&>button]:opacity-80 [&>button]:hover:opacity-100 [&>button]:hover:bg-foreground/10 [&>button]:z-[100] [&>button>svg]:size-4 sm:[&>button>svg]:size-5"
>
<DialogTitle className="sr-only">Upload Document</DialogTitle>
<div className="flex-1 min-h-0 overflow-y-auto overscroll-contain">
<div className="sticky top-0 z-20 bg-muted px-4 sm:px-6 pt-6 sm:pt-8 pb-10">
<div className="flex items-center gap-2 mb-1 pr-8 sm:pr-0">
<h2 className="text-xl sm:text-3xl font-semibold tracking-tight">Upload Documents</h2>
</div>
<p className="text-xs sm:text-base text-muted-foreground/80 line-clamp-1">
<DialogHeader className="sticky top-0 z-20 bg-muted px-4 sm:px-6 pt-6 sm:pt-8 pb-10">
<DialogTitle className="text-xl sm:text-3xl font-semibold tracking-tight pr-8 sm:pr-0">
Upload Documents
</DialogTitle>
<DialogDescription className="text-xs sm:text-base text-muted-foreground/80 line-clamp-1">
Upload and sync your documents to your search space
</p>
</div>
</DialogDescription>
</DialogHeader>
<div className="px-4 sm:px-6 pb-4 sm:pb-6">
{!isLoading && !hasDocumentSummaryLLM ? (

View file

@ -1,7 +1,7 @@
"use client";
import { Slottable } from "@radix-ui/react-slot";
import { type ComponentPropsWithRef, forwardRef, type ReactNode } from "react";
import { type ComponentPropsWithRef, forwardRef, type ReactNode, useState } from "react";
import { Button } from "@/components/ui/button";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import { useMediaQuery } from "@/hooks/use-media-query";
@ -17,9 +17,13 @@ export const TooltipIconButton = forwardRef<HTMLButtonElement, TooltipIconButton
({ children, tooltip, side = "bottom", className, disableTooltip, ...rest }, ref) => {
const isTouchDevice = useMediaQuery("(pointer: coarse)");
const suppressTooltip = disableTooltip || isTouchDevice;
const [tooltipOpen, setTooltipOpen] = useState(false);
return (
<Tooltip open={suppressTooltip ? false : undefined}>
<Tooltip
open={suppressTooltip ? false : tooltipOpen}
onOpenChange={suppressTooltip ? undefined : setTooltipOpen}
>
<TooltipTrigger asChild>
<Button
variant="ghost"

View file

@ -49,6 +49,7 @@ export interface FolderDisplay {
position: string;
parentId: number | null;
searchSpaceId: number;
metadata?: Record<string, unknown> | null;
}
interface FolderNodeProps {
@ -354,7 +355,7 @@ export const FolderNode = React.memo(function FolderNode({
className="hidden sm:inline-flex h-6 w-6 shrink-0 opacity-0 group-hover:opacity-100 transition-opacity"
onClick={(e) => e.stopPropagation()}
>
<MoreHorizontal className="h-3.5 w-3.5" />
<MoreHorizontal className="h-3.5 w-3.5 text-muted-foreground" />
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end" className="w-40">

View file

@ -168,6 +168,12 @@ export function FolderTreeView({
return states;
}, [folders, docsByFolder, foldersByParent, mentionedDocIds]);
const folderMap = useMemo(() => {
const map: Record<number, FolderDisplay> = {};
for (const f of folders) map[f.id] = f;
return map;
}, [folders]);
const folderProcessingStates = useMemo(() => {
const states: Record<number, "idle" | "processing" | "failed"> = {};
@ -178,6 +184,11 @@ export function FolderTreeView({
);
let hasFailed = directDocs.some((d) => d.status?.state === "failed");
const folder = folderMap[folderId];
if (folder?.metadata?.indexing_in_progress) {
hasProcessing = true;
}
for (const child of foldersByParent[folderId] ?? []) {
const sub = compute(child.id);
hasProcessing = hasProcessing || sub.hasProcessing;
@ -195,7 +206,7 @@ export function FolderTreeView({
if (states[f.id] === undefined) compute(f.id);
}
return states;
}, [folders, docsByFolder, foldersByParent]);
}, [folders, docsByFolder, foldersByParent, folderMap]);
function renderLevel(parentId: number | null, depth: number): React.ReactNode[] {
const key = parentId ?? "root";
@ -283,7 +294,7 @@ export function FolderTreeView({
if (treeNodes.length === 0 && folders.length === 0 && documents.length === 0) {
return (
<div className="flex flex-1 flex-col items-center justify-center gap-1 px-4 py-12 text-muted-foreground">
<div className="flex flex-1 flex-col items-center justify-center gap-1 px-4 py-12 text-muted-foreground select-none">
<p className="text-sm font-medium">No documents found</p>
<p className="text-xs text-muted-foreground/70">
Use the upload button or connect a source above

View file

@ -59,13 +59,15 @@ const TAB_ITEMS = [
},
{
title: "Extreme Assist",
description: "Get inline writing suggestions powered by your knowledge base as you type in any app.",
description:
"Get inline writing suggestions powered by your knowledge base as you type in any app.",
src: "/homepage/hero_tutorial/extreme_assist.mp4",
featured: true,
},
{
title: "Watch Local Folder",
description: "Watch a local folder and automatically sync file changes to your knowledge base. Works great with Obsidian vaults.",
description:
"Watch a local folder and automatically sync file changes to your knowledge base. Works great with Obsidian vaults.",
src: "/homepage/hero_tutorial/folder_watch.mp4",
featured: true,
},
@ -84,7 +86,8 @@ const TAB_ITEMS = [
// },
{
title: "Video & Presentations",
description: "Create short videos and editable presentations with AI-generated visuals and narration from your sources.",
description:
"Create short videos and editable presentations with AI-generated visuals and narration from your sources.",
src: "/homepage/hero_tutorial/video_gen_surf.mp4",
featured: false,
},
@ -343,7 +346,12 @@ function DownloadButton() {
</DropdownMenuItem>
))}
<DropdownMenuItem asChild>
<a href={fallbackUrl} target="_blank" rel="noopener noreferrer" className="cursor-pointer">
<a
href={fallbackUrl}
target="_blank"
rel="noopener noreferrer"
className="cursor-pointer"
>
All downloads
</a>
</DropdownMenuItem>
@ -498,4 +506,3 @@ const TabVideo = memo(function TabVideo({ src }: { src: string }) {
});
const GITHUB_RELEASES_URL = "https://github.com/MODSetter/SurfSense/releases/latest";

View file

@ -144,13 +144,13 @@ const MobileNav = ({ navItems, isScrolled, scrolledBgClassName }: any) => {
ref={navRef}
animate={{ borderRadius: open ? "4px" : "2rem" }}
key={String(open)}
className={cn(
"relative mx-auto flex w-full max-w-[calc(100vw-2rem)] flex-col items-center justify-between px-4 py-2 lg:hidden transition-[background-color,border-color,box-shadow] duration-300",
isScrolled
? (scrolledBgClassName ??
"bg-white/80 backdrop-blur-md border border-white/20 shadow-lg dark:bg-neutral-950/80 dark:border-neutral-800/50")
: "bg-transparent border border-transparent"
)}
className={cn(
"relative mx-auto flex w-full max-w-[calc(100vw-2rem)] flex-col items-center justify-between px-4 py-2 lg:hidden transition-[background-color,border-color,box-shadow] duration-300",
isScrolled
? (scrolledBgClassName ??
"bg-white/80 backdrop-blur-md border border-white/20 shadow-lg dark:bg-neutral-950/80 dark:border-neutral-800/50")
: "bg-transparent border border-transparent"
)}
className={cn(
"relative mx-auto flex w-full max-w-[calc(100vw-2rem)] flex-col items-center justify-between px-4 py-2 lg:hidden transition-all duration-300",
isScrolled

View file

@ -1,9 +1,9 @@
"use client";
import { useRef, useState } from "react";
import { motion, useInView } from "motion/react";
import { IconPointerFilled } from "@tabler/icons-react";
import { Check, X } from "lucide-react";
import { motion, useInView } from "motion/react";
import { useRef, useState } from "react";
import { Badge } from "@/components/ui/badge";
import { Separator } from "@/components/ui/separator";
import { cn } from "@/lib/utils";
@ -40,8 +40,8 @@ export function WhySurfSense() {
Everything NotebookLM should have been
</h2>
<p className="mx-auto mt-4 max-w-2xl text-base text-muted-foreground">
Open source. No data limits. No vendor lock-in. Built for teams that
care about privacy and flexibility.
Open source. No data limits. No vendor lock-in. Built for teams that care about privacy
and flexibility.
</p>
</div>
@ -68,10 +68,7 @@ function UnlimitedSkeleton({ className }: { className?: string }) {
];
return (
<div
ref={ref}
className={cn("flex h-full flex-col justify-center gap-2.5", className)}
>
<div ref={ref} className={cn("flex h-full flex-col justify-center gap-2.5", className)}>
{items.map((item, index) => (
<motion.div
key={item.label}
@ -81,9 +78,7 @@ function UnlimitedSkeleton({ className }: { className?: string }) {
className="flex items-center gap-2 rounded-lg bg-background px-3 py-2 shadow-sm ring-1 ring-border"
>
<span className="text-sm">{item.icon}</span>
<span className="min-w-[60px] text-xs font-medium text-foreground">
{item.label}
</span>
<span className="min-w-[60px] text-xs font-medium text-foreground">{item.label}</span>
<div className="ml-auto flex items-center gap-2">
<span className="text-[10px] text-muted-foreground line-through">
{item.notebookLm}
@ -125,10 +120,7 @@ function LLMFlexibilitySkeleton({ className }: { className?: string }) {
return (
<div
ref={ref}
className={cn(
"flex h-full flex-col items-center justify-center gap-3",
className,
)}
className={cn("flex h-full flex-col items-center justify-center gap-3", className)}
>
<motion.div
initial={{ opacity: 0, y: 8 }}
@ -146,19 +138,13 @@ function LLMFlexibilitySkeleton({ className }: { className?: string }) {
transition={{ duration: 0.3, delay: 0.1 + index * 0.1 }}
className={cn(
"flex w-full cursor-pointer items-center gap-2 rounded-lg px-2.5 py-1.5 text-left transition-all",
selected === index
? "bg-background shadow-sm ring-1 ring-border"
: "hover:bg-accent",
selected === index ? "bg-background shadow-sm ring-1 ring-border" : "hover:bg-accent"
)}
>
<div className={cn("size-2 shrink-0 rounded-full", model.color)} />
<div className="min-w-0">
<p className="truncate text-xs font-medium text-foreground">
{model.name}
</p>
<p className="text-[10px] text-muted-foreground">
{model.provider}
</p>
<p className="truncate text-xs font-medium text-foreground">{model.name}</p>
<p className="text-[10px] text-muted-foreground">{model.provider}</p>
</div>
{selected === index && (
<motion.div
@ -220,10 +206,7 @@ function MultiplayerSkeleton({ className }: { className?: string }) {
return (
<div
ref={ref}
className={cn(
"relative flex h-full items-center justify-center overflow-visible",
className,
)}
className={cn("relative flex h-full items-center justify-center overflow-visible", className)}
>
<motion.div
className="relative w-full max-w-[160px] rounded-lg bg-background p-3 shadow-sm ring-1 ring-border"
@ -246,10 +229,7 @@ function MultiplayerSkeleton({ className }: { className?: string }) {
className="my-1.5 flex items-center"
style={{ paddingLeft: line.indent * 8 }}
>
<div
className={cn("h-1.5 rounded-full", line.color)}
style={{ width: line.width }}
/>
<div className={cn("h-1.5 rounded-full", line.color)} style={{ width: line.width }} />
</div>
))}
</motion.div>
@ -295,9 +275,7 @@ function MultiplayerSkeleton({ className }: { className?: string }) {
<div className="flex size-5 items-center justify-center rounded-full bg-white/20 text-[9px] font-bold text-white">
{collaborator.name[0]}
</div>
<span className="shrink-0 text-[10px] font-medium text-white">
{collaborator.name}
</span>
<span className="shrink-0 text-[10px] font-medium text-white">{collaborator.name}</span>
<span className="rounded bg-white/20 px-1 py-px text-[8px] text-white/80">
{collaborator.role}
</span>
@ -321,9 +299,7 @@ function FeatureCard({
<div className="flex h-full flex-col justify-between bg-card p-10 first:rounded-l-2xl last:rounded-r-2xl">
<div className="h-60 w-full overflow-visible rounded-md">{skeleton}</div>
<div className="mt-4">
<h3 className="text-base font-bold tracking-tight text-card-foreground">
{title}
</h3>
<h3 className="text-base font-bold tracking-tight text-card-foreground">{title}</h3>
<p className="mt-2 text-sm leading-relaxed tracking-tight text-muted-foreground">
{description}
</p>
@ -408,9 +384,7 @@ function ComparisonStrip() {
transition={{ duration: 0.3, delay: 0.15 + index * 0.06 }}
>
<div className="grid grid-cols-3 items-center px-4 py-2.5 text-sm sm:px-6">
<span className="font-medium text-card-foreground">
{row.feature}
</span>
<span className="font-medium text-card-foreground">{row.feature}</span>
<span className="flex justify-center">
{typeof row.notebookLm === "boolean" ? (
row.notebookLm ? (
@ -419,9 +393,7 @@ function ComparisonStrip() {
<X className="size-4 text-muted-foreground/40" />
)
) : (
<span className="text-muted-foreground">
{row.notebookLm}
</span>
<span className="text-muted-foreground">{row.notebookLm}</span>
)}
</span>
<span className="flex justify-center">
@ -436,9 +408,7 @@ function ComparisonStrip() {
)}
</span>
</div>
{index !== comparisonRows.length - 1 && (
<Separator />
)}
{index !== comparisonRows.length - 1 && <Separator />}
</motion.div>
))}
</motion.div>

View file

@ -152,16 +152,10 @@ export function CreateSearchSpaceDialog({ open, onOpenChange }: CreateSearchSpac
<Button
type="submit"
disabled={isSubmitting}
className="h-8 sm:h-9 text-xs sm:text-sm"
className="h-8 sm:h-9 text-xs sm:text-sm relative"
>
{isSubmitting ? (
<>
<Spinner size="sm" className="mr-1.5" />
{t("creating")}
</>
) : (
<>{t("create_button")}</>
)}
<span className={isSubmitting ? "opacity-0" : ""}>{t("create_button")}</span>
{isSubmitting && <Spinner size="sm" className="absolute" />}
</Button>
</DialogFooter>
</form>

View file

@ -23,7 +23,11 @@ import { FolderPickerDialog } from "@/components/documents/FolderPickerDialog";
import { FolderTreeView } from "@/components/documents/FolderTreeView";
import { VersionHistoryDialog } from "@/components/documents/version-history";
import { EXPORT_FILE_EXTENSIONS } from "@/components/shared/ExportMenuItems";
import { FolderWatchDialog, type SelectedFolder } from "@/components/sources/FolderWatchDialog";
import {
DEFAULT_EXCLUDE_PATTERNS,
FolderWatchDialog,
type SelectedFolder,
} from "@/components/sources/FolderWatchDialog";
import {
AlertDialog,
AlertDialogAction,
@ -46,6 +50,8 @@ import { useElectronAPI } from "@/hooks/use-platform";
import { documentsApiService } from "@/lib/apis/documents-api.service";
import { foldersApiService } from "@/lib/apis/folders-api.service";
import { authenticatedFetch } from "@/lib/auth-utils";
import { uploadFolderScan } from "@/lib/folder-sync-upload";
import { getSupportedExtensionsSet } from "@/lib/supported-extensions";
import { queries } from "@/zero/queries/index";
import { SidebarSlideOutPanel } from "./SidebarSlideOutPanel";
@ -114,48 +120,48 @@ export function DocumentsSidebar({
setFolderWatchOpen(true);
}, []);
useEffect(() => {
const refreshWatchedIds = useCallback(async () => {
if (!electronAPI?.getWatchedFolders) return;
const api = electronAPI;
async function loadWatchedIds() {
const folders = await api.getWatchedFolders();
const folders = await api.getWatchedFolders();
if (folders.length === 0) {
try {
const backendFolders = await documentsApiService.getWatchedFolders(searchSpaceId);
for (const bf of backendFolders) {
const meta = bf.metadata as Record<string, unknown> | null;
if (!meta?.watched || !meta.folder_path) continue;
await api.addWatchedFolder({
path: meta.folder_path as string,
name: bf.name,
rootFolderId: bf.id,
searchSpaceId: bf.search_space_id,
excludePatterns: (meta.exclude_patterns as string[]) ?? [],
fileExtensions: (meta.file_extensions as string[] | null) ?? null,
active: true,
});
}
const recovered = await api.getWatchedFolders();
const ids = new Set(
recovered.filter((f) => f.rootFolderId != null).map((f) => f.rootFolderId as number)
);
setWatchedFolderIds(ids);
return;
} catch (err) {
console.error("[DocumentsSidebar] Recovery from backend failed:", err);
if (folders.length === 0) {
try {
const backendFolders = await documentsApiService.getWatchedFolders(searchSpaceId);
for (const bf of backendFolders) {
const meta = bf.metadata as Record<string, unknown> | null;
if (!meta?.watched || !meta.folder_path) continue;
await api.addWatchedFolder({
path: meta.folder_path as string,
name: bf.name,
rootFolderId: bf.id,
searchSpaceId: bf.search_space_id,
excludePatterns: (meta.exclude_patterns as string[]) ?? [],
fileExtensions: (meta.file_extensions as string[] | null) ?? null,
active: true,
});
}
const recovered = await api.getWatchedFolders();
const ids = new Set(
recovered.filter((f) => f.rootFolderId != null).map((f) => f.rootFolderId as number)
);
setWatchedFolderIds(ids);
return;
} catch (err) {
console.error("[DocumentsSidebar] Recovery from backend failed:", err);
}
const ids = new Set(
folders.filter((f) => f.rootFolderId != null).map((f) => f.rootFolderId as number)
);
setWatchedFolderIds(ids);
}
loadWatchedIds();
const ids = new Set(
folders.filter((f) => f.rootFolderId != null).map((f) => f.rootFolderId as number)
);
setWatchedFolderIds(ids);
}, [searchSpaceId, electronAPI]);
useEffect(() => {
refreshWatchedIds();
}, [refreshWatchedIds]);
const { mutateAsync: deleteDocumentMutation } = useAtomValue(deleteDocumentMutationAtom);
const [sidebarDocs, setSidebarDocs] = useAtom(sidebarSelectedDocumentsAtom);
@ -192,6 +198,7 @@ export function DocumentsSidebar({
position: f.position,
parentId: f.parentId ?? null,
searchSpaceId: f.searchSpaceId,
metadata: f.metadata as Record<string, unknown> | null | undefined,
})),
[zeroFolders]
);
@ -304,14 +311,17 @@ export function DocumentsSidebar({
}
try {
await documentsApiService.folderIndex(searchSpaceId, {
folder_path: matched.path,
folder_name: matched.name,
search_space_id: searchSpaceId,
root_folder_id: folder.id,
file_extensions: matched.fileExtensions ?? undefined,
toast.info(`Re-scanning folder: ${matched.name}`);
await uploadFolderScan({
folderPath: matched.path,
folderName: matched.name,
searchSpaceId,
excludePatterns: matched.excludePatterns ?? DEFAULT_EXCLUDE_PATTERNS,
fileExtensions: matched.fileExtensions ?? Array.from(getSupportedExtensionsSet()),
enableSummary: false,
rootFolderId: folder.id,
});
toast.success(`Re-scanning folder: ${matched.name}`);
toast.success(`Re-scan complete: ${matched.name}`);
} catch (err) {
toast.error((err as Error)?.message || "Failed to re-scan folder");
}
@ -337,8 +347,9 @@ export function DocumentsSidebar({
console.error("[DocumentsSidebar] Failed to clear watched metadata:", err);
}
toast.success(`Stopped watching: ${matched.name}`);
refreshWatchedIds();
},
[electronAPI]
[electronAPI, refreshWatchedIds]
);
const handleRenameFolder = useCallback(async (folder: FolderDisplay, newName: string) => {
@ -867,6 +878,7 @@ export function DocumentsSidebar({
}}
searchSpaceId={searchSpaceId}
initialFolder={watchInitialFolder}
onSuccess={refreshWatchedIds}
/>
)}

View file

@ -91,13 +91,12 @@ export function SidebarSlideOutPanel({
{/* Panel extending from sidebar's right edge, flush with the wrapper border */}
<motion.div
style={{ width }}
initial={{ x: -width }}
animate={{ x: 0 }}
exit={{ x: -width }}
initial={{ width: 0 }}
animate={{ width }}
exit={{ width: 0 }}
transition={{ type: "tween", duration: 0.2, ease: [0.4, 0, 0.2, 1] }}
className="absolute z-20 overflow-hidden"
style={{ width, left: "100%", top: -1, bottom: -1 }}
style={{ left: "100%", top: -1, bottom: -1 }}
>
<div
style={{ width }}

View file

@ -20,7 +20,10 @@ import { GeneratePodcastToolUI } from "@/components/tool-ui/generate-podcast";
import { GenerateReportToolUI } from "@/components/tool-ui/generate-report";
const GenerateVideoPresentationToolUI = dynamic(
() => import("@/components/tool-ui/video-presentation").then((m) => ({ default: m.GenerateVideoPresentationToolUI })),
() =>
import("@/components/tool-ui/video-presentation").then((m) => ({
default: m.GenerateVideoPresentationToolUI,
})),
{ ssr: false }
);

View file

@ -1,43 +1,62 @@
"use client";
import dynamic from "next/dynamic";
import { useAtom } from "jotai";
import { Bot, Brain, Eye, FileText, Globe, ImageIcon, MessageSquare, Shield } from "lucide-react";
import dynamic from "next/dynamic";
import { useTranslations } from "next-intl";
import type React from "react";
import { searchSpaceSettingsDialogAtom } from "@/atoms/settings/settings-dialog.atoms";
import { SettingsDialog } from "@/components/settings/settings-dialog";
const GeneralSettingsManager = dynamic(
() => import("@/components/settings/general-settings-manager").then(m => ({ default: m.GeneralSettingsManager })),
() =>
import("@/components/settings/general-settings-manager").then((m) => ({
default: m.GeneralSettingsManager,
})),
{ ssr: false }
);
const ModelConfigManager = dynamic(
() => import("@/components/settings/model-config-manager").then(m => ({ default: m.ModelConfigManager })),
() =>
import("@/components/settings/model-config-manager").then((m) => ({
default: m.ModelConfigManager,
})),
{ ssr: false }
);
const LLMRoleManager = dynamic(
() => import("@/components/settings/llm-role-manager").then(m => ({ default: m.LLMRoleManager })),
() =>
import("@/components/settings/llm-role-manager").then((m) => ({ default: m.LLMRoleManager })),
{ ssr: false }
);
const ImageModelManager = dynamic(
() => import("@/components/settings/image-model-manager").then(m => ({ default: m.ImageModelManager })),
() =>
import("@/components/settings/image-model-manager").then((m) => ({
default: m.ImageModelManager,
})),
{ ssr: false }
);
const VisionModelManager = dynamic(
() => import("@/components/settings/vision-model-manager").then(m => ({ default: m.VisionModelManager })),
() =>
import("@/components/settings/vision-model-manager").then((m) => ({
default: m.VisionModelManager,
})),
{ ssr: false }
);
const RolesManager = dynamic(
() => import("@/components/settings/roles-manager").then(m => ({ default: m.RolesManager })),
() => import("@/components/settings/roles-manager").then((m) => ({ default: m.RolesManager })),
{ ssr: false }
);
const PromptConfigManager = dynamic(
() => import("@/components/settings/prompt-config-manager").then(m => ({ default: m.PromptConfigManager })),
() =>
import("@/components/settings/prompt-config-manager").then((m) => ({
default: m.PromptConfigManager,
})),
{ ssr: false }
);
const PublicChatSnapshotsManager = dynamic(
() => import("@/components/public-chat-snapshots/public-chat-snapshots-manager").then(m => ({ default: m.PublicChatSnapshotsManager })),
() =>
import("@/components/public-chat-snapshots/public-chat-snapshots-manager").then((m) => ({
default: m.PublicChatSnapshotsManager,
})),
{ ssr: false }
);

View file

@ -1,8 +1,8 @@
"use client";
import dynamic from "next/dynamic";
import { useAtom } from "jotai";
import { Globe, KeyRound, Monitor, Receipt, Sparkles, User } from "lucide-react";
import dynamic from "next/dynamic";
import { useTranslations } from "next-intl";
import { useMemo } from "react";
import { userSettingsDialogAtom } from "@/atoms/settings/settings-dialog.atoms";
@ -10,27 +10,45 @@ import { SettingsDialog } from "@/components/settings/settings-dialog";
import { usePlatform } from "@/hooks/use-platform";
const ProfileContent = dynamic(
() => import("@/app/dashboard/[search_space_id]/user-settings/components/ProfileContent").then(m => ({ default: m.ProfileContent })),
() =>
import("@/app/dashboard/[search_space_id]/user-settings/components/ProfileContent").then(
(m) => ({ default: m.ProfileContent })
),
{ ssr: false }
);
const ApiKeyContent = dynamic(
() => import("@/app/dashboard/[search_space_id]/user-settings/components/ApiKeyContent").then(m => ({ default: m.ApiKeyContent })),
() =>
import("@/app/dashboard/[search_space_id]/user-settings/components/ApiKeyContent").then(
(m) => ({ default: m.ApiKeyContent })
),
{ ssr: false }
);
const PromptsContent = dynamic(
() => import("@/app/dashboard/[search_space_id]/user-settings/components/PromptsContent").then(m => ({ default: m.PromptsContent })),
() =>
import("@/app/dashboard/[search_space_id]/user-settings/components/PromptsContent").then(
(m) => ({ default: m.PromptsContent })
),
{ ssr: false }
);
const CommunityPromptsContent = dynamic(
() => import("@/app/dashboard/[search_space_id]/user-settings/components/CommunityPromptsContent").then(m => ({ default: m.CommunityPromptsContent })),
() =>
import(
"@/app/dashboard/[search_space_id]/user-settings/components/CommunityPromptsContent"
).then((m) => ({ default: m.CommunityPromptsContent })),
{ ssr: false }
);
const PurchaseHistoryContent = dynamic(
() => import("@/app/dashboard/[search_space_id]/user-settings/components/PurchaseHistoryContent").then(m => ({ default: m.PurchaseHistoryContent })),
() =>
import(
"@/app/dashboard/[search_space_id]/user-settings/components/PurchaseHistoryContent"
).then((m) => ({ default: m.PurchaseHistoryContent })),
{ ssr: false }
);
const DesktopContent = dynamic(
() => import("@/app/dashboard/[search_space_id]/user-settings/components/DesktopContent").then(m => ({ default: m.DesktopContent })),
() =>
import("@/app/dashboard/[search_space_id]/user-settings/components/DesktopContent").then(
(m) => ({ default: m.DesktopContent })
),
{ ssr: false }
);

View file

@ -341,36 +341,36 @@ export function DocumentUploadTab({
</button>
)
) : (
<div
role="button"
tabIndex={0}
className="flex flex-col items-center gap-4 py-12 px-4 cursor-pointer w-full bg-transparent border-none"
onClick={() => {
if (!isElectron) fileInputRef.current?.click();
}}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
if (!isElectron) fileInputRef.current?.click();
}
}}
>
<Upload className="h-10 w-10 text-muted-foreground" />
<div className="text-center space-y-1.5">
<p className="text-base font-medium">
{isElectron ? "Select files or folder" : "Tap to select files or folder"}
</p>
<p className="text-sm text-muted-foreground">{t("file_size_limit")}</p>
</div>
<div
className="w-full mt-1"
onClick={(e) => e.stopPropagation()}
onKeyDown={(e) => e.stopPropagation()}
role="group"
role="button"
tabIndex={0}
className="flex flex-col items-center gap-4 py-12 px-4 cursor-pointer w-full bg-transparent border-none"
onClick={() => {
if (!isElectron) fileInputRef.current?.click();
}}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
if (!isElectron) fileInputRef.current?.click();
}
}}
>
{renderBrowseButton({ fullWidth: true })}
<Upload className="h-10 w-10 text-muted-foreground" />
<div className="text-center space-y-1.5">
<p className="text-base font-medium">
{isElectron ? "Select files or folder" : "Tap to select files or folder"}
</p>
<p className="text-sm text-muted-foreground">{t("file_size_limit")}</p>
</div>
<div
className="w-full mt-1"
onClick={(e) => e.stopPropagation()}
onKeyDown={(e) => e.stopPropagation()}
role="group"
>
{renderBrowseButton({ fullWidth: true })}
</div>
</div>
</div>
)}
</div>

View file

@ -1,7 +1,7 @@
"use client";
import { X } from "lucide-react";
import { useCallback, useEffect, useMemo, useState } from "react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { toast } from "sonner";
import { Button } from "@/components/ui/button";
import {
@ -13,7 +13,7 @@ import {
} from "@/components/ui/dialog";
import { Spinner } from "@/components/ui/spinner";
import { Switch } from "@/components/ui/switch";
import { documentsApiService } from "@/lib/apis/documents-api.service";
import { type FolderSyncProgress, uploadFolderScan } from "@/lib/folder-sync-upload";
import { getSupportedExtensionsSet } from "@/lib/supported-extensions";
export interface SelectedFolder {
@ -29,7 +29,7 @@ interface FolderWatchDialogProps {
initialFolder?: SelectedFolder | null;
}
const DEFAULT_EXCLUDE_PATTERNS = [
export const DEFAULT_EXCLUDE_PATTERNS = [
".git",
"node_modules",
"__pycache__",
@ -48,6 +48,8 @@ export function FolderWatchDialog({
const [selectedFolder, setSelectedFolder] = useState<SelectedFolder | null>(null);
const [shouldSummarize, setShouldSummarize] = useState(false);
const [submitting, setSubmitting] = useState(false);
const [progress, setProgress] = useState<FolderSyncProgress | null>(null);
const abortRef = useRef<AbortController | null>(null);
useEffect(() => {
if (open && initialFolder) {
@ -64,33 +66,42 @@ export function FolderWatchDialog({
const folderPath = await api.selectFolder();
if (!folderPath) return;
const folderName = folderPath.split("/").pop() || folderPath.split("\\").pop() || folderPath;
const folderName = folderPath.split(/[/\\]/).pop() || folderPath;
setSelectedFolder({ path: folderPath, name: folderName });
}, []);
const handleCancel = useCallback(() => {
abortRef.current?.abort();
}, []);
const handleSubmit = useCallback(async () => {
if (!selectedFolder) return;
const api = window.electronAPI;
if (!api) return;
const controller = new AbortController();
abortRef.current = controller;
setSubmitting(true);
try {
const result = await documentsApiService.folderIndex(searchSpaceId, {
folder_path: selectedFolder.path,
folder_name: selectedFolder.name,
search_space_id: searchSpaceId,
enable_summary: shouldSummarize,
file_extensions: supportedExtensions,
});
setProgress(null);
const rootFolderId = (result as { root_folder_id?: number })?.root_folder_id ?? null;
try {
const rootFolderId = await uploadFolderScan({
folderPath: selectedFolder.path,
folderName: selectedFolder.name,
searchSpaceId,
excludePatterns: DEFAULT_EXCLUDE_PATTERNS,
fileExtensions: supportedExtensions,
enableSummary: shouldSummarize,
onProgress: setProgress,
signal: controller.signal,
});
await api.addWatchedFolder({
path: selectedFolder.path,
name: selectedFolder.name,
excludePatterns: DEFAULT_EXCLUDE_PATTERNS,
fileExtensions: supportedExtensions,
rootFolderId,
rootFolderId: rootFolderId ?? null,
searchSpaceId,
active: true,
});
@ -98,12 +109,19 @@ export function FolderWatchDialog({
toast.success(`Watching folder: ${selectedFolder.name}`);
setSelectedFolder(null);
setShouldSummarize(false);
setProgress(null);
onOpenChange(false);
onSuccess?.();
} catch (err) {
toast.error((err as Error)?.message || "Failed to watch folder");
if ((err as Error)?.name === "AbortError") {
toast.info("Folder sync cancelled. Partial progress was saved.");
} else {
toast.error((err as Error)?.message || "Failed to watch folder");
}
} finally {
abortRef.current = null;
setSubmitting(false);
setProgress(null);
}
}, [
selectedFolder,
@ -119,21 +137,44 @@ export function FolderWatchDialog({
if (!nextOpen && !submitting) {
setSelectedFolder(null);
setShouldSummarize(false);
setProgress(null);
}
onOpenChange(nextOpen);
},
[onOpenChange, submitting]
);
const progressLabel = useMemo(() => {
if (!progress) return null;
switch (progress.phase) {
case "listing":
return "Scanning folder...";
case "checking":
return `Checking ${progress.total} file(s)...`;
case "uploading":
return `Uploading ${progress.uploaded}/${progress.total} file(s)...`;
case "finalizing":
return "Finalizing...";
case "done":
return "Done!";
default:
return null;
}
}, [progress]);
return (
<Dialog open={open} onOpenChange={handleOpenChange}>
<DialogContent className="sm:max-w-md select-none">
<DialogHeader>
<DialogTitle>Watch Local Folder</DialogTitle>
<DialogDescription>Select a folder to sync and watch for changes.</DialogDescription>
<DialogContent className="sm:max-w-md select-none p-0 gap-0 overflow-hidden bg-muted dark:bg-muted border border-border [&>button]:opacity-80 [&>button]:hover:opacity-100 [&>button]:hover:bg-foreground/10">
<DialogHeader className="px-4 sm:px-6 pt-5 sm:pt-6 pb-3">
<DialogTitle className="text-lg sm:text-xl font-semibold tracking-tight">
Watch Local Folder
</DialogTitle>
<DialogDescription className="text-xs sm:text-sm text-muted-foreground/80">
Select a folder to sync and watch for changes
</DialogDescription>
</DialogHeader>
<div className="space-y-3 pt-2">
<div className="flex flex-col gap-3 px-4 sm:px-6 pb-4 sm:pb-6 min-h-[17rem]">
{selectedFolder ? (
<div className="flex items-center gap-2 py-1.5 pl-4 pr-2 rounded-md bg-slate-400/5 dark:bg-white/5 overflow-hidden">
<div className="min-w-0 flex-1 select-text">
@ -156,7 +197,7 @@ export function FolderWatchDialog({
<button
type="button"
onClick={handleSelectFolder}
className="flex w-full items-center justify-center gap-2 rounded-lg border-2 border-dashed border-muted-foreground/30 py-8 text-sm text-muted-foreground transition-colors hover:border-foreground/50 hover:text-foreground"
className="flex flex-1 w-full items-center justify-center gap-2 rounded-lg border-2 border-dashed border-muted-foreground/30 text-sm text-muted-foreground transition-colors hover:border-foreground/50 hover:text-foreground"
>
Browse for a folder
</button>
@ -174,14 +215,41 @@ export function FolderWatchDialog({
<Switch checked={shouldSummarize} onCheckedChange={setShouldSummarize} />
</div>
<Button className="w-full relative" onClick={handleSubmit} disabled={submitting}>
<span className={submitting ? "invisible" : ""}>Start Folder Sync</span>
{submitting && (
<span className="absolute inset-0 flex items-center justify-center">
<Spinner size="sm" />
</span>
{progressLabel && (
<div className="rounded-lg bg-slate-400/5 dark:bg-white/5 px-3 py-2">
<p className="text-xs text-muted-foreground">{progressLabel}</p>
{progress && progress.phase === "uploading" && progress.total > 0 && (
<div className="mt-1.5 h-1.5 w-full rounded-full bg-muted overflow-hidden">
<div
className="h-full bg-primary rounded-full transition-[width] duration-300"
style={{
width: `${Math.round((progress.uploaded / progress.total) * 100)}%`,
}}
/>
</div>
)}
</div>
)}
<div className="flex gap-2 mt-auto">
{submitting ? (
<>
<Button variant="secondary" className="flex-1" onClick={handleCancel}>
Cancel
</Button>
<Button className="flex-1 relative" disabled>
<span className="invisible">Syncing...</span>
<span className="absolute inset-0 flex items-center justify-center">
<Spinner size="sm" />
</span>
</Button>
</>
) : (
<Button className="w-full" onClick={handleSubmit}>
Start Folder Sync
</Button>
)}
</Button>
</div>
</>
)}
</div>

View file

@ -9,7 +9,7 @@ export const folder = z.object({
created_by_id: z.string().nullable().optional(),
created_at: z.string(),
updated_at: z.string(),
metadata: z.record(z.unknown()).nullable().optional(),
metadata: z.record(z.string(), z.any()).nullable().optional(),
});
export const folderCreateRequest = z.object({

View file

@ -20,12 +20,18 @@ const DEBOUNCE_MS = 2000;
const MAX_WAIT_MS = 10_000;
const MAX_BATCH_SIZE = 50;
interface FileEntry {
fullPath: string;
relativePath: string;
action: string;
}
interface BatchItem {
folderPath: string;
folderName: string;
searchSpaceId: number;
rootFolderId: number | null;
filePaths: string[];
files: FileEntry[];
ackIds: string[];
}
@ -44,18 +50,42 @@ export function useFolderSync() {
while (queueRef.current.length > 0) {
const batch = queueRef.current.shift()!;
try {
await documentsApiService.folderIndexFiles(batch.searchSpaceId, {
folder_path: batch.folderPath,
folder_name: batch.folderName,
search_space_id: batch.searchSpaceId,
target_file_paths: batch.filePaths,
root_folder_id: batch.rootFolderId,
});
const addChangeFiles = batch.files.filter(
(f) => f.action === "add" || f.action === "change"
);
const unlinkFiles = batch.files.filter((f) => f.action === "unlink");
if (addChangeFiles.length > 0 && electronAPI?.readLocalFiles) {
const fullPaths = addChangeFiles.map((f) => f.fullPath);
const fileDataArr = await electronAPI.readLocalFiles(fullPaths);
const files: File[] = fileDataArr.map((fd) => {
const blob = new Blob([fd.data], { type: fd.mimeType || "application/octet-stream" });
return new File([blob], fd.name, { type: blob.type });
});
await documentsApiService.folderUploadFiles(files, {
folder_name: batch.folderName,
search_space_id: batch.searchSpaceId,
relative_paths: addChangeFiles.map((f) => f.relativePath),
root_folder_id: batch.rootFolderId,
});
}
if (unlinkFiles.length > 0) {
await documentsApiService.folderNotifyUnlinked({
folder_name: batch.folderName,
search_space_id: batch.searchSpaceId,
root_folder_id: batch.rootFolderId,
relative_paths: unlinkFiles.map((f) => f.relativePath),
});
}
if (electronAPI?.acknowledgeFileEvents && batch.ackIds.length > 0) {
await electronAPI.acknowledgeFileEvents(batch.ackIds);
}
} catch (err) {
console.error("[FolderSync] Failed to trigger batch re-index:", err);
console.error("[FolderSync] Failed to process batch:", err);
}
}
processingRef.current = false;
@ -68,10 +98,10 @@ export function useFolderSync() {
if (!pending) return;
pendingByFolder.current.delete(folderKey);
for (let i = 0; i < pending.filePaths.length; i += MAX_BATCH_SIZE) {
for (let i = 0; i < pending.files.length; i += MAX_BATCH_SIZE) {
queueRef.current.push({
...pending,
filePaths: pending.filePaths.slice(i, i + MAX_BATCH_SIZE),
files: pending.files.slice(i, i + MAX_BATCH_SIZE),
ackIds: i === 0 ? pending.ackIds : [],
});
}
@ -83,9 +113,14 @@ export function useFolderSync() {
const existing = pendingByFolder.current.get(folderKey);
if (existing) {
const pathSet = new Set(existing.filePaths);
pathSet.add(event.fullPath);
existing.filePaths = Array.from(pathSet);
const pathSet = new Set(existing.files.map((f) => f.fullPath));
if (!pathSet.has(event.fullPath)) {
existing.files.push({
fullPath: event.fullPath,
relativePath: event.relativePath,
action: event.action,
});
}
if (!existing.ackIds.includes(event.id)) {
existing.ackIds.push(event.id);
}
@ -95,7 +130,13 @@ export function useFolderSync() {
folderName: event.folderName,
searchSpaceId: event.searchSpaceId,
rootFolderId: event.rootFolderId,
filePaths: [event.fullPath],
files: [
{
fullPath: event.fullPath,
relativePath: event.relativePath,
action: event.action,
},
],
ackIds: [event.id],
});
firstEventTime.current.set(folderKey, Date.now());

View file

@ -424,33 +424,79 @@ class DocumentsApiService {
return baseApiService.post(`/api/v1/documents/${documentId}/versions/${versionNumber}/restore`);
};
folderIndex = async (
searchSpaceId: number,
body: {
folder_path: string;
folder_name: string;
search_space_id: number;
exclude_patterns?: string[];
file_extensions?: string[];
root_folder_id?: number;
enable_summary?: boolean;
}
) => {
return baseApiService.post(`/api/v1/documents/folder-index`, undefined, { body });
/**
 * Ask the backend which of the given files need (re-)upload for this folder.
 *
 * `files[].mtime` is in seconds (callers in this codebase convert from
 * `mtimeMs / 1000` before calling). Returns the relative paths that the
 * server considers new or changed.
 */
folderMtimeCheck = async (body: {
  folder_name: string;
  search_space_id: number;
  files: { relative_path: string; mtime: number }[];
}): Promise<{ files_to_upload: string[] }> => {
  // baseApiService.post is untyped here; the double cast narrows the response.
  return baseApiService.post(`/api/v1/documents/folder-mtime-check`, undefined, {
    body,
  }) as unknown as { files_to_upload: string[] };
};
folderIndexFiles = async (
searchSpaceId: number,
body: {
folder_path: string;
folderUploadFiles = async (
files: File[],
metadata: {
folder_name: string;
search_space_id: number;
target_file_paths: string[];
relative_paths: string[];
root_folder_id?: number | null;
enable_summary?: boolean;
},
signal?: AbortSignal
): Promise<{ message: string; status: string; root_folder_id: number; file_count: number }> => {
const formData = new FormData();
for (const file of files) {
formData.append("files", file);
}
) => {
return baseApiService.post(`/api/v1/documents/folder-index-files`, undefined, { body });
formData.append("folder_name", metadata.folder_name);
formData.append("search_space_id", String(metadata.search_space_id));
formData.append("relative_paths", JSON.stringify(metadata.relative_paths));
if (metadata.root_folder_id != null) {
formData.append("root_folder_id", String(metadata.root_folder_id));
}
formData.append("enable_summary", String(metadata.enable_summary ?? false));
const totalSize = files.reduce((acc, f) => acc + f.size, 0);
const timeoutMs = Math.min(Math.max((totalSize / (1024 * 1024)) * 5000, 30_000), 600_000);
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), timeoutMs);
if (signal) {
signal.addEventListener("abort", () => controller.abort(), { once: true });
}
try {
return (await baseApiService.postFormData(`/api/v1/documents/folder-upload`, undefined, {
body: formData,
signal: controller.signal,
})) as { message: string; status: string; root_folder_id: number; file_count: number };
} finally {
clearTimeout(timeoutId);
}
};
/**
 * Tell the backend that files were removed (unlinked) from a watched folder
 * so their documents can be deleted. Returns how many documents the server
 * deleted.
 */
folderNotifyUnlinked = async (body: {
  folder_name: string;
  search_space_id: number;
  root_folder_id: number | null;
  relative_paths: string[];
}): Promise<{ deleted_count: number }> => {
  // baseApiService.post is untyped here; the double cast narrows the response.
  return baseApiService.post(`/api/v1/documents/folder-unlink`, undefined, {
    body,
  }) as unknown as { deleted_count: number };
};
/**
 * Finalize a full folder scan: `all_relative_paths` is the complete set of
 * files currently present on disk, letting the server remove documents for
 * paths no longer in that set. Returns how many documents were deleted.
 */
folderSyncFinalize = async (body: {
  folder_name: string;
  search_space_id: number;
  root_folder_id: number | null;
  all_relative_paths: string[];
}): Promise<{ deleted_count: number }> => {
  // baseApiService.post is untyped here; the double cast narrows the response.
  return baseApiService.post(`/api/v1/documents/folder-sync-finalize`, undefined, {
    body,
  }) as unknown as { deleted_count: number };
};
getWatchedFolders = async (searchSpaceId: number) => {

View file

@ -0,0 +1,239 @@
import { documentsApiService } from "@/lib/apis/documents-api.service";
// Upper bound on the combined byte size of one upload batch.
const MAX_BATCH_SIZE_BYTES = 20 * 1024 * 1024; // 20 MB
// Upper bound on the number of files packed into one upload batch.
const MAX_BATCH_FILES = 10;
// Number of batches uploaded in parallel by uploadBatchesWithConcurrency.
const UPLOAD_CONCURRENCY = 3;
/** Progress snapshot reported to `onProgress` during a folder scan/upload. */
export interface FolderSyncProgress {
  /** Current stage of the sync pipeline. */
  phase: "listing" | "checking" | "uploading" | "finalizing" | "done";
  /** Files uploaded so far; stays 0 until the "uploading" phase begins. */
  uploaded: number;
  /** Phase-dependent denominator: all listed files while "checking", files needing upload from "uploading" onward. */
  total: number;
}
/** Input for `uploadFolderScan`. */
export interface FolderSyncParams {
  /** Absolute path of the folder to scan. */
  folderPath: string;
  /** Display/name key of the folder, sent with every backend call. */
  folderName: string;
  searchSpaceId: number;
  /** Name patterns to exclude while listing (forwarded to the main process). */
  excludePatterns: string[];
  /** File extensions to include while listing (forwarded to the main process). */
  fileExtensions: string[];
  /** Forwarded to the upload endpoint as `enable_summary`. */
  enableSummary: boolean;
  /** Existing backend folder id to reuse; when absent, the first upload response supplies one. */
  rootFolderId?: number | null;
  /** Called with phase/count updates as the scan progresses. */
  onProgress?: (progress: FolderSyncProgress) => void;
  /** Aborts between pipeline steps and is forwarded to in-flight uploads. */
  signal?: AbortSignal;
}
/**
 * Partition file entries into upload batches, capping each batch at
 * MAX_BATCH_FILES entries and MAX_BATCH_SIZE_BYTES combined size.
 * An entry at or above the size cap always gets a batch of its own.
 * Entry order is preserved.
 */
function buildBatches(entries: FolderFileEntry[]): FolderFileEntry[][] {
  const result: FolderFileEntry[][] = [];
  let pending: FolderFileEntry[] = [];
  let pendingBytes = 0;

  const flush = () => {
    if (pending.length > 0) {
      result.push(pending);
      pending = [];
      pendingBytes = 0;
    }
  };

  for (const entry of entries) {
    // Oversized files ship alone so one huge file never shares a batch.
    if (entry.size >= MAX_BATCH_SIZE_BYTES) {
      flush();
      result.push([entry]);
      continue;
    }
    // Close the current batch when adding this entry would exceed either cap.
    if (pending.length >= MAX_BATCH_FILES || pendingBytes + entry.size > MAX_BATCH_SIZE_BYTES) {
      flush();
    }
    pending.push(entry);
    pendingBytes += entry.size;
  }
  flush();
  return result;
}
/**
 * Upload the given batches with a small worker pool.
 *
 * Each of up to UPLOAD_CONCURRENCY workers repeatedly pulls the next batch
 * index from a shared counter, reads that batch's bytes from disk via the
 * Electron bridge, wraps them in File objects, and posts them with
 * `documentsApiService.folderUploadFiles`. The first response carrying a
 * `root_folder_id` fixes it for the rest of the run; that id (or the one
 * passed in) is returned.
 *
 * A failed batch is recorded and logged but does not stop the other
 * workers; aborting via `params.signal` stops workers between batches.
 *
 * NOTE(review): batch failures are only logged to the console — the caller
 * cannot tell that some files were never uploaded. Confirm this best-effort
 * behavior is intended.
 */
async function uploadBatchesWithConcurrency(
  batches: FolderFileEntry[][],
  params: {
    folderName: string;
    searchSpaceId: number;
    rootFolderId: number | null;
    enableSummary: boolean;
    signal?: AbortSignal;
    // Invoked with the batch's file count after each successful upload.
    onBatchComplete?: (filesInBatch: number) => void;
  }
): Promise<number | null> {
  const api = window.electronAPI;
  if (!api) throw new Error("Electron API not available");

  // Shared work queue cursor; workers claim indices with batchIdx++.
  let batchIdx = 0;
  let resolvedRootFolderId = params.rootFolderId;
  const errors: string[] = [];

  async function processNext(): Promise<void> {
    while (true) {
      if (params.signal?.aborted) return;
      const idx = batchIdx++;
      if (idx >= batches.length) return;
      const batch = batches[idx];
      const fullPaths = batch.map((e) => e.fullPath);
      try {
        // Read the raw bytes from the main process, then rebuild File objects
        // for the multipart upload.
        const fileDataArr = await api.readLocalFiles(fullPaths);
        const files: File[] = fileDataArr.map((fd) => {
          const blob = new Blob([fd.data], { type: fd.mimeType || "application/octet-stream" });
          return new File([blob], fd.name, { type: blob.type });
        });
        const result = await documentsApiService.folderUploadFiles(
          files,
          {
            folder_name: params.folderName,
            search_space_id: params.searchSpaceId,
            relative_paths: batch.map((e) => e.relativePath),
            root_folder_id: resolvedRootFolderId,
            enable_summary: params.enableSummary,
          },
          params.signal
        );
        // Adopt the server-assigned folder id from the first response that has one.
        if (result.root_folder_id && !resolvedRootFolderId) {
          resolvedRootFolderId = result.root_folder_id;
        }
        params.onBatchComplete?.(batch.length);
      } catch (err) {
        // Abort errors are expected on cancellation; don't record them.
        if (params.signal?.aborted) return;
        const msg = (err as Error)?.message || "Upload failed";
        errors.push(`Batch ${idx}: ${msg}`);
      }
    }
  }

  // Never spawn more workers than there are batches.
  const workers = Array.from({ length: Math.min(UPLOAD_CONCURRENCY, batches.length) }, () =>
    processNext()
  );
  await Promise.all(workers);

  if (errors.length > 0 && !params.signal?.aborted) {
    console.error("Some batches failed:", errors);
  }
  return resolvedRootFolderId;
}
/**
 * Run a full upload-based folder scan.
 *
 * Pipeline (progress is reported via `params.onProgress` at each phase):
 *   1. "listing"    — ask the Electron main process for all matching files.
 *   2. "checking"   — send per-file mtimes (converted to seconds) to the
 *                     backend, which returns the paths that need upload.
 *   3. "uploading"  — upload the changed files in size/count-capped batches
 *                     with bounded concurrency.
 *   4. "finalizing" — send the complete path set so the server can drop
 *                     documents for files no longer on disk.
 *   5. "done".
 *
 * Aborting via `params.signal` throws an AbortError between steps; partial
 * uploads already sent to the server are kept.
 *
 * @returns the backend `root_folder_id` to pass to `addWatchedFolder`
 *          (null if no upload happened and none was supplied).
 */
export async function uploadFolderScan(params: FolderSyncParams): Promise<number | null> {
  const api = window.electronAPI;
  if (!api) throw new Error("Electron API not available");

  const {
    folderPath,
    folderName,
    searchSpaceId,
    excludePatterns,
    fileExtensions,
    enableSummary,
    signal,
  } = params;

  let rootFolderId = params.rootFolderId ?? null;

  params.onProgress?.({ phase: "listing", uploaded: 0, total: 0 });
  if (signal?.aborted) throw new DOMException("Aborted", "AbortError");

  const allFiles = await api.listFolderFiles({
    path: folderPath,
    name: folderName,
    excludePatterns,
    fileExtensions,
    rootFolderId: rootFolderId ?? null,
    searchSpaceId,
    active: true,
  });

  if (signal?.aborted) throw new DOMException("Aborted", "AbortError");
  params.onProgress?.({ phase: "checking", uploaded: 0, total: allFiles.length });

  // Backend expects mtimes in seconds; the Electron listing returns ms.
  const mtimeCheckResult = await documentsApiService.folderMtimeCheck({
    folder_name: folderName,
    search_space_id: searchSpaceId,
    files: allFiles.map((f) => ({ relative_path: f.relativePath, mtime: f.mtimeMs / 1000 })),
  });
  const filesToUpload = mtimeCheckResult.files_to_upload;

  // Set membership keeps the filter O(n) over the listing.
  const uploadSet = new Set(filesToUpload);
  const entriesToUpload = allFiles.filter((f) => uploadSet.has(f.relativePath));

  if (signal?.aborted) throw new DOMException("Aborted", "AbortError");

  if (entriesToUpload.length > 0) {
    const batches = buildBatches(entriesToUpload);
    let uploaded = 0;
    params.onProgress?.({ phase: "uploading", uploaded: 0, total: entriesToUpload.length });

    const uploadedRootId = await uploadBatchesWithConcurrency(batches, {
      folderName,
      searchSpaceId,
      rootFolderId: rootFolderId ?? null,
      enableSummary,
      signal,
      onBatchComplete: (count) => {
        uploaded += count;
        params.onProgress?.({ phase: "uploading", uploaded, total: entriesToUpload.length });
      },
    });

    if (signal?.aborted) throw new DOMException("Aborted", "AbortError");
    if (uploadedRootId) {
      rootFolderId = uploadedRootId;
    }
  }

  if (signal?.aborted) throw new DOMException("Aborted", "AbortError");
  params.onProgress?.({
    phase: "finalizing",
    uploaded: entriesToUpload.length,
    total: entriesToUpload.length,
  });

  await documentsApiService.folderSyncFinalize({
    folder_name: folderName,
    search_space_id: searchSpaceId,
    root_folder_id: rootFolderId ?? null,
    all_relative_paths: allFiles.map((f) => f.relativePath),
  });

  params.onProgress?.({
    phase: "done",
    uploaded: entriesToUpload.length,
    total: entriesToUpload.length,
  });

  // Seed the Electron mtime store so the reconciliation scan in
  // startWatcher won't re-emit events for files we just indexed.
  // NOTE(review): this seeds every listed file, including ones whose upload
  // batch failed inside uploadBatchesWithConcurrency — those files would not
  // be retried by the watcher. Confirm this is acceptable.
  if (api.seedFolderMtimes) {
    const mtimes: Record<string, number> = {};
    for (const f of allFiles) {
      mtimes[f.relativePath] = f.mtimeMs;
    }
    await api.seedFolderMtimes(folderPath, mtimes);
  }

  return rootFolderId;
}

View file

@ -34,6 +34,13 @@ interface LocalFileData {
size: number;
}
/** A file discovered by the main process while listing a watched folder. */
interface FolderFileEntry {
  /** Path relative to the watched folder root. */
  relativePath: string;
  /** Absolute path on disk. */
  fullPath: string;
  /** File size in bytes. */
  size: number;
  /** Last-modified time in milliseconds (as from fs.Stats.mtimeMs). */
  mtimeMs: number;
}
interface ElectronAPI {
versions: {
electron: string;
@ -82,6 +89,8 @@ interface ElectronAPI {
signalRendererReady: () => Promise<void>;
getPendingFileEvents: () => Promise<FolderSyncFileChangedEvent[]>;
acknowledgeFileEvents: (eventIds: string[]) => Promise<{ acknowledged: number }>;
listFolderFiles: (config: WatchedFolderConfig) => Promise<FolderFileEntry[]>;
seedFolderMtimes: (folderPath: string, mtimes: Record<string, number>) => Promise<void>;
// Browse files/folders via native dialogs
browseFiles: () => Promise<string[] | null>;
readLocalFiles: (paths: string[]) => Promise<LocalFileData[]>;

View file

@ -1,4 +1,4 @@
import { number, string, table } from "@rocicorp/zero";
import { json, number, string, table } from "@rocicorp/zero";
export const folderTable = table("folders")
.columns({
@ -10,5 +10,6 @@ export const folderTable = table("folders")
createdById: string().optional().from("created_by_id"),
createdAt: number().from("created_at"),
updatedAt: number().from("updated_at"),
metadata: json<Record<string, unknown>>().optional().from("metadata"),
})
.primaryKey("id");