2026-04-08 15:46:52 +05:30
|
|
|
import { documentsApiService } from "@/lib/apis/documents-api.service";
|
|
|
|
|
|
|
|
|
|
// Upper bound on the combined byte size of one upload batch; buildBatches
// keeps each batch under this (a single file at/over the limit is sent alone).
const MAX_BATCH_SIZE_BYTES = 20 * 1024 * 1024; // 20 MB

// Upper bound on the number of files in one upload batch, regardless of size.
const MAX_BATCH_FILES = 10;

// How many batch uploads may be in flight at once (worker-pool width).
const UPLOAD_CONCURRENCY = 3;
|
|
|
|
|
|
|
|
|
|
/**
 * Progress snapshot delivered to `FolderSyncParams.onProgress` as a folder
 * sync advances through its phases.
 */
export interface FolderSyncProgress {
  /** Current stage of the sync pipeline. */
  phase: "listing" | "checking" | "uploading" | "finalizing" | "done";
  /** Number of files uploaded so far (0 before the "uploading" phase). */
  uploaded: number;
  /**
   * Denominator for `uploaded`. Note the meaning shifts by phase: during
   * "checking" it is the count of all listed files; from "uploading" onward
   * it is the count of files that actually need uploading.
   */
  total: number;
}
|
|
|
|
|
|
|
|
|
|
/**
 * Inputs for {@link uploadFolderScan}.
 */
export interface FolderSyncParams {
  /** Absolute path of the local folder to scan. */
  folderPath: string;
  /** Display/identifier name for the folder, sent to the backend. */
  folderName: string;
  /** Backend search space the folder's documents belong to. */
  searchSpaceId: number;
  /** Glob-style patterns to skip while listing files (passed to Electron). */
  excludePatterns: string[];
  /** File extensions to include while listing files (passed to Electron). */
  fileExtensions: string[];
  /** Whether the backend should generate summaries for uploaded documents. */
  enableSummary: boolean;
  /** Known backend folder id from a previous sync; null/undefined on first sync. */
  rootFolderId?: number | null;
  /** Optional progress callback invoked at each phase transition and per batch. */
  onProgress?: (progress: FolderSyncProgress) => void;
  /** Optional abort signal; aborting makes the scan throw an AbortError. */
  signal?: AbortSignal;
}
|
|
|
|
|
|
2026-04-08 18:23:03 +05:30
|
|
|
function buildBatches(entries: FolderFileEntry[]): FolderFileEntry[][] {
|
2026-04-08 15:46:52 +05:30
|
|
|
const batches: FolderFileEntry[][] = [];
|
|
|
|
|
let currentBatch: FolderFileEntry[] = [];
|
|
|
|
|
let currentSize = 0;
|
|
|
|
|
|
|
|
|
|
for (const entry of entries) {
|
|
|
|
|
if (entry.size >= MAX_BATCH_SIZE_BYTES) {
|
|
|
|
|
if (currentBatch.length > 0) {
|
|
|
|
|
batches.push(currentBatch);
|
|
|
|
|
currentBatch = [];
|
|
|
|
|
currentSize = 0;
|
|
|
|
|
}
|
|
|
|
|
batches.push([entry]);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-08 18:23:03 +05:30
|
|
|
if (currentBatch.length >= MAX_BATCH_FILES || currentSize + entry.size > MAX_BATCH_SIZE_BYTES) {
|
2026-04-08 15:46:52 +05:30
|
|
|
batches.push(currentBatch);
|
|
|
|
|
currentBatch = [];
|
|
|
|
|
currentSize = 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
currentBatch.push(entry);
|
|
|
|
|
currentSize += entry.size;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (currentBatch.length > 0) {
|
|
|
|
|
batches.push(currentBatch);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return batches;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
 * Upload batches of files through the documents API using a small pool of
 * concurrent workers (at most UPLOAD_CONCURRENCY in flight).
 *
 * Batch failures are collected and logged, not thrown — a failed batch does
 * not stop the remaining batches. Aborting via `params.signal` makes workers
 * stop picking up new batches.
 *
 * @param batches Pre-built file batches (see buildBatches).
 * @param params  Upload context; `onBatchComplete` fires after each
 *                successful batch with the number of files in it.
 * @returns The backend root folder id: the one passed in, or the id the
 *          backend assigned on the first successful upload when none was
 *          known yet.
 * @throws Error if the Electron bridge (window.electronAPI) is unavailable.
 */
async function uploadBatchesWithConcurrency(
  batches: FolderFileEntry[][],
  params: {
    folderName: string;
    searchSpaceId: number;
    rootFolderId: number | null;
    enableSummary: boolean;
    signal?: AbortSignal;
    onBatchComplete?: (filesInBatch: number) => void;
  }
): Promise<number | null> {
  const api = window.electronAPI;
  if (!api) throw new Error("Electron API not available");

  // Shared cursor: workers claim the next batch index atomically (JS is
  // single-threaded, so post-increment between awaits is race-free).
  let batchIdx = 0;
  // Starts as the caller-provided id; may be filled in by the first batch
  // whose response carries a root_folder_id. Later batches then reuse it.
  let resolvedRootFolderId = params.rootFolderId;
  const errors: string[] = [];

  // One worker: repeatedly claim and process the next unclaimed batch until
  // none remain or the operation is aborted.
  async function processNext(): Promise<void> {
    while (true) {
      if (params.signal?.aborted) return;

      const idx = batchIdx++;
      if (idx >= batches.length) return;

      const batch = batches[idx];
      const fullPaths = batch.map((e) => e.fullPath);

      try {
        // Read file contents via the Electron bridge, then wrap as File
        // objects for the multipart upload.
        const fileDataArr = await api.readLocalFiles(fullPaths);

        const files: File[] = fileDataArr.map((fd) => {
          const blob = new Blob([fd.data], { type: fd.mimeType || "application/octet-stream" });
          return new File([blob], fd.name, { type: blob.type });
        });

        const result = await documentsApiService.folderUploadFiles(
          files,
          {
            folder_name: params.folderName,
            search_space_id: params.searchSpaceId,
            relative_paths: batch.map((e) => e.relativePath),
            root_folder_id: resolvedRootFolderId,
            enable_summary: params.enableSummary,
          },
          params.signal
        );

        // Capture the backend-assigned root folder id from the first batch
        // that returns one; never overwrite an already-resolved id.
        if (result.root_folder_id && !resolvedRootFolderId) {
          resolvedRootFolderId = result.root_folder_id;
        }

        params.onBatchComplete?.(batch.length);
      } catch (err) {
        // Abort-triggered rejections are expected; exit quietly.
        if (params.signal?.aborted) return;
        const msg = (err as Error)?.message || "Upload failed";
        // Record and continue: one bad batch must not sink the rest.
        errors.push(`Batch ${idx}: ${msg}`);
      }
    }
  }

  // Spawn at most as many workers as there are batches.
  const workers = Array.from({ length: Math.min(UPLOAD_CONCURRENCY, batches.length) }, () =>
    processNext()
  );
  await Promise.all(workers);

  // Best-effort semantics: failures are surfaced in the console only.
  // NOTE(review): callers cannot detect partial failure from the return
  // value — confirm this is intended.
  if (errors.length > 0 && !params.signal?.aborted) {
    console.error("Some batches failed:", errors);
  }

  return resolvedRootFolderId;
}
|
|
|
|
|
|
|
|
|
|
/**
 * Run a full upload-based folder scan: list files, mtime-check, upload
 * changed files in parallel batches, and finalize (delete orphans).
 *
 * Progress is reported via `params.onProgress` at each phase boundary and
 * after every completed upload batch. Aborting via `params.signal` throws a
 * DOMException with name "AbortError" at the next phase boundary.
 *
 * Returns the root_folder_id to pass to addWatchedFolder.
 *
 * @throws Error if the Electron bridge (window.electronAPI) is unavailable.
 * @throws DOMException ("AbortError") when `params.signal` is aborted.
 */
export async function uploadFolderScan(params: FolderSyncParams): Promise<number | null> {
  const api = window.electronAPI;
  if (!api) throw new Error("Electron API not available");

  const {
    folderPath,
    folderName,
    searchSpaceId,
    excludePatterns,
    fileExtensions,
    enableSummary,
    signal,
  } = params;
  // Normalize undefined → null; may be replaced by a backend-assigned id below.
  let rootFolderId = params.rootFolderId ?? null;

  // Phase 1: enumerate local files via the Electron bridge.
  params.onProgress?.({ phase: "listing", uploaded: 0, total: 0 });

  if (signal?.aborted) throw new DOMException("Aborted", "AbortError");

  const allFiles = await api.listFolderFiles({
    path: folderPath,
    name: folderName,
    excludePatterns,
    fileExtensions,
    rootFolderId: rootFolderId ?? null,
    searchSpaceId,
    active: true,
  });

  if (signal?.aborted) throw new DOMException("Aborted", "AbortError");

  // Phase 2: ask the backend which files are new/changed (mtime comparison).
  params.onProgress?.({ phase: "checking", uploaded: 0, total: allFiles.length });

  const mtimeCheckResult = await documentsApiService.folderMtimeCheck({
    folder_name: folderName,
    search_space_id: searchSpaceId,
    // Backend expects mtime in seconds; Electron reports milliseconds.
    files: allFiles.map((f) => ({ relative_path: f.relativePath, mtime: f.mtimeMs / 1000 })),
  });

  const filesToUpload = mtimeCheckResult.files_to_upload;
  const uploadSet = new Set(filesToUpload);
  const entriesToUpload = allFiles.filter((f) => uploadSet.has(f.relativePath));

  if (signal?.aborted) throw new DOMException("Aborted", "AbortError");

  // Phase 3: upload the changed files in size/count-capped parallel batches.
  if (entriesToUpload.length > 0) {
    const batches = buildBatches(entriesToUpload);

    let uploaded = 0;
    params.onProgress?.({ phase: "uploading", uploaded: 0, total: entriesToUpload.length });

    const uploadedRootId = await uploadBatchesWithConcurrency(batches, {
      folderName,
      searchSpaceId,
      rootFolderId: rootFolderId ?? null,
      enableSummary,
      signal,
      onBatchComplete: (count) => {
        uploaded += count;
        params.onProgress?.({ phase: "uploading", uploaded, total: entriesToUpload.length });
      },
    });

    if (signal?.aborted) throw new DOMException("Aborted", "AbortError");

    // On a first sync the backend assigns the root folder id during upload.
    if (uploadedRootId) {
      rootFolderId = uploadedRootId;
    }
  }

  if (signal?.aborted) throw new DOMException("Aborted", "AbortError");

  // Phase 4: finalize — the backend removes documents whose relative path is
  // no longer present locally (orphans).
  params.onProgress?.({
    phase: "finalizing",
    uploaded: entriesToUpload.length,
    total: entriesToUpload.length,
  });

  await documentsApiService.folderSyncFinalize({
    folder_name: folderName,
    search_space_id: searchSpaceId,
    root_folder_id: rootFolderId ?? null,
    all_relative_paths: allFiles.map((f) => f.relativePath),
  });

  params.onProgress?.({
    phase: "done",
    uploaded: entriesToUpload.length,
    total: entriesToUpload.length,
  });

  // Seed the Electron mtime store so the reconciliation scan in
  // startWatcher won't re-emit events for files we just indexed.
  // NOTE(review): this seeds ALL listed files, including any whose upload
  // batch failed (failures are only logged) — those files would then be
  // skipped by the watcher's reconciliation too; confirm this is intended.
  if (api.seedFolderMtimes) {
    const mtimes: Record<string, number> = {};
    for (const f of allFiles) {
      mtimes[f.relativePath] = f.mtimeMs;
    }
    await api.seedFolderMtimes(folderPath, mtimes);
  }

  return rootFolderId;
}
|