Merge remote-tracking branch 'upstream/dev' into fix/github-and-ui-fixes

This commit is contained in:
Anish Sarkar 2026-03-08 16:57:10 +05:30
commit 701bb7063f
89 changed files with 3627 additions and 3951 deletions

View file

@ -47,7 +47,9 @@ export function useComments({ messageId, enabled = true }: UseCommentsOptions) {
if (_batchInflight && _batchTargetIds.has(messageId)) {
await _batchInflight;
const cached = queryClient.getQueryData<GetCommentsResponse>(cacheKeys.comments.byMessage(messageId));
const cached = queryClient.getQueryData<GetCommentsResponse>(
cacheKeys.comments.byMessage(messageId)
);
if (cached) return cached;
}

View file

@ -0,0 +1,127 @@
"use client";
import { useCallback, useEffect, useRef, useState } from "react";
import type { DocumentTypeEnum } from "@/contracts/types/document.types";
import { documentsApiService } from "@/lib/apis/documents-api.service";
import { type DocumentDisplay, toDisplayDoc } from "./use-documents";
// Page size for the first fetch of a fresh search query.
const SEARCH_INITIAL_SIZE = 20;
// Page size for subsequent infinite-scroll fetches.
const SEARCH_SCROLL_SIZE = 5;
/**
* Paginated document search hook.
*
* Handles title-based search with server-side filtering,
* pagination via skip/page_size, and staleness detection
* so fast typing never renders stale results.
*
* @param searchSpaceId - The search space to search within
* @param query - The debounced search query
* @param activeTypes - Document types to filter by
* @param enabled - When false the hook resets and stops fetching
*/
export function useDocumentSearch(
  searchSpaceId: number,
  query: string,
  activeTypes: DocumentTypeEnum[],
  enabled: boolean
) {
  const [documents, setDocuments] = useState<DocumentDisplay[]>([]);
  const [loading, setLoading] = useState(false);
  const [loadingMore, setLoadingMore] = useState(false);
  const [hasMore, setHasMore] = useState(false);
  const [error, setError] = useState(false);
  // Number of items received from the API so far — used as `skip` for the next page.
  const apiLoadedRef = useRef(0);
  // Latest query string; responses for any other query are discarded as stale.
  const queryRef = useRef(query);
  const isActive = enabled && !!query.trim();
  // Serialized so a fresh array identity alone doesn't retrigger the effect.
  const activeTypesKey = activeTypes.join(",");
  // biome-ignore lint/correctness/useExhaustiveDependencies: activeTypesKey serializes activeTypes
  useEffect(() => {
    if (!isActive || !searchSpaceId) {
      setDocuments([]);
      setHasMore(false);
      setError(false);
      // FIX: also reset `loading`. If the hook is disabled (or the query is
      // cleared) while a request is in flight, the cleanup sets `cancelled`
      // so the in-flight `.finally` skips setLoading(false) — without this
      // reset the loading flag would stay stuck at true.
      setLoading(false);
      apiLoadedRef.current = 0;
      return;
    }
    let cancelled = false;
    queryRef.current = query;
    setLoading(true);
    setError(false);
    documentsApiService
      .searchDocuments({
        queryParams: {
          search_space_id: searchSpaceId,
          page: 0,
          page_size: SEARCH_INITIAL_SIZE,
          title: query.trim(),
          ...(activeTypes.length > 0 && { document_types: activeTypes }),
        },
      })
      .then((response) => {
        // Drop results arriving after unmount/param change or for an older query.
        if (cancelled || queryRef.current !== query) return;
        setDocuments(response.items.map(toDisplayDoc));
        setHasMore(response.has_more);
        apiLoadedRef.current = response.items.length;
      })
      .catch((err) => {
        if (cancelled) return;
        console.error("[useDocumentSearch] Search failed:", err);
        setError(true);
      })
      .finally(() => {
        if (!cancelled) setLoading(false);
      });
    return () => {
      cancelled = true;
    };
  }, [query, searchSpaceId, isActive, activeTypesKey]);
  // Fetch the next page, appending results. No-op while a page is already
  // loading, when inactive, or when the server reports no more items.
  // biome-ignore lint/correctness/useExhaustiveDependencies: activeTypesKey serializes activeTypes
  const loadMore = useCallback(async () => {
    if (loadingMore || !isActive || !hasMore) return;
    setLoadingMore(true);
    try {
      const response = await documentsApiService.searchDocuments({
        queryParams: {
          search_space_id: searchSpaceId,
          skip: apiLoadedRef.current,
          page_size: SEARCH_SCROLL_SIZE,
          title: query.trim(),
          ...(activeTypes.length > 0 && { document_types: activeTypes }),
        },
      });
      // A newer query superseded this page while it was loading — discard.
      if (queryRef.current !== query) return;
      setDocuments((prev) => [...prev, ...response.items.map(toDisplayDoc)]);
      setHasMore(response.has_more);
      apiLoadedRef.current += response.items.length;
    } catch (err) {
      console.error("[useDocumentSearch] Load more failed:", err);
    } finally {
      setLoadingMore(false);
    }
  }, [loadingMore, isActive, hasMore, searchSpaceId, query, activeTypesKey]);
  // Optimistically remove documents (e.g. after deletion) without a refetch.
  const removeItems = useCallback((ids: number[]) => {
    const idSet = new Set(ids);
    setDocuments((prev) => prev.filter((item) => !idSet.has(item.id)));
  }, []);
  return {
    documents,
    loading,
    loadingMore,
    hasMore,
    loadMore,
    error,
    removeItems,
  };
}

View file

@ -0,0 +1,118 @@
"use client";
import { useEffect, useRef, useState } from "react";
import { useElectricClient } from "@/lib/electric/context";
/**
* Returns whether any documents in the search space are currently being
* uploaded or indexed (status = "pending" | "processing").
*
* Covers both manual file uploads (2-phase pattern) and all connector indexers,
* since both create documents with status = pending before processing.
*
* The sync shape uses the same columns as useDocuments so Electric can share
* the subscription when both hooks are active simultaneously.
*/
export function useDocumentsProcessing(searchSpaceId: number | null): boolean {
  const electricClient = useElectricClient();
  const [isProcessing, setIsProcessing] = useState(false);
  const liveQueryRef = useRef<{ unsubscribe?: () => void } | null>(null);
  useEffect(() => {
    if (!searchSpaceId || !electricClient) {
      // FIX: reset so a stale `true` does not persist after the search space
      // is deselected or the Electric client becomes unavailable.
      setIsProcessing(false);
      return;
    }
    const spaceId = searchSpaceId;
    const client = electricClient;
    let mounted = true;
    async function setup() {
      // Tear down any previous live query before re-subscribing.
      if (liveQueryRef.current) {
        try {
          liveQueryRef.current.unsubscribe?.();
        } catch {
          /* PGlite may be closed */
        }
        liveQueryRef.current = null;
      }
      try {
        // Same columns as useDocuments so Electric can share the shape
        // subscription when both hooks are active simultaneously.
        const handle = await client.syncShape({
          table: "documents",
          where: `search_space_id = ${spaceId}`,
          columns: [
            "id",
            "document_type",
            "search_space_id",
            "title",
            "created_by_id",
            "created_at",
            "status",
          ],
          primaryKey: ["id"],
        });
        if (!mounted) return;
        // Wait for the initial sync, but never block longer than 5 seconds.
        if (!handle.isUpToDate && handle.initialSyncPromise) {
          await Promise.race([
            handle.initialSyncPromise,
            new Promise((resolve) => setTimeout(resolve, 5000)),
          ]);
        }
        if (!mounted) return;
        const db = client.db as {
          live?: {
            query: <T>(
              sql: string,
              params?: (number | string)[]
            ) => Promise<{
              subscribe: (cb: (result: { rows: T[] }) => void) => void;
              unsubscribe?: () => void;
            }>;
          };
        };
        if (!db.live?.query) return;
        // Reactive count of documents still pending/processing in this space.
        const liveQuery = await db.live.query<{ count: number | string }>(
          `SELECT COUNT(*) as count FROM documents
WHERE search_space_id = $1
AND (status->>'state' = 'pending' OR status->>'state' = 'processing')`,
          [spaceId]
        );
        if (!mounted) {
          liveQuery.unsubscribe?.();
          return;
        }
        liveQuery.subscribe((result: { rows: Array<{ count: number | string }> }) => {
          if (!mounted || !result.rows?.[0]) return;
          // COUNT(*) may arrive as a string depending on the driver — coerce.
          setIsProcessing((Number(result.rows[0].count) || 0) > 0);
        });
        liveQueryRef.current = liveQuery;
      } catch (err) {
        console.error("[useDocumentsProcessing] Electric setup failed:", err);
      }
    }
    setup();
    return () => {
      mounted = false;
      if (liveQueryRef.current) {
        try {
          liveQueryRef.current.unsubscribe?.();
        } catch {
          /* PGlite may be closed */
        }
        liveQueryRef.current = null;
      }
    };
  }, [searchSpaceId, electricClient]);
  return isProcessing;
}

View file

@ -1,22 +1,17 @@
"use client";
import { useQuery } from "@tanstack/react-query";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import type { DocumentTypeEnum } from "@/contracts/types/document.types";
import { useCallback, useEffect, useRef, useState } from "react";
import type { DocumentSortBy, DocumentTypeEnum, SortOrder } from "@/contracts/types/document.types";
import { documentsApiService } from "@/lib/apis/documents-api.service";
import { filterNewElectricItems, getNewestTimestamp } from "@/lib/electric/baseline";
import type { SyncHandle } from "@/lib/electric/client";
import { useElectricClient } from "@/lib/electric/context";
// Stable empty array to prevent infinite re-renders when no typeFilter is provided
const EMPTY_TYPE_FILTER: DocumentTypeEnum[] = [];
// Document status type (matches backend DocumentStatus JSONB)
export interface DocumentStatusType {
state: "ready" | "pending" | "processing" | "failed";
reason?: string;
}
// Document from Electric sync (lightweight table columns - NO content/metadata)
interface DocumentElectric {
id: number;
search_space_id: number;
@ -27,7 +22,6 @@ interface DocumentElectric {
status: DocumentStatusType | null;
}
// Document for display (with resolved user name and email)
export interface DocumentDisplay {
id: number;
search_space_id: number;
@ -40,87 +34,86 @@ export interface DocumentDisplay {
status: DocumentStatusType;
}
/**
* Deduplicate by ID and sort by created_at descending (newest first)
*/
function deduplicateAndSort<T extends { id: number; created_at: string }>(items: T[]): T[] {
const seen = new Map<number, T>();
for (const item of items) {
// Keep the most recent version if duplicate
const existing = seen.get(item.id);
if (!existing || new Date(item.created_at) > new Date(existing.created_at)) {
seen.set(item.id, item);
}
}
return Array.from(seen.values()).sort(
(a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime()
);
// Raw document item as returned by the documents API (snake_case fields;
// creator info and status are optional and normalized by toDisplayDoc).
export interface ApiDocumentInput {
id: number;
search_space_id: number;
document_type: string;
title: string;
created_by_id?: string | null;
created_by_name?: string | null;
created_by_email?: string | null;
created_at: string;
status?: DocumentStatusType | null;
}
/**
* Check if a document has valid/complete data
*/
/**
 * Normalize an API document payload into the shape the UI renders:
 * optional creator fields default to null, a missing status becomes "ready".
 */
export function toDisplayDoc(item: ApiDocumentInput): DocumentDisplay {
  const {
    id,
    search_space_id,
    document_type,
    title,
    created_at,
    created_by_id = null,
    created_by_name = null,
    created_by_email = null,
    status,
  } = item;
  return {
    id,
    search_space_id,
    document_type,
    title,
    created_by_id,
    created_by_name,
    created_by_email,
    created_at,
    status: status ?? { state: "ready" },
  };
}
// Stable empty array so callers omitting typeFilter get a constant identity
// (prevents needless effect re-runs).
const EMPTY_TYPE_FILTER: DocumentTypeEnum[] = [];
// Page size for the first fetch.
const INITIAL_PAGE_SIZE = 20;
// Page size for infinite-scroll fetches.
const SCROLL_PAGE_SIZE = 5;
// A document is renderable only when it has an id and a non-empty title.
function isValidDocument(doc: DocumentElectric): boolean {
  if (doc.id == null) return false;
  return doc.title != null && doc.title !== "";
}
/**
* Real-time documents hook with Electric SQL
* Paginated documents hook with Electric SQL real-time updates.
*
* Architecture (100% Reliable):
* 1. API is the PRIMARY source of truth - always loads first
* 2. Electric provides REAL-TIME updates for additions and deletions
* 3. Use syncHandle.isUpToDate to determine if deletions can be trusted
* 4. Handles bulk deletions correctly by checking sync state
* Architecture:
* 1. API is the PRIMARY data source — fetches pages on demand
* 2. Type counts come from a dedicated lightweight API endpoint
* 3. Electric provides REAL-TIME updates (new docs, deletions, status changes)
* 4. Server-side sorting via sort_by + sort_order params
*
* Filtering strategy:
* - Internal state always stores ALL documents (unfiltered)
* - typeFilter is applied client-side when returning documents
* - typeCounts always reflect the full dataset so the filter sidebar stays complete
* - Changing filters is instant (no API re-fetch or Electric re-sync)
*
* @param searchSpaceId - The search space ID to filter documents
* @param typeFilter - Optional document types to filter by (applied client-side)
* @param searchSpaceId - The search space to load documents for
* @param typeFilter - Document types to filter by (server-side)
* @param sortBy - Column to sort by (server-side)
* @param sortOrder - Sort direction (server-side)
*/
export function useDocuments(
searchSpaceId: number | null,
typeFilter: DocumentTypeEnum[] = EMPTY_TYPE_FILTER
typeFilter: DocumentTypeEnum[] = EMPTY_TYPE_FILTER,
sortBy: DocumentSortBy = "created_at",
sortOrder: SortOrder = "desc"
) {
const electricClient = useElectricClient();
// Internal state: ALL documents (unfiltered)
const [allDocuments, setAllDocuments] = useState<DocumentDisplay[]>([]);
const [documents, setDocuments] = useState<DocumentDisplay[]>([]);
const [typeCounts, setTypeCounts] = useState<Record<string, number>>({});
const [total, setTotal] = useState(0);
const [loading, setLoading] = useState(true);
const [loadingMore, setLoadingMore] = useState(false);
const [hasMore, setHasMore] = useState(false);
const [error, setError] = useState<Error | null>(null);
// Track if initial API load is complete (source of truth)
const apiLoadedRef = useRef(false);
// User cache: userId → displayName / email
const apiLoadedCountRef = useRef(0);
const initialLoadDoneRef = useRef(false);
const prevParamsRef = useRef<{ sortBy: string; sortOrder: string; typeFilterKey: string } | null>(
null
);
// Snapshot of all doc IDs from Electric's first callback after initial load.
// Anything appearing in subsequent callbacks NOT in this set is genuinely new.
const electricBaselineIdsRef = useRef<Set<number> | null>(null);
const newestApiTimestampRef = useRef<string | null>(null);
const userCacheRef = useRef<Map<string, string>>(new Map());
const emailCacheRef = useRef<Map<string, string>>(new Map());
// Electric sync refs
const syncHandleRef = useRef<SyncHandle | null>(null);
const liveQueryRef = useRef<{ unsubscribe?: () => void } | null>(null);
// Type counts from ALL documents (unfiltered) — keeps filter sidebar complete
const typeCounts = useMemo(() => {
const counts: Record<string, number> = {};
for (const doc of allDocuments) {
counts[doc.document_type] = (counts[doc.document_type] || 0) + 1;
}
return counts;
}, [allDocuments]);
const typeFilterKey = typeFilter.join(",");
// Client-side filtered documents for display
const documents = useMemo(() => {
if (typeFilter.length === 0) return allDocuments;
const filterSet = new Set<string>(typeFilter);
return allDocuments.filter((doc) => filterSet.has(doc.document_type));
}, [allDocuments, typeFilter]);
// Populate user cache from API response
const populateUserCache = useCallback(
(
items: Array<{
@ -143,33 +136,11 @@ export function useDocuments(
[]
);
// Convert API item to display doc
const apiToDisplayDoc = useCallback(
(item: {
id: number;
search_space_id: number;
document_type: string;
title: string;
created_by_id?: string | null;
created_by_name?: string | null;
created_by_email?: string | null;
created_at: string;
status?: DocumentStatusType | null;
}): DocumentDisplay => ({
id: item.id,
search_space_id: item.search_space_id,
document_type: item.document_type,
title: item.title,
created_by_id: item.created_by_id ?? null,
created_by_name: item.created_by_name ?? null,
created_by_email: item.created_by_email ?? null,
created_at: item.created_at,
status: item.status ?? { state: "ready" },
}),
(item: ApiDocumentInput): DocumentDisplay => toDisplayDoc(item),
[]
);
// Convert Electric doc to display doc
const electricToDisplayDoc = useCallback(
(doc: DocumentElectric): DocumentDisplay => ({
...doc,
@ -184,66 +155,91 @@ export function useDocuments(
[]
);
// STEP 1: Load ALL documents from API (PRIMARY source of truth).
// Uses React Query for automatic deduplication, caching, and staleTime so
// multiple components mounting useDocuments(sameId) share a single request.
const {
data: apiResponse,
isLoading: apiLoading,
error: apiError,
} = useQuery({
queryKey: ["documents", "all", searchSpaceId],
queryFn: () =>
documentsApiService.getDocuments({
queryParams: {
search_space_id: searchSpaceId!,
page: 0,
page_size: -1,
},
}),
enabled: !!searchSpaceId,
staleTime: 30_000,
});
// Seed local state from API response (runs once per fresh fetch)
// EFFECT 1: Fetch first page + type counts when params change
// biome-ignore lint/correctness/useExhaustiveDependencies: typeFilterKey serializes typeFilter
useEffect(() => {
if (!apiResponse) return;
populateUserCache(apiResponse.items);
const docs = apiResponse.items.map(apiToDisplayDoc);
setAllDocuments(docs);
apiLoadedRef.current = true;
setError(null);
}, [apiResponse, populateUserCache, apiToDisplayDoc]);
if (!searchSpaceId) return;
// Propagate loading / error from React Query
useEffect(() => {
setLoading(apiLoading);
}, [apiLoading]);
let cancelled = false;
useEffect(() => {
if (apiError) {
setError(apiError instanceof Error ? apiError : new Error("Failed to load documents"));
const prev = prevParamsRef.current;
const isSortOnlyChange =
initialLoadDoneRef.current &&
prev !== null &&
prev.typeFilterKey === typeFilterKey &&
(prev.sortBy !== sortBy || prev.sortOrder !== sortOrder);
prevParamsRef.current = { sortBy, sortOrder, typeFilterKey };
if (!isSortOnlyChange) {
setLoading(true);
setDocuments([]);
setTotal(0);
setHasMore(false);
}
}, [apiError]);
apiLoadedCountRef.current = 0;
initialLoadDoneRef.current = false;
electricBaselineIdsRef.current = null;
newestApiTimestampRef.current = null;
// EFFECT 2: Start Electric sync + live query for real-time updates
// No type filter — syncs and queries ALL documents; filtering is client-side
const fetchInitialData = async () => {
try {
const [docsResponse, countsResponse] = await Promise.all([
documentsApiService.getDocuments({
queryParams: {
search_space_id: searchSpaceId,
page: 0,
page_size: INITIAL_PAGE_SIZE,
...(typeFilter.length > 0 && { document_types: typeFilter }),
sort_by: sortBy,
sort_order: sortOrder,
},
}),
documentsApiService.getDocumentTypeCounts({
queryParams: { search_space_id: searchSpaceId },
}),
]);
if (cancelled) return;
populateUserCache(docsResponse.items);
const docs = docsResponse.items.map(apiToDisplayDoc);
setDocuments(docs);
setTotal(docsResponse.total);
setHasMore(docsResponse.has_more);
setTypeCounts(countsResponse);
setError(null);
apiLoadedCountRef.current = docsResponse.items.length;
newestApiTimestampRef.current = getNewestTimestamp(docsResponse.items);
initialLoadDoneRef.current = true;
} catch (err) {
if (cancelled) return;
console.error("[useDocuments] Initial load failed:", err);
setError(err instanceof Error ? err : new Error("Failed to load documents"));
} finally {
if (!cancelled) setLoading(false);
}
};
fetchInitialData();
return () => {
cancelled = true;
};
}, [searchSpaceId, typeFilterKey, sortBy, sortOrder, populateUserCache, apiToDisplayDoc]);
// EFFECT 2: Electric sync + live query for real-time updates
useEffect(() => {
if (!searchSpaceId || !electricClient) return;
// Capture validated values for async closure
const spaceId = searchSpaceId;
const client = electricClient;
let mounted = true;
async function setupElectricRealtime() {
// Cleanup previous subscriptions
if (syncHandleRef.current) {
try {
syncHandleRef.current.unsubscribe();
} catch {
// PGlite may already be closed during cleanup
/* PGlite may already be closed */
}
syncHandleRef.current = null;
}
@ -251,15 +247,12 @@ export function useDocuments(
try {
liveQueryRef.current.unsubscribe?.();
} catch {
// PGlite may already be closed during cleanup
/* PGlite may already be closed */
}
liveQueryRef.current = null;
}
try {
console.log("[useDocuments] Starting Electric sync for real-time updates");
// Start Electric sync (all documents for this search space)
const handle = await client.syncShape({
table: "documents",
where: `search_space_id = ${spaceId}`,
@ -281,20 +274,16 @@ export function useDocuments(
}
syncHandleRef.current = handle;
console.log("[useDocuments] Sync started, isUpToDate:", handle.isUpToDate);
// Wait for initial sync (with timeout)
if (!handle.isUpToDate && handle.initialSyncPromise) {
await Promise.race([
handle.initialSyncPromise,
new Promise((resolve) => setTimeout(resolve, 5000)),
]);
console.log("[useDocuments] Initial sync complete, isUpToDate:", handle.isUpToDate);
}
if (!mounted) return;
// Set up live query (unfiltered — type filtering is done client-side)
const db = client.db as {
live?: {
query: <T>(
@ -307,13 +296,10 @@ export function useDocuments(
};
};
if (!db.live?.query) {
console.warn("[useDocuments] Live queries not available");
return;
}
if (!db.live?.query) return;
const query = `SELECT id, document_type, search_space_id, title, created_by_id, created_at, status
FROM documents
FROM documents
WHERE search_space_id = $1
ORDER BY created_at DESC`;
@ -324,22 +310,12 @@ export function useDocuments(
return;
}
console.log("[useDocuments] Live query subscribed");
liveQuery.subscribe((result: { rows: DocumentElectric[] }) => {
if (!mounted || !result.rows) return;
// DEBUG: Log first few raw documents to see what's coming from Electric
console.log("[useDocuments] Raw data sample:", result.rows.slice(0, 3));
if (!mounted || !result.rows || !initialLoadDoneRef.current) return;
const validItems = result.rows.filter(isValidDocument);
const isFullySynced = syncHandleRef.current?.isUpToDate ?? false;
console.log(
`[useDocuments] Live update: ${result.rows.length} raw, ${validItems.length} valid, synced: ${isFullySynced}`
);
// Fetch user names for new users (non-blocking)
const unknownUserIds = validItems
.filter(
(doc): doc is DocumentElectric & { created_by_id: string } =>
@ -350,12 +326,16 @@ export function useDocuments(
if (unknownUserIds.length > 0) {
documentsApiService
.getDocuments({
queryParams: { search_space_id: spaceId, page: 0, page_size: 20 },
queryParams: {
search_space_id: spaceId,
page: 0,
page_size: 20,
},
})
.then((response) => {
populateUserCache(response.items);
if (mounted) {
setAllDocuments((prev) =>
setDocuments((prev) =>
prev.map((doc) => ({
...doc,
created_by_name: doc.created_by_id
@ -371,46 +351,20 @@ export function useDocuments(
.catch(() => {});
}
// Smart update logic based on sync state
setAllDocuments((prev) => {
// Don't process if API hasn't loaded yet
if (!apiLoadedRef.current) {
console.log("[useDocuments] Waiting for API load, skipping live update");
return prev;
}
// Case 1: Live query is empty
if (validItems.length === 0) {
if (isFullySynced && prev.length > 0) {
// Electric is fully synced and says 0 items - trust it (all deleted)
console.log("[useDocuments] All documents deleted (Electric synced)");
return [];
}
// Partial sync or error - keep existing
console.log("[useDocuments] Empty live result, keeping existing");
return prev;
}
// Case 2: Electric is fully synced - TRUST IT COMPLETELY (handles bulk deletes)
if (isFullySynced) {
const liveDocs = deduplicateAndSort(validItems.map(electricToDisplayDoc));
console.log(
`[useDocuments] Synced update: ${liveDocs.length} docs (was ${prev.length})`
);
return liveDocs;
}
// Case 3: Partial sync - only ADD new items, don't remove any
const existingIds = new Set(prev.map((d) => d.id));
setDocuments((prev) => {
const liveIds = new Set(validItems.map((d) => d.id));
const prevIds = new Set(prev.map((d) => d.id));
// Find new items (in live but not in prev)
const newItems = validItems
.filter((item) => !existingIds.has(item.id))
.map(electricToDisplayDoc);
const newItems = filterNewElectricItems(
validItems,
liveIds,
prevIds,
electricBaselineIdsRef,
newestApiTimestampRef.current
).map(electricToDisplayDoc);
// Find updated items (in both, update with latest data)
const updatedPrev = prev.map((doc) => {
// Update existing docs (status changes, title edits)
let updated = prev.map((doc) => {
if (liveIds.has(doc.id)) {
const liveItem = validItems.find((v) => v.id === doc.id);
if (liveItem) {
@ -420,19 +374,32 @@ export function useDocuments(
return doc;
});
if (newItems.length > 0) {
console.log(`[useDocuments] Adding ${newItems.length} new items (partial sync)`);
return deduplicateAndSort([...newItems, ...updatedPrev]);
// Remove deleted docs (only when fully synced)
if (isFullySynced) {
updated = updated.filter((doc) => liveIds.has(doc.id));
}
return updatedPrev;
if (newItems.length > 0) {
return [...newItems, ...updated];
}
return updated;
});
// Update type counts when Electric detects changes
if (isFullySynced && validItems.length > 0) {
const counts: Record<string, number> = {};
for (const item of validItems) {
counts[item.document_type] = (counts[item.document_type] || 0) + 1;
}
setTypeCounts(counts);
setTotal(validItems.length);
}
});
liveQueryRef.current = liveQuery;
} catch (err) {
console.error("[useDocuments] Electric setup failed:", err);
// Don't set error - API data is already loaded
}
}
@ -444,7 +411,7 @@ export function useDocuments(
try {
syncHandleRef.current.unsubscribe();
} catch {
// PGlite may already be closed during cleanup
/* PGlite may already be closed */
}
syncHandleRef.current = null;
}
@ -452,32 +419,85 @@ export function useDocuments(
try {
liveQueryRef.current.unsubscribe?.();
} catch {
// PGlite may already be closed during cleanup
/* PGlite may already be closed */
}
liveQueryRef.current = null;
}
};
}, [searchSpaceId, electricClient, electricToDisplayDoc, populateUserCache]);
// Track previous searchSpaceId to detect actual changes
// Reset on search space change
const prevSearchSpaceIdRef = useRef<number | null>(null);
// Reset on search space change (not on initial mount)
useEffect(() => {
if (prevSearchSpaceIdRef.current !== null && prevSearchSpaceIdRef.current !== searchSpaceId) {
setAllDocuments([]);
apiLoadedRef.current = false;
setDocuments([]);
setTypeCounts({});
setTotal(0);
setHasMore(false);
apiLoadedCountRef.current = 0;
initialLoadDoneRef.current = false;
electricBaselineIdsRef.current = null;
newestApiTimestampRef.current = null;
userCacheRef.current.clear();
emailCacheRef.current.clear();
}
prevSearchSpaceIdRef.current = searchSpaceId;
}, [searchSpaceId]);
// Load more pages via API
// biome-ignore lint/correctness/useExhaustiveDependencies: typeFilterKey serializes typeFilter
const loadMore = useCallback(async () => {
if (loadingMore || !hasMore || !searchSpaceId) return;
setLoadingMore(true);
try {
const response = await documentsApiService.getDocuments({
queryParams: {
search_space_id: searchSpaceId,
skip: apiLoadedCountRef.current,
page_size: SCROLL_PAGE_SIZE,
...(typeFilter.length > 0 && { document_types: typeFilter }),
sort_by: sortBy,
sort_order: sortOrder,
},
});
populateUserCache(response.items);
const newDocs = response.items.map(apiToDisplayDoc);
setDocuments((prev) => {
const existingIds = new Set(prev.map((d) => d.id));
const deduped = newDocs.filter((d) => !existingIds.has(d.id));
return [...prev, ...deduped];
});
setTotal(response.total);
setHasMore(response.has_more);
apiLoadedCountRef.current += response.items.length;
} catch (err) {
console.error("[useDocuments] Load more failed:", err);
} finally {
setLoadingMore(false);
}
}, [
loadingMore,
hasMore,
searchSpaceId,
typeFilterKey,
sortBy,
sortOrder,
populateUserCache,
apiToDisplayDoc,
]);
return {
documents,
typeCounts,
total: documents.length,
total,
loading,
loadingMore,
hasMore,
loadMore,
error,
};
}

View file

@ -1,497 +1,402 @@
"use client";
import { useCallback, useEffect, useRef, useState } from "react";
import type { InboxItem, InboxItemTypeEnum } from "@/contracts/types/inbox.types";
import type { InboxItem, NotificationCategory } from "@/contracts/types/inbox.types";
import { notificationsApiService } from "@/lib/apis/notifications-api.service";
import type { SyncHandle } from "@/lib/electric/client";
import { filterNewElectricItems, getNewestTimestamp } from "@/lib/electric/baseline";
import { useElectricClient } from "@/lib/electric/context";
export type { InboxItem, InboxItemTypeEnum } from "@/contracts/types/inbox.types";
export type {
InboxItem,
InboxItemTypeEnum,
NotificationCategory,
} from "@/contracts/types/inbox.types";
const PAGE_SIZE = 50;
const SYNC_WINDOW_DAYS = 14;
const INITIAL_PAGE_SIZE = 50;
const SCROLL_PAGE_SIZE = 30;
const SYNC_WINDOW_DAYS = 4;
// SQL fragment per notification tab: appended to live-query WHERE clauses so
// each tab only counts and loads its own notification types.
const CATEGORY_TYPE_SQL: Record<NotificationCategory, string> = {
comments: "AND type IN ('new_mention', 'comment_reply')",
status:
"AND type IN ('connector_indexing', 'connector_deletion', 'document_processing', 'page_limit_exceeded')",
};
/**
 * True when `createdAt` falls before the rolling sync window
 * (now minus SYNC_WINDOW_DAYS).
 */
function isOlderThanSyncWindow(createdAt: string): boolean {
  const windowStart = new Date();
  windowStart.setDate(windowStart.getDate() - SYNC_WINDOW_DAYS);
  return new Date(createdAt).getTime() < windowStart.getTime();
}
/**
 * Collapse duplicate IDs (first occurrence wins) and order the result
 * newest-first by created_at. Single source of truth for deduplication so
 * API pages and Electric live updates can never disagree.
 */
function deduplicateAndSort(items: InboxItem[]): InboxItem[] {
  const byId = new Map<number, InboxItem>();
  for (const item of items) {
    if (!byId.has(item.id)) {
      byId.set(item.id, item);
    }
  }
  const unique = [...byId.values()];
  unique.sort((a, b) => Date.parse(b.created_at) - Date.parse(a.created_at));
  return unique;
}
/**
* Calculate the cutoff date for sync window
* IMPORTANT: Rounds to the start of the day (midnight UTC) to ensure stable values
* across re-renders. Without this, millisecond differences cause multiple syncs!
* Calculate the cutoff date for sync window.
* Rounds to the start of the day (midnight UTC) to ensure stable values
* across re-renders.
*/
// Start-of-day (UTC) timestamp SYNC_WINDOW_DAYS ago. Day-level rounding keeps
// the value stable across re-renders, preventing duplicate Electric syncs
// caused by millisecond differences.
function getSyncCutoffDate(): string {
  const windowStart = new Date();
  windowStart.setDate(windowStart.getDate() - SYNC_WINDOW_DAYS);
  windowStart.setUTCHours(0, 0, 0, 0);
  return windowStart.toISOString();
}
/**
 * Best-effort conversion of a date-like value to an ISO-8601 string.
 * Empty/absent input yields null; strings already containing "T" are
 * assumed ISO and passed through; unparseable strings are returned as-is.
 */
function toISOString(date: string | Date | null | undefined): string | null {
  if (!date) return null;
  if (date instanceof Date) return date.toISOString();
  if (typeof date !== "string") return null;
  if (date.includes("T")) return date;
  try {
    return new Date(date).toISOString();
  } catch {
    return date;
  }
}
/**
* Hook for managing inbox items with Electric SQL real-time sync + API fallback
* Hook for managing inbox items with API-first architecture + Electric real-time deltas.
*
* Architecture (Simplified & Race-Condition Free):
* - Electric SQL: Syncs recent items (within SYNC_WINDOW_DAYS) for real-time updates
* - Live Query: Provides reactive first page from PGLite
* - API: Handles all pagination (more reliable than mixing with Electric)
* Architecture (Documents pattern, per-tab):
* 1. API is the PRIMARY data source — fetches first page on mount with category filter
* 2. Electric provides REAL-TIME updates (new items, status changes, read state)
* 3. Baseline pattern prevents duplicates between API and Electric
* 4. Electric sync shape is SHARED across instances (client-level caching) —
*    each instance creates its own type-filtered live queries
*
* Key Design Decisions:
* 1. No mutable refs for cursor - cursor computed from current state
* 2. Single deduplicateAndSort function - prevents inconsistencies
* 3. Filter-based preservation in live query - prevents data loss
* 4. Auto-fetch from API when Electric returns 0 items
* Unread count strategy:
* - API provides the category-filtered total on mount (ground truth across all time)
* - Electric live query counts unread within SYNC_WINDOW_DAYS (filtered by type)
* - olderUnreadOffsetRef bridges the gap: total = offset + recent
* - Optimistic updates adjust both the count and the offset (for old items)
*
* @param userId - The user ID to fetch inbox items for
* @param searchSpaceId - The search space ID to filter inbox items
* @param typeFilter - Optional inbox item type to filter by
* @param category - Which tab: "comments" or "status"
*/
export function useInbox(
userId: string | null,
searchSpaceId: number | null,
typeFilter: InboxItemTypeEnum | null = null
category: NotificationCategory
) {
const electricClient = useElectricClient();
const [inboxItems, setInboxItems] = useState<InboxItem[]>([]);
const [loading, setLoading] = useState(true);
const [loadingMore, setLoadingMore] = useState(false);
const [hasMore, setHasMore] = useState(true);
const [hasMore, setHasMore] = useState(false);
const [error, setError] = useState<Error | null>(null);
const [unreadCount, setUnreadCount] = useState(0);
// Split unread count tracking for accurate counts with 14-day sync window
// olderUnreadCount = unread items OLDER than sync window (from server, static until reconciliation)
// recentUnreadCount = unread items within sync window (from live query, real-time)
const [olderUnreadCount, setOlderUnreadCount] = useState(0);
const [recentUnreadCount, setRecentUnreadCount] = useState(0);
const initialLoadDoneRef = useRef(false);
const electricBaselineIdsRef = useRef<Set<number> | null>(null);
const newestApiTimestampRef = useRef<string | null>(null);
const liveQueryRef = useRef<{ unsubscribe?: () => void } | null>(null);
const unreadLiveQueryRef = useRef<{ unsubscribe?: () => void } | null>(null);
const syncHandleRef = useRef<SyncHandle | null>(null);
const liveQueryRef = useRef<{ unsubscribe: () => void } | null>(null);
const userSyncKeyRef = useRef<string | null>(null);
const unreadCountLiveQueryRef = useRef<{ unsubscribe: () => void } | null>(null);
const olderUnreadOffsetRef = useRef<number | null>(null);
const apiUnreadTotalRef = useRef(0);
// Total unread = older (static from server) + recent (live from Electric)
const totalUnreadCount = olderUnreadCount + recentUnreadCount;
// EFFECT 1: Electric SQL sync for real-time updates
// EFFECT 1: Fetch first page + unread count from API with category filter
useEffect(() => {
if (!userId || !electricClient) {
setLoading(!electricClient);
return;
}
if (!userId || !searchSpaceId) return;
let cancelled = false;
setLoading(true);
setInboxItems([]);
setHasMore(false);
initialLoadDoneRef.current = false;
electricBaselineIdsRef.current = null;
newestApiTimestampRef.current = null;
olderUnreadOffsetRef.current = null;
apiUnreadTotalRef.current = 0;
const fetchInitialData = async () => {
try {
const [notificationsResponse, unreadResponse] = await Promise.all([
notificationsApiService.getNotifications({
queryParams: {
search_space_id: searchSpaceId,
category,
limit: INITIAL_PAGE_SIZE,
},
}),
notificationsApiService.getUnreadCount(searchSpaceId, undefined, category),
]);
if (cancelled) return;
setInboxItems(notificationsResponse.items);
setHasMore(notificationsResponse.has_more);
setUnreadCount(unreadResponse.total_unread);
apiUnreadTotalRef.current = unreadResponse.total_unread;
newestApiTimestampRef.current = getNewestTimestamp(notificationsResponse.items);
setError(null);
initialLoadDoneRef.current = true;
} catch (err) {
if (cancelled) return;
console.error(`[useInbox:${category}] Initial load failed:`, err);
setError(err instanceof Error ? err : new Error("Failed to load notifications"));
} finally {
if (!cancelled) setLoading(false);
}
};
fetchInitialData();
return () => {
cancelled = true;
};
}, [userId, searchSpaceId, category]);
// EFFECT 2: Electric sync (shared shape) + per-instance type-filtered live queries
useEffect(() => {
if (!userId || !searchSpaceId || !electricClient) return;
const uid = userId;
const spaceId = searchSpaceId;
const client = electricClient;
const typeFilter = CATEGORY_TYPE_SQL[category];
let mounted = true;
async function startSync() {
async function setupElectricRealtime() {
// Clean up previous live queries (NOT the sync shape — it's shared)
if (liveQueryRef.current) {
try {
liveQueryRef.current.unsubscribe?.();
} catch {
/* PGlite may be closed */
}
liveQueryRef.current = null;
}
if (unreadLiveQueryRef.current) {
try {
unreadLiveQueryRef.current.unsubscribe?.();
} catch {
/* PGlite may be closed */
}
unreadLiveQueryRef.current = null;
}
try {
const cutoffDate = getSyncCutoffDate();
const userSyncKey = `inbox_${userId}_${cutoffDate}`;
// Skip if already syncing with this key
if (userSyncKeyRef.current === userSyncKey) return;
// Clean up previous sync
if (syncHandleRef.current) {
try {
syncHandleRef.current.unsubscribe();
} catch {
// PGlite may already be closed during cleanup
}
syncHandleRef.current = null;
}
console.log("[useInbox] Starting sync for:", userId);
userSyncKeyRef.current = userSyncKey;
// Sync shape is cached by the Electric client — multiple hook instances
// calling syncShape with the same params get the same handle.
const handle = await client.syncShape({
table: "notifications",
where: `user_id = '${userId}' AND created_at > '${cutoffDate}'`,
where: `user_id = '${uid}' AND created_at > '${cutoffDate}'`,
primaryKey: ["id"],
});
// Wait for initial sync with timeout
if (!mounted) return;
if (!handle.isUpToDate && handle.initialSyncPromise) {
await Promise.race([
handle.initialSyncPromise,
new Promise((resolve) => setTimeout(resolve, 3000)),
new Promise((resolve) => setTimeout(resolve, 5000)),
]);
}
if (!mounted) return;
const db = client.db as {
live?: {
query: <T>(
sql: string,
params?: (number | string)[]
) => Promise<{
subscribe: (cb: (result: { rows: T[] }) => void) => void;
unsubscribe?: () => void;
}>;
};
};
if (!db.live?.query) return;
// Per-instance live query filtered by category types
const itemsQuery = `SELECT * FROM notifications
WHERE user_id = $1
AND (search_space_id = $2 OR search_space_id IS NULL)
AND created_at > '${cutoffDate}'
${typeFilter}
ORDER BY created_at DESC`;
const liveQuery = await db.live.query<InboxItem>(itemsQuery, [uid, spaceId]);
if (!mounted) {
handle.unsubscribe();
liveQuery.unsubscribe?.();
return;
}
syncHandleRef.current = handle;
setLoading(false);
setError(null);
} catch (err) {
if (!mounted) return;
console.error("[useInbox] Sync failed:", err);
setError(err instanceof Error ? err : new Error("Sync failed"));
setLoading(false);
}
}
liveQuery.subscribe((result: { rows: InboxItem[] }) => {
if (!mounted || !result.rows || !initialLoadDoneRef.current) return;
startSync();
const validItems = result.rows.filter((item) => item.id != null && item.title != null);
const cutoff = new Date(getSyncCutoffDate());
return () => {
mounted = false;
userSyncKeyRef.current = null;
if (syncHandleRef.current) {
try {
syncHandleRef.current.unsubscribe();
} catch {
// PGlite may already be closed during cleanup
}
syncHandleRef.current = null;
}
};
}, [userId, electricClient]);
const liveItemMap = new Map(validItems.map((d) => [d.id, d]));
const liveIds = new Set(liveItemMap.keys());
// Reset when filters change
useEffect(() => {
setHasMore(true);
setInboxItems([]);
// Reset count states - will be refetched by the unread count effect
setOlderUnreadCount(0);
setRecentUnreadCount(0);
}, [userId, searchSpaceId, typeFilter]);
setInboxItems((prev) => {
const prevIds = new Set(prev.map((d) => d.id));
// EFFECT 2: Live query for real-time updates + auto-fetch from API if empty
useEffect(() => {
if (!userId || !electricClient) return;
const client = electricClient;
let mounted = true;
async function setupLiveQuery() {
// Clean up previous live query
if (liveQueryRef.current) {
try {
liveQueryRef.current.unsubscribe();
} catch {
// PGlite may already be closed during cleanup
}
liveQueryRef.current = null;
}
try {
const cutoff = getSyncCutoffDate();
const query = `SELECT * FROM notifications
WHERE user_id = $1
AND (search_space_id = $2 OR search_space_id IS NULL)
AND created_at > '${cutoff}'
${typeFilter ? "AND type = $3" : ""}
ORDER BY created_at DESC
LIMIT ${PAGE_SIZE}`;
const params = typeFilter ? [userId, searchSpaceId, typeFilter] : [userId, searchSpaceId];
const db = client.db as any;
// Initial fetch from PGLite - no validation needed, schema is enforced by Electric SQL sync
const result = await client.db.query<InboxItem>(query, params);
if (mounted && result.rows) {
const items = deduplicateAndSort(result.rows);
setInboxItems(items);
// AUTO-FETCH: If Electric returned 0 items, check API for older items
// This handles the edge case where user has no recent notifications
// but has older ones outside the sync window
if (items.length === 0) {
console.log(
"[useInbox] Electric returned 0 items, checking API for older notifications"
const newItems = filterNewElectricItems(
validItems,
liveIds,
prevIds,
electricBaselineIdsRef,
newestApiTimestampRef.current
);
try {
// Use the API service with proper Zod validation for API responses
const data = await notificationsApiService.getNotifications({
queryParams: {
search_space_id: searchSpaceId ?? undefined,
type: typeFilter ?? undefined,
limit: PAGE_SIZE,
},
});
if (mounted) {
if (data.items.length > 0) {
setInboxItems(data.items);
}
setHasMore(data.has_more);
}
} catch (err) {
console.error("[useInbox] API fallback failed:", err);
}
}
}
// Set up live query for real-time updates
if (db.live?.query) {
const liveQuery = await db.live.query(query, params);
if (!mounted) {
liveQuery.unsubscribe?.();
return;
}
if (liveQuery.subscribe) {
// Live query data comes from PGlite - no validation needed
liveQuery.subscribe((result: { rows: InboxItem[] }) => {
if (mounted && result.rows) {
const liveItems = result.rows;
setInboxItems((prev) => {
const liveItemIds = new Set(liveItems.map((item) => item.id));
// FIXED: Keep ALL items not in live result (not just slice)
// This prevents data loss when new notifications push items
// out of the LIMIT window
const itemsToKeep = prev.filter((item) => !liveItemIds.has(item.id));
return deduplicateAndSort([...liveItems, ...itemsToKeep]);
});
}
let updated = prev.map((item) => {
const liveItem = liveItemMap.get(item.id);
if (liveItem) return liveItem;
return item;
});
const isFullySynced = handle.isUpToDate;
if (isFullySynced) {
updated = updated.filter((item) => {
if (new Date(item.created_at) < cutoff) return true;
return liveIds.has(item.id);
});
}
if (newItems.length > 0) {
return [...newItems, ...updated];
}
return updated;
});
// Calibrate the older-unread offset using baseline items
// (items present in both Electric and the API-loaded list).
// This avoids the timing bug where new items arriving between
// the API fetch and Electric's first callback would be absorbed
// into the offset, making the count appear unchanged.
const baseline = electricBaselineIdsRef.current;
if (olderUnreadOffsetRef.current === null && baseline !== null) {
const baselineUnreadCount = validItems.filter(
(item) => baseline.has(item.id) && !item.read
).length;
olderUnreadOffsetRef.current = Math.max(
0,
apiUnreadTotalRef.current - baselineUnreadCount
);
}
if (liveQuery.unsubscribe) {
liveQueryRef.current = liveQuery;
// Derive unread count from all Electric items + the older offset
if (olderUnreadOffsetRef.current !== null) {
const electricUnreadCount = validItems.filter((item) => !item.read).length;
setUnreadCount(olderUnreadOffsetRef.current + electricUnreadCount);
}
});
liveQueryRef.current = liveQuery;
// Per-instance unread count live query filtered by category types.
// Acts as a secondary reactive path for read-status changes that
// may not trigger the items live query in all edge cases.
const countQuery = `SELECT COUNT(*) as count FROM notifications
WHERE user_id = $1
AND (search_space_id = $2 OR search_space_id IS NULL)
AND created_at > '${cutoffDate}'
AND read = false
${typeFilter}`;
const countLiveQuery = await db.live.query<{ count: number | string }>(countQuery, [
uid,
spaceId,
]);
if (!mounted) {
countLiveQuery.unsubscribe?.();
return;
}
countLiveQuery.subscribe((result: { rows: Array<{ count: number | string }> }) => {
if (!mounted || !result.rows?.[0] || !initialLoadDoneRef.current) return;
if (olderUnreadOffsetRef.current === null) return;
const liveRecentUnread = Number(result.rows[0].count) || 0;
setUnreadCount(olderUnreadOffsetRef.current + liveRecentUnread);
});
unreadLiveQueryRef.current = countLiveQuery;
} catch (err) {
console.error("[useInbox] Live query error:", err);
console.error(`[useInbox:${category}] Electric setup failed:`, err);
}
}
setupLiveQuery();
setupElectricRealtime();
return () => {
mounted = false;
// Only clean up live queries — sync shape is shared across instances
if (liveQueryRef.current) {
try {
liveQueryRef.current.unsubscribe();
liveQueryRef.current.unsubscribe?.();
} catch {
// PGlite may already be closed during cleanup
/* PGlite may be closed */
}
liveQueryRef.current = null;
}
};
}, [userId, searchSpaceId, typeFilter, electricClient]);
// EFFECT 3: Dedicated unread count sync with split tracking
// - Fetches server count on mount (accurate total)
// - Sets up live query for recent count (real-time updates)
// - Handles items older than sync window separately
useEffect(() => {
if (!userId || !electricClient) return;
const client = electricClient;
let mounted = true;
async function setupUnreadCountSync() {
// Cleanup previous live query
if (unreadCountLiveQueryRef.current) {
unreadCountLiveQueryRef.current.unsubscribe();
unreadCountLiveQueryRef.current = null;
}
try {
// STEP 1: Fetch server counts (total and recent) - guaranteed accurate
console.log(
"[useInbox] Fetching unread count from server",
typeFilter ? `for type: ${typeFilter}` : "for all types"
);
const serverCounts = await notificationsApiService.getUnreadCount(
searchSpaceId ?? undefined,
typeFilter ?? undefined
);
if (mounted) {
// Calculate older count = total - recent
const olderCount = serverCounts.total_unread - serverCounts.recent_unread;
setOlderUnreadCount(olderCount);
setRecentUnreadCount(serverCounts.recent_unread);
console.log(
`[useInbox] Server counts: total=${serverCounts.total_unread}, recent=${serverCounts.recent_unread}, older=${olderCount}`
);
if (unreadLiveQueryRef.current) {
try {
unreadLiveQueryRef.current.unsubscribe?.();
} catch {
/* PGlite may be closed */
}
// STEP 2: Set up PGLite live query for RECENT unread count only
// This provides real-time updates for notifications within sync window
const db = client.db as any;
const cutoff = getSyncCutoffDate();
// Count query - NO LIMIT, counts all unread in synced window
const countQuery = `
SELECT COUNT(*) as count FROM notifications
WHERE user_id = $1
AND (search_space_id = $2 OR search_space_id IS NULL)
AND created_at > '${cutoff}'
AND read = false
${typeFilter ? "AND type = $3" : ""}
`;
const params = typeFilter ? [userId, searchSpaceId, typeFilter] : [userId, searchSpaceId];
if (db.live?.query) {
const liveQuery = await db.live.query(countQuery, params);
if (!mounted) {
liveQuery.unsubscribe?.();
return;
}
if (liveQuery.subscribe) {
liveQuery.subscribe((result: { rows: Array<{ count: number | string }> }) => {
if (mounted && result.rows?.[0]) {
const liveCount = Number(result.rows[0].count) || 0;
// Update recent count from live query
// This fires in real-time when Electric syncs new/updated notifications
setRecentUnreadCount(liveCount);
}
});
}
if (liveQuery.unsubscribe) {
unreadCountLiveQueryRef.current = liveQuery;
}
}
} catch (err) {
console.error("[useInbox] Unread count sync error:", err);
// On error, counts will remain at 0 or previous values
// The items-based count will be the fallback
}
}
setupUnreadCountSync();
return () => {
mounted = false;
if (unreadCountLiveQueryRef.current) {
unreadCountLiveQueryRef.current.unsubscribe();
unreadCountLiveQueryRef.current = null;
unreadLiveQueryRef.current = null;
}
};
}, [userId, searchSpaceId, typeFilter, electricClient]);
}, [userId, searchSpaceId, electricClient, category]);
// loadMore - Pure cursor-based pagination, no race conditions
// Cursor is computed from current state, not stored in refs
// Load more pages via API (cursor-based using before_date)
const loadMore = useCallback(async () => {
// Removed inboxItems.length === 0 check to allow loading older items
// when Electric returns 0 items
if (!userId || loadingMore || !hasMore) return;
if (loadingMore || !hasMore || !userId || !searchSpaceId) return;
setLoadingMore(true);
try {
// Cursor is computed from current state - no stale refs possible
const oldestItem = inboxItems.length > 0 ? inboxItems[inboxItems.length - 1] : null;
const beforeDate = oldestItem ? toISOString(oldestItem.created_at) : null;
const beforeDate = oldestItem?.created_at ?? undefined;
console.log("[useInbox] Loading more, before:", beforeDate ?? "none (initial)");
// Use the API service with proper Zod validation
const data = await notificationsApiService.getNotifications({
const response = await notificationsApiService.getNotifications({
queryParams: {
search_space_id: searchSpaceId ?? undefined,
type: typeFilter ?? undefined,
before_date: beforeDate ?? undefined,
limit: PAGE_SIZE,
search_space_id: searchSpaceId,
category,
before_date: beforeDate,
limit: SCROLL_PAGE_SIZE,
},
});
if (data.items.length > 0) {
// Functional update ensures we always merge with latest state
// Items are already validated by the API service
setInboxItems((prev) => deduplicateAndSort([...prev, ...data.items]));
}
const newItems = response.items;
// Use API's has_more flag
setHasMore(data.has_more);
setInboxItems((prev) => {
const existingIds = new Set(prev.map((d) => d.id));
const deduped = newItems.filter((d) => !existingIds.has(d.id));
return [...prev, ...deduped];
});
setHasMore(response.has_more);
} catch (err) {
console.error("[useInbox] Load more failed:", err);
console.error(`[useInbox:${category}] Load more failed:`, err);
} finally {
setLoadingMore(false);
}
}, [userId, searchSpaceId, typeFilter, loadingMore, hasMore, inboxItems]);
}, [loadingMore, hasMore, userId, searchSpaceId, inboxItems, category]);
// Mark inbox item as read with optimistic update
// Handles both recent items (live query updates count) and older items (manual count decrement)
// Mark single item as read with optimistic update
const markAsRead = useCallback(
async (itemId: number) => {
// Find the item to check if it's older than sync window
const item = inboxItems.find((i) => i.id === itemId);
const isOlderItem = item && !item.read && isOlderThanSyncWindow(item.created_at);
if (!item || item.read) return true;
const cutoff = new Date(getSyncCutoffDate());
const isOlderItem = new Date(item.created_at) < cutoff;
// Optimistic update: mark as read immediately for instant UI feedback
setInboxItems((prev) => prev.map((i) => (i.id === itemId ? { ...i, read: true } : i)));
setUnreadCount((prev) => Math.max(0, prev - 1));
// If older item, manually decrement older count
// (live query won't see items outside sync window)
if (isOlderItem) {
setOlderUnreadCount((prev) => Math.max(0, prev - 1));
if (isOlderItem && olderUnreadOffsetRef.current !== null) {
olderUnreadOffsetRef.current = Math.max(0, olderUnreadOffsetRef.current - 1);
}
try {
// Use the API service with proper Zod validation
const result = await notificationsApiService.markAsRead({ notificationId: itemId });
if (!result.success) {
// Rollback on error
setInboxItems((prev) => prev.map((i) => (i.id === itemId ? { ...i, read: false } : i)));
if (isOlderItem) {
setOlderUnreadCount((prev) => prev + 1);
setUnreadCount((prev) => prev + 1);
if (isOlderItem && olderUnreadOffsetRef.current !== null) {
olderUnreadOffsetRef.current += 1;
}
}
// If successful, Electric SQL will sync the change and live query will update
// This ensures eventual consistency even if optimistic update was wrong
return result.success;
} catch (err) {
console.error("Failed to mark as read:", err);
// Rollback on error
setInboxItems((prev) => prev.map((i) => (i.id === itemId ? { ...i, read: false } : i)));
if (isOlderItem) {
setOlderUnreadCount((prev) => prev + 1);
setUnreadCount((prev) => prev + 1);
if (isOlderItem && olderUnreadOffsetRef.current !== null) {
olderUnreadOffsetRef.current += 1;
}
return false;
}
@ -499,49 +404,42 @@ export function useInbox(
[inboxItems]
);
// Mark all inbox items as read with optimistic update
// Resets both older and recent counts to 0
// Mark all as read with optimistic update
const markAllAsRead = useCallback(async () => {
// Store previous counts for potential rollback
const prevOlderCount = olderUnreadCount;
const prevRecentCount = recentUnreadCount;
const prevItems = inboxItems;
const prevCount = unreadCount;
const prevOffset = olderUnreadOffsetRef.current;
// Optimistic update: mark all as read immediately for instant UI feedback
setInboxItems((prev) => prev.map((item) => ({ ...item, read: true })));
setOlderUnreadCount(0);
setRecentUnreadCount(0);
setUnreadCount(0);
olderUnreadOffsetRef.current = 0;
try {
// Use the API service with proper Zod validation
const result = await notificationsApiService.markAllAsRead();
if (!result.success) {
console.error("Failed to mark all as read");
// Rollback counts on error
setOlderUnreadCount(prevOlderCount);
setRecentUnreadCount(prevRecentCount);
setInboxItems(prevItems);
setUnreadCount(prevCount);
olderUnreadOffsetRef.current = prevOffset;
}
// Electric SQL will sync and live query will ensure consistency
return result.success;
} catch (err) {
console.error("Failed to mark all as read:", err);
// Rollback counts on error
setOlderUnreadCount(prevOlderCount);
setRecentUnreadCount(prevRecentCount);
setInboxItems(prevItems);
setUnreadCount(prevCount);
olderUnreadOffsetRef.current = prevOffset;
return false;
}
}, [olderUnreadCount, recentUnreadCount]);
}, [inboxItems, unreadCount]);
return {
inboxItems,
unreadCount: totalUnreadCount,
unreadCount,
markAsRead,
markAllAsRead,
loading,
loadingMore,
hasMore,
loadMore,
isUsingApiFallback: true, // Always use API for pagination
error,
};
}

View file

@ -0,0 +1,47 @@
import { useCallback, useEffect, useRef } from "react";
const LONG_PRESS_DELAY = 500;
/**
 * Detects a long press from touch events and invokes `onLongPress`
 * after the pointer has been held down for `delay` ms without moving.
 *
 * Returned API:
 * - `handlers`: spread onto the target element (`onTouchStart`,
 *   `onTouchEnd`, `onTouchMove`).
 * - `wasLongPress()`: one-shot check for a subsequent click/tap handler
 *   to know the gesture was a long press (reading it resets the flag).
 *
 * @param onLongPress - Callback fired once when the press exceeds `delay`
 * @param delay - Hold duration in ms before firing (default 500)
 */
export function useLongPress(onLongPress: () => void, delay = LONG_PRESS_DELAY) {
  const timerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
  const triggeredRef = useRef(false);

  const cancel = useCallback(() => {
    if (timerRef.current) {
      clearTimeout(timerRef.current);
      timerRef.current = null;
    }
  }, []);

  const start = useCallback(() => {
    // FIX: clear any pending timer first. Without this, overlapping
    // touchstart events (multi-touch, or a missed touchend) would leave
    // an orphan timer that still fires onLongPress later.
    if (timerRef.current) {
      clearTimeout(timerRef.current);
    }
    triggeredRef.current = false;
    timerRef.current = setTimeout(() => {
      // Null the ref once fired so cancel() doesn't clear a stale id.
      timerRef.current = null;
      triggeredRef.current = true;
      onLongPress();
    }, delay);
  }, [onLongPress, delay]);

  // Clear any in-flight timer on unmount so the callback can't fire
  // against an unmounted component.
  useEffect(() => {
    return () => {
      if (timerRef.current) {
        clearTimeout(timerRef.current);
      }
    };
  }, []);

  const handlers = {
    onTouchStart: start,
    onTouchEnd: cancel,
    // Any movement aborts the gesture — it's a scroll/drag, not a press.
    onTouchMove: cancel,
  };

  // One-shot: returns true exactly once after a long press completed,
  // then resets so the next tap reads false.
  const wasLongPress = useCallback(() => {
    if (triggeredRef.current) {
      triggeredRef.current = false;
      return true;
    }
    return false;
  }, []);

  return { handlers, wasLongPress };
}

View file

@ -33,25 +33,37 @@ export function usePlatformShortcut() {
setReady(true);
}, []);
const shortcut = useCallback(
(...keys: string[]) => {
if (!ready) return "";
const resolveKeys = useCallback(
(keys: string[]) => {
const mod = isMac ? "⌘" : "Ctrl";
const shift = isMac ? "⇧" : "Shift";
const alt = isMac ? "⌥" : "Alt";
const mapped = keys.map((k) => {
return keys.map((k) => {
if (k === "Mod") return mod;
if (k === "Shift") return shift;
if (k === "Alt") return alt;
return k;
});
return `(${mapped.join("+")})`;
},
[ready, isMac]
[isMac]
);
return { shortcut, isMac, ready };
const shortcut = useCallback(
(...keys: string[]) => {
if (!ready) return "";
return `(${resolveKeys(keys).join("+")})`;
},
[ready, resolveKeys]
);
const shortcutKeys = useCallback(
(...keys: string[]) => {
if (!ready) return [];
return resolveKeys(keys);
},
[ready, resolveKeys]
);
return { shortcut, shortcutKeys, isMac, ready };
}