feat: enhance notifications API and inbox functionality

- Added a new endpoint to list notifications with pagination, allowing users to fetch older notifications beyond the sync window.
- Introduced response models for notifications and improved error handling for date filtering.
- Updated the useInbox hook to support API fallback for loading older notifications when Electric SQL returns no recent items.
- Implemented deduplication and sorting logic for inbox items to prevent race conditions and ensure consistent data display.
- Enhanced loading logic for inbox items, including improved pagination and handling of loading states.
This commit is contained in:
Anish Sarkar 2026-01-22 16:02:25 +05:30
parent 36f1d28632
commit a449e7e2a6
3 changed files with 354 additions and 192 deletions

View file

@@ -1,12 +1,16 @@
""" """
Notifications API routes. Notifications API routes.
These endpoints allow marking notifications as read. These endpoints allow marking notifications as read and fetching older notifications.
Electric SQL automatically syncs the changes to all connected clients. Electric SQL automatically syncs the changes to all connected clients for recent items.
For older items (beyond the sync window), use the list endpoint.
""" """
from fastapi import APIRouter, Depends, HTTPException, status from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Query, status
from pydantic import BaseModel from pydantic import BaseModel
from sqlalchemy import select, update from sqlalchemy import desc, func, select, update
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from app.db import Notification, User, get_async_session from app.db import Notification, User, get_async_session
@@ -15,6 +19,33 @@ from app.users import current_active_user
router = APIRouter(prefix="/notifications", tags=["notifications"]) router = APIRouter(prefix="/notifications", tags=["notifications"])
class NotificationResponse(BaseModel):
    """Response model for a single notification.

    The list endpoint builds instances manually: ``metadata`` is populated
    from the ORM attribute ``notification_metadata`` and the timestamps are
    pre-converted to ISO-8601 strings, so ``from_attributes`` alone would not
    map these fields — NOTE(review): confirm before relying on model_validate().
    """

    id: int
    # Stringified user identifier of the notification's owner.
    user_id: str
    # None for global notifications not tied to a specific search space.
    search_space_id: Optional[int]
    type: str
    title: str
    message: str
    read: bool
    # Arbitrary per-notification payload; {} when the stored value is NULL.
    metadata: dict
    # ISO-8601 timestamp string ("" when the stored value is missing).
    created_at: str
    # ISO-8601 timestamp string, or None when the row was never updated.
    updated_at: Optional[str]

    class Config:
        from_attributes = True
class NotificationListResponse(BaseModel):
    """Response for listing notifications with pagination."""

    # One page of notifications, newest first.
    items: list[NotificationResponse]
    # Total rows matching the filters (independent of limit/offset).
    total: int
    # True when more items exist beyond this page.
    has_more: bool
    # Offset to request the next page with, or None when has_more is False.
    next_offset: Optional[int]
class MarkReadResponse(BaseModel): class MarkReadResponse(BaseModel):
"""Response for mark as read operations.""" """Response for mark as read operations."""
@@ -30,6 +61,96 @@ class MarkAllReadResponse(BaseModel):
updated_count: int updated_count: int
@router.get("", response_model=NotificationListResponse)
async def list_notifications(
search_space_id: Optional[int] = Query(None, description="Filter by search space ID"),
type_filter: Optional[str] = Query(None, alias="type", description="Filter by notification type"),
before_date: Optional[str] = Query(None, description="Get notifications before this ISO date (for pagination)"),
limit: int = Query(50, ge=1, le=100, description="Number of items to return"),
offset: int = Query(0, ge=0, description="Number of items to skip"),
user: User = Depends(current_active_user),
session: AsyncSession = Depends(get_async_session),
) -> NotificationListResponse:
"""
List notifications for the current user with pagination.
This endpoint is used as a fallback for older notifications that are
outside the Electric SQL sync window (2 weeks).
Use `before_date` to paginate through older notifications efficiently.
"""
# Build base query
query = select(Notification).where(Notification.user_id == user.id)
count_query = select(func.count(Notification.id)).where(Notification.user_id == user.id)
# Filter by search space (include null search_space_id for global notifications)
if search_space_id is not None:
query = query.where(
(Notification.search_space_id == search_space_id) |
(Notification.search_space_id.is_(None))
)
count_query = count_query.where(
(Notification.search_space_id == search_space_id) |
(Notification.search_space_id.is_(None))
)
# Filter by type
if type_filter:
query = query.where(Notification.type == type_filter)
count_query = count_query.where(Notification.type == type_filter)
# Filter by date (for efficient pagination of older items)
if before_date:
try:
before_datetime = datetime.fromisoformat(before_date.replace("Z", "+00:00"))
query = query.where(Notification.created_at < before_datetime)
count_query = count_query.where(Notification.created_at < before_datetime)
except ValueError:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid date format. Use ISO format (e.g., 2024-01-15T00:00:00Z)",
) from None
# Get total count
total_result = await session.execute(count_query)
total = total_result.scalar() or 0
# Apply ordering and pagination
query = query.order_by(desc(Notification.created_at)).offset(offset).limit(limit + 1)
# Execute query
result = await session.execute(query)
notifications = result.scalars().all()
# Check if there are more items
has_more = len(notifications) > limit
if has_more:
notifications = notifications[:limit]
# Convert to response format
items = []
for notification in notifications:
items.append(NotificationResponse(
id=notification.id,
user_id=str(notification.user_id),
search_space_id=notification.search_space_id,
type=notification.type,
title=notification.title,
message=notification.message,
read=notification.read,
metadata=notification.notification_metadata or {},
created_at=notification.created_at.isoformat() if notification.created_at else "",
updated_at=notification.updated_at.isoformat() if notification.updated_at else None,
))
return NotificationListResponse(
items=items,
total=total,
has_more=has_more,
next_offset=offset + limit if has_more else None,
)
@router.patch("/{notification_id}/read", response_model=MarkReadResponse) @router.patch("/{notification_id}/read", response_model=MarkReadResponse)
async def mark_notification_as_read( async def mark_notification_as_read(
notification_id: int, notification_id: int,

View file

@@ -8,31 +8,74 @@ import { useElectricClient } from "@/lib/electric/context";
export type { InboxItem, InboxItemTypeEnum } from "@/contracts/types/inbox.types"; export type { InboxItem, InboxItemTypeEnum } from "@/contracts/types/inbox.types";
const PAGE_SIZE = 50; // Items per batch const PAGE_SIZE = 50;
const SYNC_WINDOW_DAYS = 14;
/** /**
* Hook for managing inbox items with Electric SQL real-time sync * Deduplicate by ID and sort by created_at descending.
* This is the SINGLE source of truth for deduplication - prevents race conditions.
*/
/**
 * Deduplicate by ID (first occurrence wins) and sort by created_at descending.
 * This is the SINGLE source of truth for deduplication - prevents race conditions.
 */
function deduplicateAndSort(items: InboxItem[]): InboxItem[] {
  const knownIds = new Set<number>();
  const unique: InboxItem[] = [];

  for (const candidate of items) {
    if (knownIds.has(candidate.id)) continue;
    knownIds.add(candidate.id);
    unique.push(candidate);
  }

  unique.sort(
    (first, second) =>
      new Date(second.created_at).getTime() - new Date(first.created_at).getTime()
  );
  return unique;
}
/**
 * ISO timestamp marking the start of the Electric SQL sync window,
 * i.e. SYNC_WINDOW_DAYS calendar days before now.
 */
function getSyncCutoffDate(): string {
  const windowStart = new Date();
  // setDate (not millisecond arithmetic) so DST transitions are handled
  // the same way as calendar-day subtraction.
  windowStart.setDate(windowStart.getDate() - SYNC_WINDOW_DAYS);
  return windowStart.toISOString();
}
/**
 * Normalize a date value to an ISO-8601 string.
 * Returns null for empty values and unsupported types; strings that already
 * contain "T" are passed through untouched, and unparseable strings are
 * returned as-is rather than throwing.
 */
function toISOString(date: string | Date | null | undefined): string | null {
  if (!date) return null;

  if (date instanceof Date) {
    return date.toISOString();
  }

  if (typeof date !== "string") return null;

  // Assume anything containing "T" is already ISO formatted.
  if (date.includes("T")) {
    return date;
  }

  try {
    return new Date(date).toISOString();
  } catch {
    // Unparseable date string - hand it back unchanged.
    return date;
  }
}
/**
* Hook for managing inbox items with Electric SQL real-time sync + API fallback
* *
* Uses the Electric client from context (provided by ElectricProvider) * Architecture (Simplified & Race-Condition Free):
* instead of initializing its own - prevents race conditions and memory leaks * - Electric SQL: Syncs recent items (within SYNC_WINDOW_DAYS) for real-time updates
* - Live Query: Provides reactive first page from PGLite
* - API: Handles all pagination (more reliable than mixing with Electric)
* *
* Architecture: * Key Design Decisions:
* - User-level sync: Syncs ALL inbox items for a user (runs once per user) * 1. No mutable refs for cursor - cursor computed from current state
* - Search-space-level query: Filters inbox items by searchSpaceId (updates on search space change) * 2. Single deduplicateAndSort function - prevents inconsistencies
* - Pagination: Loads items in batches for better performance with large datasets * 3. Filter-based preservation in live query - prevents data loss
* * 4. Auto-fetch from API when Electric returns 0 items
* This separation ensures smooth transitions when switching search spaces (no flash).
* *
* @param userId - The user ID to fetch inbox items for * @param userId - The user ID to fetch inbox items for
* @param searchSpaceId - The search space ID to filter inbox items (null shows global items only) * @param searchSpaceId - The search space ID to filter inbox items
* @param typeFilter - Optional inbox item type to filter by (null shows all types) * @param typeFilter - Optional inbox item type to filter by
*/ */
export function useInbox( export function useInbox(
userId: string | null, userId: string | null,
searchSpaceId: number | null, searchSpaceId: number | null,
typeFilter: InboxItemTypeEnum | null = null typeFilter: InboxItemTypeEnum | null = null
) { ) {
// Get Electric client from context - ElectricProvider handles initialization
const electricClient = useElectricClient(); const electricClient = useElectricClient();
const [inboxItems, setInboxItems] = useState<InboxItem[]>([]); const [inboxItems, setInboxItems] = useState<InboxItem[]>([]);
@@ -41,58 +84,51 @@ export function useInbox(
const [loadingMore, setLoadingMore] = useState(false); const [loadingMore, setLoadingMore] = useState(false);
const [hasMore, setHasMore] = useState(true); const [hasMore, setHasMore] = useState(true);
const [error, setError] = useState<Error | null>(null); const [error, setError] = useState<Error | null>(null);
const syncHandleRef = useRef<SyncHandle | null>(null); const syncHandleRef = useRef<SyncHandle | null>(null);
const liveQueryRef = useRef<{ unsubscribe: () => void } | null>(null); const liveQueryRef = useRef<{ unsubscribe: () => void } | null>(null);
const unreadCountLiveQueryRef = useRef<{ unsubscribe: () => void } | null>(null); const unreadCountLiveQueryRef = useRef<{ unsubscribe: () => void } | null>(null);
const offsetRef = useRef(0);
// Track user-level sync key to prevent duplicate sync subscriptions
const userSyncKeyRef = useRef<string | null>(null); const userSyncKeyRef = useRef<string | null>(null);
// EFFECT 1: User-level sync - runs once per user, syncs ALL inbox items // EFFECT 1: Electric SQL sync for real-time updates
useEffect(() => { useEffect(() => {
if (!userId || !electricClient) { if (!userId || !electricClient) {
setLoading(!electricClient); setLoading(!electricClient);
return; return;
} }
const userSyncKey = `inbox_${userId}`;
if (userSyncKeyRef.current === userSyncKey) {
// Already syncing for this user
return;
}
// Capture electricClient to satisfy TypeScript in async function
const client = electricClient; const client = electricClient;
let mounted = true; let mounted = true;
async function startSync() {
try {
const cutoffDate = getSyncCutoffDate();
const userSyncKey = `inbox_${userId}_${cutoffDate}`;
// Skip if already syncing with this key
if (userSyncKeyRef.current === userSyncKey) return;
// Clean up previous sync
if (syncHandleRef.current) {
syncHandleRef.current.unsubscribe();
syncHandleRef.current = null;
}
console.log("[useInbox] Starting sync for:", userId);
userSyncKeyRef.current = userSyncKey; userSyncKeyRef.current = userSyncKey;
async function startUserSync() {
try {
console.log("[useInbox] Starting user-level sync for:", userId);
// Sync ALL inbox items for this user (cached via syncShape caching)
// Note: Backend table is still named "notifications"
const handle = await client.syncShape({ const handle = await client.syncShape({
table: "notifications", table: "notifications",
where: `user_id = '${userId}'`, where: `user_id = '${userId}' AND created_at > '${cutoffDate}'`,
primaryKey: ["id"], primaryKey: ["id"],
}); });
console.log("[useInbox] User sync started:", {
isUpToDate: handle.isUpToDate,
});
// Wait for initial sync with timeout // Wait for initial sync with timeout
if (!handle.isUpToDate && handle.initialSyncPromise) { if (!handle.isUpToDate && handle.initialSyncPromise) {
try {
await Promise.race([ await Promise.race([
handle.initialSyncPromise, handle.initialSyncPromise,
new Promise((resolve) => setTimeout(resolve, 2000)), new Promise((resolve) => setTimeout(resolve, 3000)),
]); ]);
} catch (syncErr) {
console.error("[useInbox] Initial sync failed:", syncErr);
}
} }
if (!mounted) { if (!mounted) {
@@ -105,18 +141,17 @@ export function useInbox(
setError(null); setError(null);
} catch (err) { } catch (err) {
if (!mounted) return; if (!mounted) return;
console.error("[useInbox] Failed to start user sync:", err); console.error("[useInbox] Sync failed:", err);
setError(err instanceof Error ? err : new Error("Failed to sync inbox")); setError(err instanceof Error ? err : new Error("Sync failed"));
setLoading(false); setLoading(false);
} }
} }
startUserSync(); startSync();
return () => { return () => {
mounted = false; mounted = false;
userSyncKeyRef.current = null; userSyncKeyRef.current = null;
if (syncHandleRef.current) { if (syncHandleRef.current) {
syncHandleRef.current.unsubscribe(); syncHandleRef.current.unsubscribe();
syncHandleRef.current = null; syncHandleRef.current = null;
@@ -124,117 +159,126 @@ export function useInbox(
}; };
}, [userId, electricClient]); }, [userId, electricClient]);
// Reset pagination when filters change // Reset when filters change
useEffect(() => { useEffect(() => {
offsetRef.current = 0;
setHasMore(true); setHasMore(true);
setInboxItems([]); setInboxItems([]);
}, [userId, searchSpaceId, typeFilter]); }, [userId, searchSpaceId, typeFilter]);
// EFFECT 2: Search-space-level query - updates when searchSpaceId or typeFilter changes // EFFECT 2: Live query for real-time updates + auto-fetch from API if empty
// This runs independently of sync, allowing smooth transitions between search spaces
useEffect(() => { useEffect(() => {
if (!userId || !electricClient) { if (!userId || !electricClient) return;
return;
}
// Capture electricClient to satisfy TypeScript in async function
const client = electricClient; const client = electricClient;
let mounted = true; let mounted = true;
async function updateQuery() { async function setupLiveQuery() {
// Clean up previous live query (but DON'T clear inbox items - keep showing old until new arrive) // Clean up previous live query
if (liveQueryRef.current) { if (liveQueryRef.current) {
liveQueryRef.current.unsubscribe(); liveQueryRef.current.unsubscribe();
liveQueryRef.current = null; liveQueryRef.current = null;
} }
try { try {
console.log( const cutoff = getSyncCutoffDate();
"[useInbox] Updating query for searchSpace:",
searchSpaceId,
"typeFilter:",
typeFilter
);
// Build query with optional type filter and LIMIT for pagination const query = `SELECT * FROM notifications
// Note: Backend table is still named "notifications"
const baseQuery = `SELECT * FROM notifications
WHERE user_id = $1 WHERE user_id = $1
AND (search_space_id = $2 OR search_space_id IS NULL)`; AND (search_space_id = $2 OR search_space_id IS NULL)
const typeClause = typeFilter ? ` AND type = $3` : ""; AND created_at > '${cutoff}'
const orderClause = ` ORDER BY created_at DESC`; ${typeFilter ? "AND type = $3" : ""}
const limitClause = ` LIMIT ${PAGE_SIZE}`; ORDER BY created_at DESC
const fullQuery = baseQuery + typeClause + orderClause + limitClause; LIMIT ${PAGE_SIZE}`;
const params = typeFilter ? [userId, searchSpaceId, typeFilter] : [userId, searchSpaceId];
// Fetch inbox items for current search space immediately const params = typeFilter
const result = await client.db.query<InboxItem>(fullQuery, params); ? [userId, searchSpaceId, typeFilter]
: [userId, searchSpaceId];
if (mounted) {
const items = result.rows || [];
setInboxItems(items);
setHasMore(items.length === PAGE_SIZE);
offsetRef.current = items.length;
}
// Set up live query for real-time updates (first page only)
const db = client.db as any; const db = client.db as any;
if (db.live?.query && typeof db.live.query === "function") { // Initial fetch from PGLite
const liveQuery = await db.live.query(fullQuery, params); const result = await client.db.query<InboxItem>(query, params);
if (mounted && result.rows) {
const items = deduplicateAndSort(result.rows);
setInboxItems(items);
// AUTO-FETCH: If Electric returned 0 items, check API for older items
// This handles the edge case where user has no recent notifications
// but has older ones outside the sync window
if (items.length === 0) {
console.log(
"[useInbox] Electric returned 0 items, checking API for older notifications"
);
try {
const apiParams = new URLSearchParams();
if (searchSpaceId !== null) {
apiParams.append("search_space_id", String(searchSpaceId));
}
if (typeFilter) {
apiParams.append("type", typeFilter);
}
apiParams.append("limit", String(PAGE_SIZE));
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/notifications?${apiParams.toString()}`
);
if (response.ok && mounted) {
const data = await response.json();
const apiItems: InboxItem[] = data.items.map((item: any) => ({
...item,
metadata: item.metadata || {},
}));
if (apiItems.length > 0) {
setInboxItems(apiItems);
}
setHasMore(data.has_more ?? apiItems.length === PAGE_SIZE);
}
} catch (err) {
console.error("[useInbox] API fallback failed:", err);
}
}
}
// Set up live query for real-time updates
if (db.live?.query) {
const liveQuery = await db.live.query(query, params);
if (!mounted) { if (!mounted) {
liveQuery.unsubscribe?.(); liveQuery.unsubscribe?.();
return; return;
} }
// Set initial results from live query if (liveQuery.subscribe) {
if (liveQuery.initialResults?.rows) {
const items = liveQuery.initialResults.rows;
setInboxItems(items);
setHasMore(items.length === PAGE_SIZE);
offsetRef.current = items.length;
} else if (liveQuery.rows) {
const items = liveQuery.rows;
setInboxItems(items);
setHasMore(items.length === PAGE_SIZE);
offsetRef.current = items.length;
}
// Subscribe to changes
if (typeof liveQuery.subscribe === "function") {
liveQuery.subscribe((result: { rows: InboxItem[] }) => { liveQuery.subscribe((result: { rows: InboxItem[] }) => {
if (mounted && result.rows) { if (mounted && result.rows) {
// Only update first page from live query setInboxItems((prev) => {
// Keep any additionally loaded items const liveItems = result.rows;
setInboxItems(prev => { const liveItemIds = new Set(liveItems.map((item) => item.id));
if (prev.length <= PAGE_SIZE) {
const items = result.rows; // FIXED: Keep ALL items not in live result (not just slice)
setHasMore(items.length === PAGE_SIZE); // This prevents data loss when new notifications push items
offsetRef.current = items.length; // out of the LIMIT window
return items; const itemsToKeep = prev.filter((item) => !liveItemIds.has(item.id));
}
// Merge: new first page + existing extra items return deduplicateAndSort([...liveItems, ...itemsToKeep]);
const newFirstPage = result.rows;
const existingExtra = prev.slice(PAGE_SIZE);
offsetRef.current = newFirstPage.length + existingExtra.length;
return [...newFirstPage, ...existingExtra];
}); });
} }
}); });
} }
if (typeof liveQuery.unsubscribe === "function") { if (liveQuery.unsubscribe) {
liveQueryRef.current = liveQuery; liveQueryRef.current = liveQuery;
} }
} }
} catch (err) { } catch (err) {
console.error("[useInbox] Failed to update query:", err); console.error("[useInbox] Live query error:", err);
} }
} }
updateQuery(); setupLiveQuery();
return () => { return () => {
mounted = false; mounted = false;
@@ -245,61 +289,45 @@ export function useInbox(
}; };
}, [userId, searchSpaceId, typeFilter, electricClient]); }, [userId, searchSpaceId, typeFilter, electricClient]);
// EFFECT 3: Total unread count - independent of type filter // EFFECT 3: Unread count with live updates
// This ensures the badge count stays consistent regardless of active filter
useEffect(() => { useEffect(() => {
if (!userId || !electricClient) { if (!userId || !electricClient) return;
return;
}
// Capture electricClient to satisfy TypeScript in async function
const client = electricClient; const client = electricClient;
let mounted = true; let mounted = true;
async function updateUnreadCount() { async function updateUnreadCount() {
// Clean up previous live query
if (unreadCountLiveQueryRef.current) { if (unreadCountLiveQueryRef.current) {
unreadCountLiveQueryRef.current.unsubscribe(); unreadCountLiveQueryRef.current.unsubscribe();
unreadCountLiveQueryRef.current = null; unreadCountLiveQueryRef.current = null;
} }
try { try {
// Note: Backend table is still named "notifications" const cutoff = getSyncCutoffDate();
const countQuery = `SELECT COUNT(*) as count FROM notifications const query = `SELECT COUNT(*) as count FROM notifications
WHERE user_id = $1 WHERE user_id = $1
AND (search_space_id = $2 OR search_space_id IS NULL) AND (search_space_id = $2 OR search_space_id IS NULL)
AND read = false`; AND read = false
AND created_at > '${cutoff}'`;
// Fetch initial count const result = await client.db.query<{ count: number }>(query, [
const result = await client.db.query<{ count: number }>(countQuery, [
userId, userId,
searchSpaceId, searchSpaceId,
]); ]);
if (mounted && result.rows?.[0]) { if (mounted && result.rows?.[0]) {
setTotalUnreadCount(Number(result.rows[0].count) || 0); setTotalUnreadCount(Number(result.rows[0].count) || 0);
} }
// Set up live query for real-time updates
const db = client.db as any; const db = client.db as any;
if (db.live?.query) {
if (db.live?.query && typeof db.live.query === "function") { const liveQuery = await db.live.query(query, [userId, searchSpaceId]);
const liveQuery = await db.live.query(countQuery, [userId, searchSpaceId]);
if (!mounted) { if (!mounted) {
liveQuery.unsubscribe?.(); liveQuery.unsubscribe?.();
return; return;
} }
// Set initial results from live query if (liveQuery.subscribe) {
if (liveQuery.initialResults?.rows?.[0]) {
setTotalUnreadCount(Number(liveQuery.initialResults.rows[0].count) || 0);
} else if (liveQuery.rows?.[0]) {
setTotalUnreadCount(Number(liveQuery.rows[0].count) || 0);
}
// Subscribe to changes
if (typeof liveQuery.subscribe === "function") {
liveQuery.subscribe((result: { rows: { count: number }[] }) => { liveQuery.subscribe((result: { rows: { count: number }[] }) => {
if (mounted && result.rows?.[0]) { if (mounted && result.rows?.[0]) {
setTotalUnreadCount(Number(result.rows[0].count) || 0); setTotalUnreadCount(Number(result.rows[0].count) || 0);
@@ -307,12 +335,12 @@ export function useInbox(
}); });
} }
if (typeof liveQuery.unsubscribe === "function") { if (liveQuery.unsubscribe) {
unreadCountLiveQueryRef.current = liveQuery; unreadCountLiveQueryRef.current = liveQuery;
} }
} }
} catch (err) { } catch (err) {
console.error("[useInbox] Failed to update unread count:", err); console.error("[useInbox] Unread count error:", err);
} }
} }
@@ -327,76 +355,88 @@ export function useInbox(
}; };
}, [userId, searchSpaceId, electricClient]); }, [userId, searchSpaceId, electricClient]);
// Load more items (for infinite scroll) // loadMore - Pure cursor-based pagination, no race conditions
// Cursor is computed from current state, not stored in refs
const loadMore = useCallback(async () => { const loadMore = useCallback(async () => {
if (!userId || !electricClient || loadingMore || !hasMore) { // Removed inboxItems.length === 0 check to allow loading older items
return; // when Electric returns 0 items
} if (!userId || loadingMore || !hasMore) return;
setLoadingMore(true); setLoadingMore(true);
const client = electricClient;
try { try {
const baseQuery = `SELECT * FROM notifications // Cursor is computed from current state - no stale refs possible
WHERE user_id = $1 const oldestItem = inboxItems.length > 0 ? inboxItems[inboxItems.length - 1] : null;
AND (search_space_id = $2 OR search_space_id IS NULL)`; const beforeDate = oldestItem ? toISOString(oldestItem.created_at) : null;
const typeClause = typeFilter ? ` AND type = $3` : "";
const orderClause = ` ORDER BY created_at DESC`;
const limitOffsetClause = ` LIMIT ${PAGE_SIZE} OFFSET ${offsetRef.current}`;
const fullQuery = baseQuery + typeClause + orderClause + limitOffsetClause;
const params = typeFilter ? [userId, searchSpaceId, typeFilter] : [userId, searchSpaceId];
const result = await client.db.query<InboxItem>(fullQuery, params); const params = new URLSearchParams();
const newItems = result.rows || []; if (searchSpaceId !== null) {
params.append("search_space_id", String(searchSpaceId));
}
if (typeFilter) {
params.append("type", typeFilter);
}
// Only add before_date if we have a cursor
// Without before_date, API returns newest items first
if (beforeDate) {
params.append("before_date", beforeDate);
}
params.append("limit", String(PAGE_SIZE));
setInboxItems(prev => [...prev, ...newItems]); console.log("[useInbox] Loading more, before:", beforeDate ?? "none (initial)");
setHasMore(newItems.length === PAGE_SIZE);
offsetRef.current += newItems.length; const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/notifications?${params.toString()}`
);
if (!response.ok) {
throw new Error("Failed to fetch notifications");
}
const data = await response.json();
const apiItems: InboxItem[] = data.items.map((item: any) => ({
...item,
metadata: item.metadata || {},
}));
if (apiItems.length > 0) {
// Functional update ensures we always merge with latest state
setInboxItems((prev) => deduplicateAndSort([...prev, ...apiItems]));
}
// Use API's has_more flag if available, otherwise check count
setHasMore(data.has_more ?? apiItems.length === PAGE_SIZE);
} catch (err) { } catch (err) {
console.error("[useInbox] Failed to load more:", err); console.error("[useInbox] Load more failed:", err);
} finally { } finally {
setLoadingMore(false); setLoadingMore(false);
} }
}, [userId, searchSpaceId, typeFilter, electricClient, loadingMore, hasMore]); }, [userId, searchSpaceId, typeFilter, loadingMore, hasMore, inboxItems]);
// Mark inbox item as read via backend API // Mark inbox item as read
const markAsRead = useCallback(async (itemId: number) => { const markAsRead = useCallback(async (itemId: number) => {
try { try {
// Note: Backend API endpoint is still /notifications/
const response = await authenticatedFetch( const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/notifications/${itemId}/read`, `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/notifications/${itemId}/read`,
{ method: "PATCH" } { method: "PATCH" }
); );
return response.ok;
if (!response.ok) {
const error = await response.json().catch(() => ({ detail: "Failed to mark as read" }));
throw new Error(error.detail || "Failed to mark inbox item as read");
}
return true;
} catch (err) { } catch (err) {
console.error("Failed to mark inbox item as read:", err); console.error("Failed to mark as read:", err);
return false; return false;
} }
}, []); }, []);
// Mark all inbox items as read via backend API // Mark all inbox items as read
const markAllAsRead = useCallback(async () => { const markAllAsRead = useCallback(async () => {
try { try {
// Note: Backend API endpoint is still /notifications/
const response = await authenticatedFetch( const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/notifications/read-all`, `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/notifications/read-all`,
{ method: "PATCH" } { method: "PATCH" }
); );
return response.ok;
if (!response.ok) {
const error = await response.json().catch(() => ({ detail: "Failed to mark all as read" }));
throw new Error(error.detail || "Failed to mark all inbox items as read");
}
return true;
} catch (err) { } catch (err) {
console.error("Failed to mark all inbox items as read:", err); console.error("Failed to mark all as read:", err);
return false; return false;
} }
}, []); }, []);
@@ -410,7 +450,7 @@ export function useInbox(
loadingMore, loadingMore,
hasMore, hasMore,
loadMore, loadMore,
isUsingApiFallback: true, // Always use API for pagination
error, error,
}; };
} }

View file

@@ -54,7 +54,8 @@ const pendingSyncs = new Map<string, Promise<SyncHandle>>();
// Version for sync state - increment this to force fresh sync when Electric config changes // Version for sync state - increment this to force fresh sync when Electric config changes
// v2: user-specific database architecture // v2: user-specific database architecture
const SYNC_VERSION = 2; // v3: consistent cutoff date for sync+queries, visibility refresh support
const SYNC_VERSION = 3;
// Database name prefix for identifying SurfSense databases // Database name prefix for identifying SurfSense databases
const DB_PREFIX = "surfsense-"; const DB_PREFIX = "surfsense-";