2026-02-27 17:19:25 -08:00
|
|
|
import { useQuery, useQueryClient } from "@tanstack/react-query";
|
|
|
|
|
import { useEffect, useRef } from "react";
|
2026-01-15 19:57:25 +02:00
|
|
|
import { chatCommentsApiService } from "@/lib/apis/chat-comments-api.service";
|
|
|
|
|
import { cacheKeys } from "@/lib/query-client/cache-keys";
|
|
|
|
|
|
|
|
|
|
/**
 * Options accepted by {@link useComments}.
 */
interface UseCommentsOptions {
  /** Id of the chat message whose comments are fetched. */
  messageId: number;
  /** When false, the underlying query is disabled. Defaults to true. */
  enabled?: boolean;
}
|
|
|
|
|
|
2026-02-27 17:19:25 -08:00
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
// Module-level coordination: when a batch request is in-flight, individual
|
|
|
|
|
// useComments queryFns piggy-back on it instead of making their own requests.
|
2026-02-28 01:54:54 -08:00
|
|
|
//
|
|
|
|
|
// _batchReady is a promise that resolves once the batch useEffect has had a
|
|
|
|
|
// chance to set _batchInflight. Individual queryFns await this gate before
|
|
|
|
|
// deciding whether to piggy-back or fetch on their own, eliminating the
|
|
|
|
|
// previous race where setTimeout(0) was not enough.
|
2026-02-27 17:19:25 -08:00
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
let _batchInflight: Promise<void> | null = null;
|
|
|
|
|
let _batchTargetIds = new Set<number>();
|
2026-02-28 01:54:54 -08:00
|
|
|
let _batchReady: Promise<void> | null = null;
|
|
|
|
|
let _resolveBatchReady: (() => void) | null = null;
|
|
|
|
|
|
|
|
|
|
function resetBatchGate() {
|
|
|
|
|
_batchReady = new Promise<void>((r) => {
|
|
|
|
|
_resolveBatchReady = r;
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Open the initial gate immediately (no batch pending yet)
|
|
|
|
|
resetBatchGate();
|
|
|
|
|
_resolveBatchReady?.();
|
2026-02-27 17:19:25 -08:00
|
|
|
|
2026-01-15 19:57:25 +02:00
|
|
|
export function useComments({ messageId, enabled = true }: UseCommentsOptions) {
|
2026-02-27 17:19:25 -08:00
|
|
|
const queryClient = useQueryClient();
|
|
|
|
|
|
2026-01-15 19:57:25 +02:00
|
|
|
return useQuery({
|
|
|
|
|
queryKey: cacheKeys.comments.byMessage(messageId),
|
|
|
|
|
queryFn: async () => {
|
2026-02-28 01:54:54 -08:00
|
|
|
// Wait for the batch gate so the useEffect in useBatchCommentsPreload
|
|
|
|
|
// has a chance to set _batchInflight before we decide.
|
|
|
|
|
if (_batchReady) {
|
|
|
|
|
await _batchReady;
|
|
|
|
|
}
|
2026-02-27 17:19:25 -08:00
|
|
|
|
|
|
|
|
if (_batchInflight && _batchTargetIds.has(messageId)) {
|
|
|
|
|
await _batchInflight;
|
|
|
|
|
const cached = queryClient.getQueryData(cacheKeys.comments.byMessage(messageId));
|
|
|
|
|
if (cached) return cached;
|
|
|
|
|
}
|
|
|
|
|
|
2026-01-15 19:57:25 +02:00
|
|
|
return chatCommentsApiService.getComments({ message_id: messageId });
|
|
|
|
|
},
|
|
|
|
|
enabled: enabled && !!messageId,
|
2026-02-27 17:19:25 -08:00
|
|
|
staleTime: 30_000,
|
2026-01-15 19:57:25 +02:00
|
|
|
});
|
|
|
|
|
}
|
2026-02-27 17:19:25 -08:00
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Batch-fetch comments for all given message IDs in a single request, then
|
|
|
|
|
* seed the per-message React Query cache so individual useComments hooks
|
|
|
|
|
* resolve from cache instead of firing their own requests.
|
|
|
|
|
*/
|
|
|
|
|
export function useBatchCommentsPreload(messageIds: number[]) {
|
|
|
|
|
const queryClient = useQueryClient();
|
|
|
|
|
const prevKeyRef = useRef<string>("");
|
|
|
|
|
|
|
|
|
|
useEffect(() => {
|
|
|
|
|
if (!messageIds.length) return;
|
|
|
|
|
|
|
|
|
|
const key = messageIds
|
|
|
|
|
.slice()
|
|
|
|
|
.sort((a, b) => a - b)
|
|
|
|
|
.join(",");
|
|
|
|
|
if (key === prevKeyRef.current) return;
|
|
|
|
|
prevKeyRef.current = key;
|
|
|
|
|
|
2026-02-28 01:54:54 -08:00
|
|
|
// Open a new gate so individual queryFns wait for us
|
|
|
|
|
resetBatchGate();
|
|
|
|
|
|
2026-02-27 17:19:25 -08:00
|
|
|
_batchTargetIds = new Set(messageIds);
|
|
|
|
|
let cancelled = false;
|
|
|
|
|
|
|
|
|
|
const promise = chatCommentsApiService
|
|
|
|
|
.getBatchComments({ message_ids: messageIds })
|
|
|
|
|
.then((data) => {
|
|
|
|
|
if (cancelled) return;
|
|
|
|
|
for (const [msgIdStr, commentList] of Object.entries(data.comments_by_message)) {
|
|
|
|
|
queryClient.setQueryData(cacheKeys.comments.byMessage(Number(msgIdStr)), commentList);
|
|
|
|
|
}
|
|
|
|
|
})
|
|
|
|
|
.catch(() => {
|
|
|
|
|
// Batch failed; individual queryFns will fall through to their own fetch
|
|
|
|
|
})
|
|
|
|
|
.finally(() => {
|
|
|
|
|
if (_batchInflight === promise) {
|
|
|
|
|
_batchInflight = null;
|
|
|
|
|
_batchTargetIds = new Set();
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
_batchInflight = promise;
|
|
|
|
|
|
2026-02-28 01:54:54 -08:00
|
|
|
// Release the gate — individual queryFns can now check _batchInflight
|
|
|
|
|
_resolveBatchReady?.();
|
|
|
|
|
|
2026-02-27 17:19:25 -08:00
|
|
|
return () => {
|
|
|
|
|
cancelled = true;
|
|
|
|
|
if (_batchInflight === promise) {
|
|
|
|
|
_batchInflight = null;
|
|
|
|
|
_batchTargetIds = new Set();
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
}, [messageIds, queryClient]);
|
|
|
|
|
}
|