Add frontend vision LLM config types, API, atoms, and role manager wiring

This commit is contained in:
CREDO23 2026-04-07 19:24:43 +02:00
parent 4a675b64f4
commit 3369b8a832
7 changed files with 421 additions and 20 deletions

View file

@@ -0,0 +1,27 @@
import { atomWithQuery } from "jotai-tanstack-query";
import { visionLLMConfigApiService } from "@/lib/apis/vision-llm-config-api.service";
import { cacheKeys } from "@/lib/query-client/cache-keys";
import { activeSearchSpaceIdAtom } from "../search-spaces/search-space-query.atoms";
/**
 * Query atom exposing the vision-LLM configs scoped to the currently active
 * search space. The query stays disabled until a search space is selected,
 * and fetched results are treated as fresh for 5 minutes.
 */
export const visionLLMConfigsAtom = atomWithQuery((get) => {
  const activeId = get(activeSearchSpaceIdAtom);
  // Coerce once; used for both the cache key and the API call.
  const numericId = Number(activeId);
  const loadConfigs = async () =>
    visionLLMConfigApiService.getConfigs(numericId);
  return {
    queryKey: cacheKeys.visionLLMConfigs.all(numericId),
    // Don't fire the request while no search space is active.
    enabled: !!activeId,
    staleTime: 5 * 60 * 1000,
    queryFn: loadConfigs,
  };
});
/**
 * Query atom for the global (search-space-independent) vision-LLM configs.
 * Always enabled; results are treated as fresh for 10 minutes.
 */
export const globalVisionLLMConfigsAtom = atomWithQuery(() => ({
  queryKey: cacheKeys.visionLLMConfigs.global(),
  staleTime: 10 * 60 * 1000,
  queryFn: async () => visionLLMConfigApiService.getGlobalConfigs(),
}));