feat: migrate createLLMConfig to jotai mutation atom and add query atoms for LLM configs

This commit is contained in:
CREDO23 2025-12-09 19:39:25 +00:00
parent 0c0491cd49
commit 5b7e5770be
6 changed files with 93 additions and 71 deletions

View file

@@ -0,0 +1,31 @@
import { atomWithQuery } from "jotai-tanstack-query";
import { activeSearchSpaceIdAtom } from "@/atoms/seach-spaces/seach-space-queries.atom";
import { llmConfigApiService } from "@/lib/apis/llm-config-api.service";
import { cacheKeys } from "@/lib/query-client/cache-keys";
/**
 * Query atom exposing the LLM configs for the currently active search space.
 *
 * The query is disabled until a search space has been selected
 * (`activeSearchSpaceIdAtom` is truthy), and fetched results are treated as
 * fresh for five minutes before a background refetch is considered.
 */
export const llmConfigsAtom = atomWithQuery((get) => {
  const activeSpaceId = get(activeSearchSpaceIdAtom);

  // Only runs once `enabled` is true, so the non-null assertion is safe here.
  const fetchScopedConfigs = async () =>
    llmConfigApiService.getLLMConfigs({
      queryParams: {
        search_space_id: activeSpaceId!,
      },
    });

  return {
    queryKey: cacheKeys.llmConfigs.all(activeSpaceId!),
    enabled: !!activeSpaceId,
    staleTime: 5 * 60 * 1000, // 5 minutes
    queryFn: fetchScopedConfigs,
  };
});
/**
 * Query atom exposing the globally available LLM configs (not tied to any
 * search space). Always enabled; results are treated as fresh for ten
 * minutes since global configs change rarely.
 */
export const globalLLMConfigsAtom = atomWithQuery(() => ({
  queryKey: cacheKeys.llmConfigs.global(),
  staleTime: 10 * 60 * 1000, // 10 minutes
  queryFn: async () => llmConfigApiService.getGlobalLLMConfigs(),
}));