feat: add updateLLMPreferencesMutationAtom to handle LLM preferences updates

This commit is contained in:
CREDO23 2025-12-09 13:46:43 +00:00
parent 1aa10db910
commit 0c0491cd49

View file

@@ -6,6 +6,7 @@ import type {
UpdateLLMConfigRequest,
DeleteLLMConfigRequest,
GetLLMConfigsResponse,
UpdateLLMPreferencesRequest,
} from "@/contracts/types/llm-config.types";
import { llmConfigApiService } from "@/lib/apis/llm-config-api.service";
import { cacheKeys } from "@/lib/query-client/cache-keys";
@@ -91,3 +92,22 @@ export const deleteLLMConfigMutationAtom = atomWithMutation((get) => {
},
};
});
/**
 * Mutation atom for updating the LLM preferences of the active search space.
 *
 * On success it shows a toast and invalidates the cached preferences for the
 * active search space so dependent queries refetch.
 *
 * NOTE(review): the original passed `enabled: !!searchSpaceId`, but `enabled`
 * is a *query* option — TanStack Query mutations ignore it, so it never
 * prevented the mutation from firing without an active search space. The
 * intended precondition is now enforced with a runtime guard in `mutationFn`.
 */
export const updateLLMPreferencesMutationAtom = atomWithMutation((get) => {
  const searchSpaceId = get(activeSearchSpaceIdAtom);
  return {
    // Key still includes the (possibly null) id so it stays stable per space.
    mutationKey: cacheKeys.llmConfigs.preferences(searchSpaceId!),
    mutationFn: async (request: UpdateLLMPreferencesRequest) => {
      // Guard replaces the ineffective `enabled` flag: fail fast instead of
      // updating preferences with no active search space selected.
      if (searchSpaceId == null) {
        throw new Error("Cannot update LLM preferences: no active search space");
      }
      return llmConfigApiService.updateLLMPreferences(request);
    },
    onSuccess: () => {
      toast.success("LLM preferences updated successfully");
      // Refetch the now-stale preferences for this search space.
      queryClient.invalidateQueries({
        queryKey: cacheKeys.llmConfigs.preferences(searchSpaceId!),
      });
    },
    // NOTE(review): sibling mutation atoms are not fully visible here; if they
    // define an `onError` toast, consider adding a matching one for parity.
  };
});