feat: migrate createLLMConfig to jotai mutation atom and add query atoms for LLM configs

This commit is contained in:
CREDO23 2025-12-09 19:39:25 +00:00
parent 0c0491cd49
commit 5b7e5770be
6 changed files with 93 additions and 71 deletions

View file

@ -7,6 +7,7 @@ import type {
DeleteLLMConfigRequest, DeleteLLMConfigRequest,
GetLLMConfigsResponse, GetLLMConfigsResponse,
UpdateLLMPreferencesRequest, UpdateLLMPreferencesRequest,
UpdateLLMConfigResponse,
} from "@/contracts/types/llm-config.types"; } from "@/contracts/types/llm-config.types";
import { llmConfigApiService } from "@/lib/apis/llm-config-api.service"; import { llmConfigApiService } from "@/lib/apis/llm-config-api.service";
import { cacheKeys } from "@/lib/query-client/cache-keys"; import { cacheKeys } from "@/lib/query-client/cache-keys";
@ -44,7 +45,7 @@ export const updateLLMConfigMutationAtom = atomWithMutation((get) => {
return llmConfigApiService.updateLLMConfig(request); return llmConfigApiService.updateLLMConfig(request);
}, },
onSuccess: (_, request: UpdateLLMConfigRequest) => { onSuccess: (_: UpdateLLMConfigResponse , request: UpdateLLMConfigRequest) => {
toast.success("LLM configuration updated successfully"); toast.success("LLM configuration updated successfully");
queryClient.invalidateQueries({ queryClient.invalidateQueries({
queryKey: cacheKeys.llmConfigs.all(searchSpaceId!), queryKey: cacheKeys.llmConfigs.all(searchSpaceId!),
@ -76,11 +77,7 @@ export const deleteLLMConfigMutationAtom = atomWithMutation((get) => {
cacheKeys.llmConfigs.all(searchSpaceId!), cacheKeys.llmConfigs.all(searchSpaceId!),
(oldData: GetLLMConfigsResponse | undefined) => { (oldData: GetLLMConfigsResponse | undefined) => {
if (!oldData) return oldData; if (!oldData) return oldData;
return { return oldData.filter((config) => config.id !== request.id);
...oldData,
items: oldData.items.filter((config) => config.id !== request.id),
total: oldData.total - 1,
};
} }
); );
queryClient.invalidateQueries({ queryClient.invalidateQueries({

View file

@ -0,0 +1,31 @@
import { atomWithQuery } from "jotai-tanstack-query";
import { activeSearchSpaceIdAtom } from "@/atoms/seach-spaces/seach-space-queries.atom";
import { llmConfigApiService } from "@/lib/apis/llm-config-api.service";
import { cacheKeys } from "@/lib/query-client/cache-keys";
/**
 * Query atom for the LLM configurations scoped to the currently active search space.
 * Disabled (no fetch) until a search space id is available; results are treated as
 * fresh for 5 minutes before a background refetch is considered.
 */
export const llmConfigsAtom = atomWithQuery((get) => {
  const searchSpaceId = get(activeSearchSpaceIdAtom);
  const hasSearchSpace = !!searchSpaceId;
  return {
    // NOTE: the non-null assertion is safe for fetching because `enabled`
    // gates the query until searchSpaceId is set.
    queryKey: cacheKeys.llmConfigs.all(searchSpaceId!),
    enabled: hasSearchSpace,
    staleTime: 5 * 60 * 1000, // consider data fresh for 5 minutes
    queryFn: () =>
      llmConfigApiService.getLLMConfigs({
        queryParams: { search_space_id: searchSpaceId! },
      }),
  };
});
/**
 * Query atom for the global (search-space independent) LLM configurations.
 * Always enabled; cached results stay fresh for 10 minutes.
 */
export const globalLLMConfigsAtom = atomWithQuery(() => ({
  queryKey: cacheKeys.llmConfigs.global(),
  staleTime: 10 * 60 * 1000, // consider data fresh for 10 minutes
  queryFn: () => llmConfigApiService.getGlobalLLMConfigs(),
}));

View file

@ -52,6 +52,9 @@ import {
import { cn } from "@/lib/utils"; import { cn } from "@/lib/utils";
import InferenceParamsEditor from "../inference-params-editor"; import InferenceParamsEditor from "../inference-params-editor";
import { useAtomValue } from "jotai";
import { createLLMConfigMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
import { CreateLLMConfigRequest } from "@/contracts/types/llm-config.types";
interface SetupLLMStepProps { interface SetupLLMStepProps {
searchSpaceId: number; searchSpaceId: number;
@ -97,14 +100,15 @@ export function SetupLLMStep({
onPreferencesUpdated, onPreferencesUpdated,
}: SetupLLMStepProps) { }: SetupLLMStepProps) {
const t = useTranslations("onboard"); const t = useTranslations("onboard");
const { llmConfigs, createLLMConfig, deleteLLMConfig } = useLLMConfigs(searchSpaceId); const { llmConfigs, deleteLLMConfig } = useLLMConfigs(searchSpaceId);
const { mutateAsync : createLLMConfig, isPending : isCreatingLlmConfig } = useAtomValue(createLLMConfigMutationAtom)
const { globalConfigs } = useGlobalLLMConfigs(); const { globalConfigs } = useGlobalLLMConfigs();
const { preferences, updatePreferences } = useLLMPreferences(searchSpaceId); const { preferences, updatePreferences } = useLLMPreferences(searchSpaceId);
const [isAddingNew, setIsAddingNew] = useState(false); const [isAddingNew, setIsAddingNew] = useState(false);
const [formData, setFormData] = useState<CreateLLMConfig>({ const [formData, setFormData] = useState<CreateLLMConfigRequest>({
name: "", name: "",
provider: "", provider: "" as CreateLLMConfigRequest["provider"], // Allow it as Default
custom_provider: "", custom_provider: "",
model_name: "", model_name: "",
api_key: "", api_key: "",
@ -113,7 +117,6 @@ export function SetupLLMStep({
litellm_params: {}, litellm_params: {},
search_space_id: searchSpaceId, search_space_id: searchSpaceId,
}); });
const [isSubmitting, setIsSubmitting] = useState(false);
const [modelComboboxOpen, setModelComboboxOpen] = useState(false); const [modelComboboxOpen, setModelComboboxOpen] = useState(false);
const [showProviderForm, setShowProviderForm] = useState(false); const [showProviderForm, setShowProviderForm] = useState(false);
@ -146,14 +149,12 @@ export function SetupLLMStep({
return; return;
} }
setIsSubmitting(true);
const result = await createLLMConfig(formData); const result = await createLLMConfig(formData);
setIsSubmitting(false);
if (result) { if (result) {
setFormData({ setFormData({
name: "", name: "",
provider: "", provider: "" as CreateLLMConfigRequest["provider"],
custom_provider: "", custom_provider: "",
model_name: "", model_name: "",
api_key: "", api_key: "",
@ -417,7 +418,7 @@ export function SetupLLMStep({
<Input <Input
id="custom_provider" id="custom_provider"
placeholder={t("custom_provider_placeholder")} placeholder={t("custom_provider_placeholder")}
value={formData.custom_provider} value={formData.custom_provider ?? ""}
onChange={(e) => handleInputChange("custom_provider", e.target.value)} onChange={(e) => handleInputChange("custom_provider", e.target.value)}
required required
/> />
@ -543,7 +544,7 @@ export function SetupLLMStep({
<Input <Input
id="api_base" id="api_base"
placeholder={selectedProvider?.apiBase || t("api_base_placeholder")} placeholder={selectedProvider?.apiBase || t("api_base_placeholder")}
value={formData.api_base} value={formData.api_base ?? ""}
onChange={(e) => handleInputChange("api_base", e.target.value)} onChange={(e) => handleInputChange("api_base", e.target.value)}
/> />
{/* Ollama-specific help */} {/* Ollama-specific help */}
@ -590,15 +591,15 @@ export function SetupLLMStep({
</div> </div>
<div className="flex gap-2 pt-2"> <div className="flex gap-2 pt-2">
<Button type="submit" disabled={isSubmitting} size="sm"> <Button type="submit" disabled={isCreatingLlmConfig} size="sm">
{isSubmitting ? t("adding") : t("add_provider")} {isCreatingLlmConfig ? t("adding") : t("add_provider")}
</Button> </Button>
<Button <Button
type="button" type="button"
variant="outline" variant="outline"
size="sm" size="sm"
onClick={() => setIsAddingNew(false)} onClick={() => setIsAddingNew(false)}
disabled={isSubmitting} disabled={isCreatingLlmConfig}
> >
{t("cancel")} {t("cancel")}
</Button> </Button>

View file

@ -60,34 +60,30 @@ import { LANGUAGES } from "@/contracts/enums/languages";
import { getModelsByProvider } from "@/contracts/enums/llm-models"; import { getModelsByProvider } from "@/contracts/enums/llm-models";
import { LLM_PROVIDERS } from "@/contracts/enums/llm-providers"; import { LLM_PROVIDERS } from "@/contracts/enums/llm-providers";
import { import {
type CreateLLMConfig,
type LLMConfig,
useGlobalLLMConfigs, useGlobalLLMConfigs,
useLLMConfigs,
} from "@/hooks/use-llm-configs"; } from "@/hooks/use-llm-configs";
import { cn } from "@/lib/utils"; import { cn } from "@/lib/utils";
import InferenceParamsEditor from "../inference-params-editor"; import InferenceParamsEditor from "../inference-params-editor";
import { useAtomValue } from "jotai";
import { createLLMConfigMutationAtom, deleteLLMConfigMutationAtom, updateLLMConfigMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
import { CreateLLMConfigRequest, CreateLLMConfigResponse, LLMConfig, UpdateLLMConfigResponse } from "@/contracts/types/llm-config.types";
import { llmConfigsAtom } from "@/atoms/llm-config/llm-config-query.atoms";
interface ModelConfigManagerProps { interface ModelConfigManagerProps {
searchSpaceId: number; searchSpaceId: number;
} }
export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) { export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
const { const { mutateAsync : createLLMConfig, isPending : isCreatingLLMConfig, error : createLLMConfigError, isError : isCreateLLMConfigError } = useAtomValue(createLLMConfigMutationAtom)
llmConfigs, const { mutateAsync : updateLLMConfig, isPending : isUpdatingLLMConfig, error : updateLLMConfigError, isError : isUpdateLLMConfigError} = useAtomValue(updateLLMConfigMutationAtom)
loading, const { mutateAsync : deleteLLMConfig, isPending : isDeletingLLMConfig, error : deleteLLMConfigError, isError : isDeleteLLMConfigError } = useAtomValue(deleteLLMConfigMutationAtom)
error, const { data : llmConfigs, isFetching : isFetchingLLMConfigs, error : LLMConfigsFetchError, isError : isLLMConfigsFetchError, refetch : refreshConfigs} = useAtomValue(llmConfigsAtom)
createLLMConfig,
updateLLMConfig,
deleteLLMConfig,
refreshConfigs,
} = useLLMConfigs(searchSpaceId);
const { globalConfigs } = useGlobalLLMConfigs(); const { globalConfigs } = useGlobalLLMConfigs();
const [isAddingNew, setIsAddingNew] = useState(false); const [isAddingNew, setIsAddingNew] = useState(false);
const [editingConfig, setEditingConfig] = useState<LLMConfig | null>(null); const [editingConfig, setEditingConfig] = useState<LLMConfig | null>(null);
const [formData, setFormData] = useState<CreateLLMConfig>({ const [formData, setFormData] = useState<CreateLLMConfigRequest>({
name: "", name: "",
provider: "", provider: "" as CreateLLMConfigRequest["provider"], // Allow it as Default,
custom_provider: "", custom_provider: "",
model_name: "", model_name: "",
api_key: "", api_key: "",
@ -96,7 +92,9 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
litellm_params: {}, litellm_params: {},
search_space_id: searchSpaceId, search_space_id: searchSpaceId,
}); });
const [isSubmitting, setIsSubmitting] = useState(false); const isSubmitting = isCreatingLLMConfig || isUpdatingLLMConfig
const errors = [createLLMConfigError, updateLLMConfigError, deleteLLMConfigError, LLMConfigsFetchError] as Error[]
const isError = Boolean(errors.filter(Boolean).length)
const [modelComboboxOpen, setModelComboboxOpen] = useState(false); const [modelComboboxOpen, setModelComboboxOpen] = useState(false);
const [configToDelete, setConfigToDelete] = useState<LLMConfig | null>(null); const [configToDelete, setConfigToDelete] = useState<LLMConfig | null>(null);
const [isDeleting, setIsDeleting] = useState(false); const [isDeleting, setIsDeleting] = useState(false);
@ -118,12 +116,12 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
} }
}, [editingConfig, searchSpaceId]); }, [editingConfig, searchSpaceId]);
const handleInputChange = (field: keyof CreateLLMConfig, value: string) => { const handleInputChange = (field: keyof CreateLLMConfigRequest, value: string) => {
setFormData((prev) => ({ ...prev, [field]: value })); setFormData((prev) => ({ ...prev, [field]: value }));
}; };
// Handle provider change with auto-fill API Base URL and reset model / 处理 Provider 变更并自动填充 API Base URL 并重置模型 // Handle provider change with auto-fill API Base URL and reset model / 处理 Provider 变更并自动填充 API Base URL 并重置模型
const handleProviderChange = (providerValue: string) => { const handleProviderChange = (providerValue : CreateLLMConfigRequest["provider"]) => {
const provider = LLM_PROVIDERS.find((p) => p.value === providerValue); const provider = LLM_PROVIDERS.find((p) => p.value === providerValue);
setFormData((prev) => ({ setFormData((prev) => ({
...prev, ...prev,
@ -134,6 +132,8 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
})); }));
}; };
const handleSubmit = async (e: React.FormEvent) => { const handleSubmit = async (e: React.FormEvent) => {
e.preventDefault(); e.preventDefault();
if (!formData.name || !formData.provider || !formData.model_name || !formData.api_key) { if (!formData.name || !formData.provider || !formData.model_name || !formData.api_key) {
@ -141,23 +141,19 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
return; return;
} }
setIsSubmitting(true); let result: CreateLLMConfigResponse | UpdateLLMConfigResponse | null = null;
let result: LLMConfig | null = null;
if (editingConfig) { if (editingConfig) {
// Update existing config // Update existing config
result = await updateLLMConfig(editingConfig.id, formData); result = await updateLLMConfig({id : editingConfig.id, data : formData});
} else { } else {
// Create new config // Create new config
result = await createLLMConfig(formData); result = await createLLMConfig(formData);
} }
setIsSubmitting(false);
if (result) { if (result) {
setFormData({ setFormData({
name: "", name: "",
provider: "", provider: "" as CreateLLMConfigRequest["provider"],
custom_provider: "", custom_provider: "",
model_name: "", model_name: "",
api_key: "", api_key: "",
@ -177,14 +173,11 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
const handleConfirmDelete = async () => { const handleConfirmDelete = async () => {
if (!configToDelete) return; if (!configToDelete) return;
setIsDeleting(true);
try { try {
await deleteLLMConfig(configToDelete.id); await deleteLLMConfig({id : configToDelete.id});
toast.success("Configuration deleted successfully");
} catch (error) { } catch (error) {
toast.error("Failed to delete configuration"); toast.error("Failed to delete configuration");
} finally { } finally {
setIsDeleting(false);
setConfigToDelete(null); setConfigToDelete(null);
} }
}; };
@ -217,26 +210,28 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
<Button <Button
variant="outline" variant="outline"
size="sm" size="sm"
onClick={refreshConfigs} onClick={() => refreshConfigs()}
disabled={loading} disabled={isFetchingLLMConfigs}
className="flex items-center gap-2" className="flex items-center gap-2"
> >
<RefreshCw className={`h-4 w-4 ${loading ? "animate-spin" : ""}`} /> <RefreshCw className={`h-4 w-4 ${isFetchingLLMConfigs ? "animate-spin" : ""}`} />
Refresh Refresh
</Button> </Button>
</div> </div>
</div> </div>
{/* Error Alert */} {/* Error Alert */}
{error && ( {isError && errors.filter(Boolean).map(err => {
return (
<Alert variant="destructive"> <Alert variant="destructive">
<AlertCircle className="h-4 w-4" /> <AlertCircle className="h-4 w-4" />
<AlertDescription>{error}</AlertDescription> <AlertDescription>{err?.message ?? "Something went wrong"}</AlertDescription>
</Alert> </Alert>
)} )
}) }
{/* Global Configs Info Alert */} {/* Global Configs Info Alert */}
{!loading && !error && globalConfigs.length > 0 && ( {!isFetchingLLMConfigs && !isError && globalConfigs.length > 0 && (
<Alert> <Alert>
<CheckCircle className="h-4 w-4" /> <CheckCircle className="h-4 w-4" />
<AlertDescription> <AlertDescription>
@ -250,7 +245,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
)} )}
{/* Loading State */} {/* Loading State */}
{loading && ( {isFetchingLLMConfigs && (
<Card> <Card>
<CardContent className="flex items-center justify-center py-12"> <CardContent className="flex items-center justify-center py-12">
<div className="flex items-center gap-2 text-muted-foreground"> <div className="flex items-center gap-2 text-muted-foreground">
@ -262,14 +257,14 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
)} )}
{/* Stats Overview */} {/* Stats Overview */}
{!loading && !error && ( {!isFetchingLLMConfigs && !isError&& (
<div className="grid gap-3 grid-cols-3"> <div className="grid gap-3 grid-cols-3">
<Card className="overflow-hidden"> <Card className="overflow-hidden">
<div className="h-1 bg-blue-500" /> <div className="h-1 bg-blue-500" />
<CardContent className="p-4"> <CardContent className="p-4">
<div className="flex items-start justify-between gap-2"> <div className="flex items-start justify-between gap-2">
<div className="space-y-1 min-w-0"> <div className="space-y-1 min-w-0">
<p className="text-2xl font-bold tracking-tight">{llmConfigs.length}</p> <p className="text-2xl font-bold tracking-tight">{llmConfigs?.length}</p>
<p className="text-xs font-medium text-muted-foreground">Total Configs</p> <p className="text-xs font-medium text-muted-foreground">Total Configs</p>
</div> </div>
<div className="flex h-9 w-9 shrink-0 items-center justify-center rounded-lg bg-blue-500/10"> <div className="flex h-9 w-9 shrink-0 items-center justify-center rounded-lg bg-blue-500/10">
@ -285,7 +280,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
<div className="flex items-start justify-between gap-2"> <div className="flex items-start justify-between gap-2">
<div className="space-y-1 min-w-0"> <div className="space-y-1 min-w-0">
<p className="text-2xl font-bold tracking-tight"> <p className="text-2xl font-bold tracking-tight">
{new Set(llmConfigs.map((c) => c.provider)).size} {new Set(llmConfigs?.map((c) => c.provider)).size}
</p> </p>
<p className="text-xs font-medium text-muted-foreground">Providers</p> <p className="text-xs font-medium text-muted-foreground">Providers</p>
</div> </div>
@ -314,7 +309,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
)} )}
{/* Configuration Management */} {/* Configuration Management */}
{!loading && !error && ( {!isFetchingLLMConfigs && !isError && (
<div className="space-y-6"> <div className="space-y-6">
<div className="flex flex-col space-y-4 sm:flex-row sm:items-center sm:justify-between sm:space-y-0"> <div className="flex flex-col space-y-4 sm:flex-row sm:items-center sm:justify-between sm:space-y-0">
<div> <div>
@ -329,7 +324,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
</Button> </Button>
</div> </div>
{llmConfigs.length === 0 ? ( {llmConfigs?.length === 0 ? (
<Card className="border-dashed border-2 border-muted-foreground/25"> <Card className="border-dashed border-2 border-muted-foreground/25">
<CardContent className="flex flex-col items-center justify-center py-16 text-center"> <CardContent className="flex flex-col items-center justify-center py-16 text-center">
<div className="rounded-full bg-muted p-4 mb-6"> <div className="rounded-full bg-muted p-4 mb-6">
@ -350,7 +345,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
) : ( ) : (
<div className="grid gap-4"> <div className="grid gap-4">
<AnimatePresence> <AnimatePresence>
{llmConfigs.map((config) => { {llmConfigs?.map((config) => {
const providerInfo = getProviderInfo(config.provider); const providerInfo = getProviderInfo(config.provider);
return ( return (
<motion.div <motion.div
@ -466,7 +461,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
setEditingConfig(null); setEditingConfig(null);
setFormData({ setFormData({
name: "", name: "",
provider: "", provider: "" as LLMConfig["provider"],
custom_provider: "", custom_provider: "",
model_name: "", model_name: "",
api_key: "", api_key: "",
@ -538,7 +533,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
<Input <Input
id="custom_provider" id="custom_provider"
placeholder="e.g., my-custom-provider" placeholder="e.g., my-custom-provider"
value={formData.custom_provider} value={formData.custom_provider ?? ""}
onChange={(e) => handleInputChange("custom_provider", e.target.value)} onChange={(e) => handleInputChange("custom_provider", e.target.value)}
required required
/> />
@ -683,7 +678,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
<Input <Input
id="api_base" id="api_base"
placeholder={selectedProvider?.apiBase || "e.g., https://api.openai.com/v1"} placeholder={selectedProvider?.apiBase || "e.g., https://api.openai.com/v1"}
value={formData.api_base} value={formData.api_base ?? ""}
onChange={(e) => handleInputChange("api_base", e.target.value)} onChange={(e) => handleInputChange("api_base", e.target.value)}
/> />
{selectedProvider?.apiBase && formData.api_base === selectedProvider.apiBase && ( {selectedProvider?.apiBase && formData.api_base === selectedProvider.apiBase && (
@ -765,7 +760,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
setEditingConfig(null); setEditingConfig(null);
setFormData({ setFormData({
name: "", name: "",
provider: "", provider: "" as LLMConfig["provider"],
custom_provider: "", custom_provider: "",
model_name: "", model_name: "",
api_key: "", api_key: "",

View file

@ -38,7 +38,7 @@ export const llmConfig = z.object({
id: z.number(), id: z.number(),
name: z.string().max(100), name: z.string().max(100),
provider: liteLLMProviderEnum, provider: liteLLMProviderEnum,
custom_provider: z.string().max(100).nullable().optional(), custom_provider: z.string().nullable().optional(),
model_name: z.string().max(100), model_name: z.string().max(100),
api_key: z.string(), api_key: z.string(),
api_base: z.string().nullable().optional(), api_base: z.string().nullable().optional(),
@ -46,7 +46,7 @@ export const llmConfig = z.object({
litellm_params: z.record(z.string(), z.any()).nullable().optional(), litellm_params: z.record(z.string(), z.any()).nullable().optional(),
search_space_id: z.number(), search_space_id: z.number(),
created_at: z.string(), created_at: z.string(),
updated_at: z.string(), updated_at: z.string().optional(),
}); });
export const globalLLMConfig = llmConfig export const globalLLMConfig = llmConfig
@ -98,10 +98,7 @@ export const getLLMConfigsRequest = z.object({
.nullish(), .nullish(),
}); });
export const getLLMConfigsResponse = z.object({ export const getLLMConfigsResponse = z.array(llmConfig);
items: z.array(llmConfig),
total: z.number(),
});
/** /**
* Get LLM config by ID * Get LLM config by ID

View file

@ -2,6 +2,7 @@
import { useEffect, useState } from "react"; import { useEffect, useState } from "react";
import { toast } from "sonner"; import { toast } from "sonner";
import { authenticatedFetch } from "@/lib/auth-utils"; import { authenticatedFetch } from "@/lib/auth-utils";
import { UpdateLLMConfigRequest } from "@/contracts/types/llm-config.types";
export interface LLMConfig { export interface LLMConfig {
id: number; id: number;
@ -136,7 +137,7 @@ export function useLLMConfigs(searchSpaceId: number | null) {
const updateLLMConfig = async ( const updateLLMConfig = async (
id: number, id: number,
config: UpdateLLMConfig config: UpdateLLMConfigRequest["data"]
): Promise<LLMConfig | null> => { ): Promise<LLMConfig | null> => {
try { try {
const response = await authenticatedFetch( const response = await authenticatedFetch(