// ModelConfigDialog — modal dialog for creating, editing, or viewing an LLM
// provider configuration within a search space.
//  - mode "create": saves a new config via createConfig, then sets it as the
//    agent LLM preference for the search space before closing.
//  - mode "edit" (non-global config): patches the existing config in place.
//  - global configs are read-only; the footer instead offers a
//    "use this global config" action that only updates preferences.
// "Auto mode" configs (is_auto_mode flag) get a special informational view.
//
// NOTE(review): the JSX return block below is garbled — element tags appear to
// have been stripped (e.g. `return ( e.preventDefault()} >` and the orphaned
// text/expression fragments that follow). The markup must be restored from
// version control; the hook/handler logic above the return looks intact.
"use client"; import { useAtomValue } from "jotai"; import { AlertCircle, Zap } from "lucide-react"; import { useCallback, useRef, useState } from "react"; import { toast } from "sonner"; import { createNewLLMConfigMutationAtom, updateLLMPreferencesMutationAtom, updateNewLLMConfigMutationAtom, } from "@/atoms/new-llm-config/new-llm-config-mutation.atoms"; import { LLMConfigForm, type LLMConfigFormData } from "@/components/shared/llm-config-form"; import { Alert, AlertDescription } from "@/components/ui/alert"; import { Badge } from "@/components/ui/badge"; import { Button } from "@/components/ui/button"; import { Dialog, DialogContent, DialogTitle } from "@/components/ui/dialog"; import { Spinner } from "@/components/ui/spinner"; import type { GlobalNewLLMConfig, LiteLLMProvider, NewLLMConfigPublic, } from "@/contracts/types/new-llm-config.types"; interface ModelConfigDialogProps { open: boolean; onOpenChange: (open: boolean) => void; config: NewLLMConfigPublic | GlobalNewLLMConfig | null; isGlobal: boolean; searchSpaceId: number; mode: "create" | "edit" | "view"; } export function ModelConfigDialog({ open, onOpenChange, config, isGlobal, searchSpaceId, mode, }: ModelConfigDialogProps) { const [isSubmitting, setIsSubmitting] = useState(false); const [scrollPos, setScrollPos] = useState<"top" | "middle" | "bottom">("top"); const scrollRef = useRef(null); const handleScroll = useCallback((e: React.UIEvent) => { const el = e.currentTarget; const atTop = el.scrollTop <= 2; const atBottom = el.scrollHeight - el.scrollTop - el.clientHeight <= 2; setScrollPos(atTop ? "top" : atBottom ?
// ^ scrollPos classifies the scroll position ("top"/"middle"/"bottom") with a
// 2px tolerance — presumably used to toggle edge-fade/shadow styling on the
// scroll container (TODO confirm once the JSX is restored).
"bottom" : "middle"); }, []); const { mutateAsync: createConfig } = useAtomValue(createNewLLMConfigMutationAtom); const { mutateAsync: updateConfig } = useAtomValue(updateNewLLMConfigMutationAtom); const { mutateAsync: updatePreferences } = useAtomValue(updateLLMPreferencesMutationAtom); const isAutoMode = config && "is_auto_mode" in config && config.is_auto_mode; const getTitle = () => { if (mode === "create") return "Add New Configuration"; if (isAutoMode) return "Auto Mode (Fastest)"; if (isGlobal) return "View Global Configuration"; return "Edit Configuration"; }; const getSubtitle = () => { if (mode === "create") return "Set up a new LLM provider for this search space"; if (isAutoMode) return "Automatically routes requests across providers"; if (isGlobal) return "Read-only global configuration"; return "Update your configuration settings"; }; const handleSubmit = useCallback( async (data: LLMConfigFormData) => { setIsSubmitting(true); try { if (mode === "create") { const result = await createConfig({ ...data, search_space_id: searchSpaceId, }); if (result?.id) { await updatePreferences({ search_space_id: searchSpaceId, data: { agent_llm_id: result.id, }, }); } onOpenChange(false); } else if (!isGlobal && config) { await updateConfig({ id: config.id, data: { name: data.name, description: data.description, provider: data.provider, custom_provider: data.custom_provider, model_name: data.model_name, api_key: data.api_key, api_base: data.api_base, litellm_params: data.litellm_params, system_instructions: data.system_instructions, use_default_system_instructions: data.use_default_system_instructions, citations_enabled: data.citations_enabled, }, }); onOpenChange(false); } } catch (error) { console.error("Failed to save configuration:", error); } finally { setIsSubmitting(false); } }, [ mode, isGlobal, config, searchSpaceId, createConfig, updateConfig, updatePreferences, onOpenChange, ] ); const handleUseGlobalConfig = useCallback(async () => { if (!config ||
// ^ handleUseGlobalConfig: guard — no-op unless a global config is loaded;
// on success it points the search space's agent LLM preference at this
// global config and toasts. Errors are logged only (dialog stays open).
!isGlobal) return; setIsSubmitting(true); try { await updatePreferences({ search_space_id: searchSpaceId, data: { agent_llm_id: config.id, }, }); toast.success(`Now using ${config.name}`); onOpenChange(false); } catch (error) { console.error("Failed to set model:", error); } finally { setIsSubmitting(false); } }, [config, isGlobal, searchSpaceId, updatePreferences, onOpenChange]); return ( e.preventDefault()} > {getTitle()} {/* Header */}

{getTitle()}

{isAutoMode && ( Recommended )} {isGlobal && !isAutoMode && mode !== "create" && ( Global )} {!isGlobal && mode !== "create" && !isAutoMode && ( Custom )}

{getSubtitle()}

{config && !isAutoMode && mode !== "create" && (

{config.model_name}

)}
{/* Scrollable content */}
{isAutoMode && ( Auto mode automatically distributes requests across all available LLM providers to optimize performance and avoid rate limits. )} {isGlobal && !isAutoMode && mode !== "create" && ( Global configurations are read-only. To customize settings, create a new configuration based on this template. )} {mode === "create" ? ( ) : isAutoMode && config ? (
How It Works

{config.description}

Key Benefits

Automatic (Fastest)

Distributes requests across all configured LLM providers

Rate Limit Protection

Automatically handles rate limits with cooldowns and retries

Automatic Failover

Falls back to other providers if one becomes unavailable

) : isGlobal && config ? (
Configuration Name

{config.name}

{config.description && (
Description

{config.description}

)}
Provider

{config.provider}

Model

{config.model_name}

Citations
{config.citations_enabled ? "Enabled" : "Disabled"}
{config.system_instructions && ( <>
System Instructions

{config.system_instructions}

)}
) : config ? ( ) : null}
{/* Fixed footer */}
{mode === "create" || (!isGlobal && !isAutoMode && config) ? ( ) : isAutoMode ? ( ) : isGlobal && config ? ( ) : null}
); }