diff --git a/surfsense_web/components/settings/model-config-manager.tsx b/surfsense_web/components/settings/model-config-manager.tsx
index ed3cc4976..7530d05f1 100644
--- a/surfsense_web/components/settings/model-config-manager.tsx
+++ b/surfsense_web/components/settings/model-config-manager.tsx
@@ -59,9 +59,6 @@ import {
 import { LANGUAGES } from "@/contracts/enums/languages";
 import { getModelsByProvider } from "@/contracts/enums/llm-models";
 import { LLM_PROVIDERS } from "@/contracts/enums/llm-providers";
-import {
-	useGlobalLLMConfigs,
-} from "@/hooks/use-llm-configs";
 import { cn } from "@/lib/utils";
 import InferenceParamsEditor from "../inference-params-editor";
 import { useAtomValue } from "jotai";
diff --git a/surfsense_web/hooks/use-llm-configs.ts b/surfsense_web/hooks/use-llm-configs.ts
index dec7b4ee2..7d50758d4 100644
--- a/surfsense_web/hooks/use-llm-configs.ts
+++ b/surfsense_web/hooks/use-llm-configs.ts
@@ -135,43 +135,3 @@ export function useLLMPreferences(searchSpaceId: number | null) {
 		isOnboardingComplete,
 	};
 }
-
-export function useGlobalLLMConfigs() {
-	const [globalConfigs, setGlobalConfigs] = useState([]);
-	const [loading, setLoading] = useState(true);
-	const [error, setError] = useState(null);
-
-	const fetchGlobalConfigs = async () => {
-		try {
-			setLoading(true);
-			const response = await authenticatedFetch(
-				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/global-llm-configs`,
-				{ method: "GET" }
-			);
-
-			if (!response.ok) {
-				throw new Error("Failed to fetch global LLM configurations");
-			}
-
-			const data = await response.json();
-			setGlobalConfigs(data);
-			setError(null);
-		} catch (err: any) {
-			setError(err.message || "Failed to fetch global LLM configurations");
-			console.error("Error fetching global LLM configurations:", err);
-		} finally {
-			setLoading(false);
-		}
-	};
-
-	useEffect(() => {
-		fetchGlobalConfigs();
-	}, []);
-
-	return {
-		globalConfigs,
-		loading,
-		error,
-		refreshGlobalConfigs: fetchGlobalConfigs,
-	};
-}