mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-05 13:52:40 +02:00
feat(llm-config): complete migration of useGlobalLLMConfigs to globalLLMConfigsAtom
- Removed unused import from model-config-manager.tsx
- Deleted useGlobalLLMConfigs hook (no longer used anywhere)
- All 5 components now using jotai + tanstack query pattern
- Full backward compatibility maintained
This commit is contained in:
parent
62fcec425b
commit
194b17500e
2 changed files with 0 additions and 43 deletions
|
|
@ -135,43 +135,3 @@ export function useLLMPreferences(searchSpaceId: number | null) {
|
|||
isOnboardingComplete,
|
||||
};
|
||||
}
|
||||
|
||||
export function useGlobalLLMConfigs() {
|
||||
const [globalConfigs, setGlobalConfigs] = useState<LLMConfig[]>([]);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const fetchGlobalConfigs = async () => {
|
||||
try {
|
||||
setLoading(true);
|
||||
const response = await authenticatedFetch(
|
||||
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/global-llm-configs`,
|
||||
{ method: "GET" }
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error("Failed to fetch global LLM configurations");
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
setGlobalConfigs(data);
|
||||
setError(null);
|
||||
} catch (err: any) {
|
||||
setError(err.message || "Failed to fetch global LLM configurations");
|
||||
console.error("Error fetching global LLM configurations:", err);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
fetchGlobalConfigs();
|
||||
}, []);
|
||||
|
||||
return {
|
||||
globalConfigs,
|
||||
loading,
|
||||
error,
|
||||
refreshGlobalConfigs: fetchGlobalConfigs,
|
||||
};
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue