feat(llm-config): complete migration of useGlobalLLMConfigs to globalLLMConfigsAtom

- Removed unused import from model-config-manager.tsx
- Deleted useGlobalLLMConfigs hook (no longer used anywhere)
- All 5 components are now using the jotai + TanStack Query pattern
- Full backward compatibility maintained
This commit is contained in:
CREDO23 2025-12-10 09:29:14 +00:00
parent 62fcec425b
commit 194b17500e
2 changed files with 0 additions and 43 deletions

View file

@@ -135,43 +135,3 @@ export function useLLMPreferences(searchSpaceId: number | null) {
isOnboardingComplete,
};
}
/**
 * React hook that fetches the list of global LLM configurations from the
 * backend (`/api/v1/global-llm-configs`) on mount.
 *
 * NOTE(review): this commit removes the hook in favor of the
 * globalLLMConfigsAtom (jotai + TanStack Query) pattern; this is the legacy
 * implementation.
 *
 * @returns an object with:
 *  - `globalConfigs` — configs from the last successful fetch (empty until then)
 *  - `loading` — true while a fetch is in flight
 *  - `error` — message from the last failed fetch, or null after a success
 *  - `refreshGlobalConfigs` — re-runs the fetch on demand
 */
export function useGlobalLLMConfigs() {
  const [globalConfigs, setGlobalConfigs] = useState<LLMConfig[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);

  const fetchGlobalConfigs = async () => {
    try {
      setLoading(true);
      const response = await authenticatedFetch(
        `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/global-llm-configs`,
        { method: "GET" }
      );
      if (!response.ok) {
        throw new Error("Failed to fetch global LLM configurations");
      }
      const data = await response.json();
      setGlobalConfigs(data);
      // Clear any stale error from a previous failed attempt.
      setError(null);
    } catch (err: unknown) {
      // Fix: narrow `unknown` instead of using `any` in the catch clause.
      // Preserve the original fallback when the message is missing/empty.
      const message =
        err instanceof Error && err.message
          ? err.message
          : "Failed to fetch global LLM configurations";
      setError(message);
      console.error("Error fetching global LLM configurations:", err);
    } finally {
      // Always clear the loading flag, on success or failure.
      setLoading(false);
    }
  };

  // Fetch once on mount. NOTE(review): no cleanup/abort — a fetch resolving
  // after unmount would still call setState; confirm whether callers care.
  useEffect(() => {
    fetchGlobalConfigs();
  }, []);

  return {
    globalConfigs,
    loading,
    error,
    refreshGlobalConfigs: fetchGlobalConfigs,
  };
}