SurfSense/surfsense_web/hooks/use-llm-configs.ts
CREDO23 194b17500e feat(llm-config): complete migration of useGlobalLLMConfigs to globalLLMConfigsAtom
- Removed unused import from model-config-manager.tsx
- Deleted useGlobalLLMConfigs hook (no longer used anywhere)
- All 5 components now using jotai + tanstack query pattern
- Full backward compatibility maintained
2025-12-14 20:22:00 +02:00

137 lines
3.3 KiB
TypeScript

"use client";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import { authenticatedFetch } from "@/lib/auth-utils";
/**
 * A stored LLM configuration as returned by the backend.
 *
 * NOTE(review): field names mirror LiteLLM connection options
 * (`api_base`, `litellm_params`) — presumably passed through to LiteLLM
 * on the server; confirm against the backend schema.
 */
export interface LLMConfig {
// Database primary key.
id: number;
// Human-readable display name for this configuration.
name: string;
provider: string;
// Free-form provider name used when `provider` is a "custom" option.
custom_provider?: string;
model_name: string;
api_key: string;
// Override base URL for the provider endpoint, when applicable.
api_base?: string;
language?: string;
// Extra provider-specific parameters forwarded verbatim to the backend.
litellm_params?: Record<string, any>;
// ISO timestamp set by the server; absent on locally-built objects.
created_at?: string;
// Owning search space; absent/irrelevant for global configs.
search_space_id?: number;
// True when the config is shared across search spaces.
is_global?: boolean;
}
/**
 * Per-search-space assignment of LLM configs to the three roles the app
 * uses (long-context, fast, strategic). The backend returns both the raw
 * ids and, optionally, the resolved `LLMConfig` objects.
 */
export interface LLMPreferences {
long_context_llm_id?: number;
fast_llm_id?: number;
strategic_llm_id?: number;
// Resolved config objects corresponding to the ids above, when expanded.
long_context_llm?: LLMConfig;
fast_llm?: LLMConfig;
strategic_llm?: LLMConfig;
}
/**
 * Payload for creating a new LLM configuration.
 *
 * Same shape as `LLMConfig` minus the server-generated fields
 * (`id`, `created_at`, `is_global`), with `search_space_id` required.
 */
export interface CreateLLMConfig {
name: string;
provider: string;
custom_provider?: string;
model_name: string;
api_key: string;
api_base?: string;
language?: string;
litellm_params?: Record<string, any>;
// Search space the new config is created in (required on create).
search_space_id: number;
}
/**
 * Partial-update payload for an existing LLM configuration.
 * Every field is optional; only the provided fields are changed.
 */
export interface UpdateLLMConfig {
name?: string;
provider?: string;
custom_provider?: string;
model_name?: string;
api_key?: string;
api_base?: string;
litellm_params?: Record<string, any>;
}
export function useLLMPreferences(searchSpaceId: number | null) {
const [preferences, setPreferences] = useState<LLMPreferences>({});
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const fetchPreferences = async () => {
if (!searchSpaceId) {
setLoading(false);
return;
}
try {
setLoading(true);
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/llm-preferences`,
{ method: "GET" }
);
if (!response.ok) {
throw new Error("Failed to fetch LLM preferences");
}
const data = await response.json();
setPreferences(data);
setError(null);
} catch (err: any) {
setError(err.message || "Failed to fetch LLM preferences");
console.error("Error fetching LLM preferences:", err);
} finally {
setLoading(false);
}
};
useEffect(() => {
fetchPreferences();
}, [searchSpaceId]);
const updatePreferences = async (newPreferences: Partial<LLMPreferences>): Promise<boolean> => {
if (!searchSpaceId) {
toast.error("Search space ID is required");
return false;
}
try {
const response = await authenticatedFetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/llm-preferences`,
{
method: "PUT",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(newPreferences),
}
);
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.detail || "Failed to update LLM preferences");
}
const updatedPreferences = await response.json();
setPreferences(updatedPreferences);
toast.success("LLM preferences updated successfully");
return true;
} catch (err: any) {
toast.error(err.message || "Failed to update LLM preferences");
console.error("Error updating LLM preferences:", err);
return false;
}
};
const isOnboardingComplete = (): boolean => {
return !!(
preferences.long_context_llm_id &&
preferences.fast_llm_id &&
preferences.strategic_llm_id
);
};
return {
preferences,
loading,
error,
updatePreferences,
refreshPreferences: fetchPreferences,
isOnboardingComplete,
};
}