diff --git a/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx b/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx index dd32e6513..0a35a655c 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx @@ -60,10 +60,8 @@ export function DashboardClientLayout({ } }, [activeChatId, isChatPannelOpen]); - // Replace useLLMPreferences with jotai atom const { data: preferences = {}, isFetching: loading, error } = useAtomValue(llmPreferencesAtom); - // Create isOnboardingComplete function for backward compatibility const isOnboardingComplete = useCallback(() => { return !!( preferences.long_context_llm_id && diff --git a/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx index 1e5783cae..37f6980b1 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/onboard/page.tsx @@ -121,7 +121,7 @@ const OnboardPage = () => { strategic_llm_id: defaultConfigId, }; - try { + await updatePreferences({ search_space_id: searchSpaceId, data: newPreferences @@ -131,9 +131,6 @@ const OnboardPage = () => { toast.success("AI models configured automatically!", { description: "You can customize these in advanced settings.", }); - } catch (updateError) { - console.error("Failed to update preferences:", updateError); - } } catch (error) { console.error("Auto-configuration failed:", error); } finally { diff --git a/surfsense_web/components/onboard/setup-llm-step.tsx b/surfsense_web/components/onboard/setup-llm-step.tsx index 8127f7ad4..4a2231eb2 100644 --- a/surfsense_web/components/onboard/setup-llm-step.tsx +++ b/surfsense_web/components/onboard/setup-llm-step.tsx @@ -99,8 +99,6 @@ export function SetupLLMStep({ const { mutateAsync : deleteLLMConfig } = useAtomValue(deleteLLMConfigMutationAtom); const { data : llmConfigs = []} = 
useAtomValue(llmConfigsAtom); const { data: globalConfigs = [] } = useAtomValue(globalLLMConfigsAtom); - - // Replace useLLMPreferences with jotai atoms const { data: preferences = {} } = useAtomValue(llmPreferencesAtom); const { mutateAsync: updatePreferences } = useAtomValue(updateLLMPreferencesMutationAtom); diff --git a/surfsense_web/components/settings/llm-role-manager.tsx b/surfsense_web/components/settings/llm-role-manager.tsx index 92d2e6f16..bce2d79fd 100644 --- a/surfsense_web/components/settings/llm-role-manager.tsx +++ b/surfsense_web/components/settings/llm-role-manager.tsx @@ -67,13 +67,13 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) { const { data: llmConfigs = [], isFetching: configsLoading, - isError: configsError, + error: configsError, refetch: refreshConfigs } = useAtomValue(llmConfigsAtom); const { data: globalConfigs = [], isFetching: globalConfigsLoading, - isError: globalConfigsError, + error: globalConfigsError, refetch: refreshGlobalConfigs, } = useAtomValue(globalLLMConfigsAtom); const { @@ -235,9 +235,9 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) { - {(configsError && "Failed to load LLM configurations") || - (preferencesError && "Failed to load preferences") || - (globalConfigsError && "Failed to load global configurations")} + {(configsError && (configsError.message || "Failed to load LLM configurations")) || + (preferencesError && (preferencesError.message || "Failed to load preferences")) || + (globalConfigsError && (globalConfigsError.message || "Failed to load global configurations"))} )}