feat: move LLMConfigs from User to SearchSpaces

- Role-based access control (RBAC) is planned as a follow-up change.
- Updated various services and routes to handle search space-specific LLM preferences.
- Modified frontend components to pass search space ID for LLM configuration management.
- Removed onboarding page and settings page as part of the refactor.
This commit is contained in:
DESKTOP-RTLN3BA\$punk 2025-10-10 00:50:29 -07:00
parent a1b1db3895
commit 633ea3ac0f
44 changed files with 1075 additions and 518 deletions

View file

@ -66,10 +66,6 @@ export function UserDropdown({
</DropdownMenuItem>
</DropdownMenuGroup>
<DropdownMenuSeparator />
<DropdownMenuItem onClick={() => router.push(`/settings`)}>
<Settings className="mr-2 h-4 w-4" />
Settings
</DropdownMenuItem>
<DropdownMenuItem onClick={handleLogout}>
<LogOut className="mr-2 h-4 w-4" />
Log out

View file

@ -332,8 +332,11 @@ const ResearchModeSelector = React.memo(
ResearchModeSelector.displayName = "ResearchModeSelector";
const LLMSelector = React.memo(() => {
const { llmConfigs, loading: llmLoading, error } = useLLMConfigs();
const { preferences, updatePreferences, loading: preferencesLoading } = useLLMPreferences();
const { search_space_id } = useParams();
const searchSpaceId = Number(search_space_id);
const { llmConfigs, loading: llmLoading, error } = useLLMConfigs(searchSpaceId);
const { preferences, updatePreferences, loading: preferencesLoading } = useLLMPreferences(searchSpaceId);
const isLoading = llmLoading || preferencesLoading;

View file

@ -23,12 +23,17 @@ import { type CreateLLMConfig, useLLMConfigs } from "@/hooks/use-llm-configs";
import InferenceParamsEditor from "../inference-params-editor";
/** Props for the AddProviderStep component. */
interface AddProviderStepProps {
/** ID of the search space whose LLM configs are managed; forwarded to useLLMConfigs and stored as search_space_id on created configs. */
searchSpaceId: number;
/** Invoked after a new LLM config is successfully created, so the parent can react (e.g. refresh). */
onConfigCreated?: () => void;
/** Presumably invoked after a config is deleted — confirm against the component body (call site not visible in this hunk). */
onConfigDeleted?: () => void;
}
export function AddProviderStep({ onConfigCreated, onConfigDeleted }: AddProviderStepProps) {
const { llmConfigs, createLLMConfig, deleteLLMConfig } = useLLMConfigs();
export function AddProviderStep({
searchSpaceId,
onConfigCreated,
onConfigDeleted,
}: AddProviderStepProps) {
const { llmConfigs, createLLMConfig, deleteLLMConfig } = useLLMConfigs(searchSpaceId);
const [isAddingNew, setIsAddingNew] = useState(false);
const [formData, setFormData] = useState<CreateLLMConfig>({
name: "",
@ -38,6 +43,7 @@ export function AddProviderStep({ onConfigCreated, onConfigDeleted }: AddProvide
api_key: "",
api_base: "",
litellm_params: {},
search_space_id: searchSpaceId,
});
const [isSubmitting, setIsSubmitting] = useState(false);
@ -65,6 +71,7 @@ export function AddProviderStep({ onConfigCreated, onConfigDeleted }: AddProvide
api_key: "",
api_base: "",
litellm_params: {},
search_space_id: searchSpaceId,
});
setIsAddingNew(false);
// Notify parent component that a config was created
@ -253,7 +260,6 @@ export function AddProviderStep({ onConfigCreated, onConfigDeleted }: AddProvide
/>
</div>
<div className="flex gap-2 pt-4">
<Button type="submit" disabled={isSubmitting}>
{isSubmitting ? "Adding..." : "Add Provider"}

View file

@ -41,12 +41,13 @@ const ROLE_DESCRIPTIONS = {
};
/** Props for the AssignRolesStep component. */
interface AssignRolesStepProps {
/** ID of the search space whose LLM configs and preferences are loaded (passed to useLLMConfigs / useLLMPreferences). */
searchSpaceId: number;
/** NOTE(review): likely awaited after role preferences are saved — confirm against the component body (call site not visible in this hunk). */
onPreferencesUpdated?: () => Promise<void>;
}
export function AssignRolesStep({ onPreferencesUpdated }: AssignRolesStepProps) {
const { llmConfigs } = useLLMConfigs();
const { preferences, updatePreferences } = useLLMPreferences();
export function AssignRolesStep({ searchSpaceId, onPreferencesUpdated }: AssignRolesStepProps) {
const { llmConfigs } = useLLMConfigs(searchSpaceId);
const { preferences, updatePreferences } = useLLMPreferences(searchSpaceId);
const [assignments, setAssignments] = useState({
long_context_llm_id: preferences.long_context_llm_id || "",

View file

@ -12,9 +12,13 @@ const ROLE_ICONS = {
strategic: Bot,
};
export function CompletionStep() {
const { llmConfigs } = useLLMConfigs();
const { preferences } = useLLMPreferences();
/** Props for the CompletionStep component. */
interface CompletionStepProps {
/** ID of the search space whose LLM configs and preferences are displayed (passed to useLLMConfigs / useLLMPreferences). */
searchSpaceId: number;
}
export function CompletionStep({ searchSpaceId }: CompletionStepProps) {
const { llmConfigs } = useLLMConfigs(searchSpaceId);
const { preferences } = useLLMPreferences(searchSpaceId);
const assignedConfigs = {
long_context: llmConfigs.find((c) => c.id === preferences.long_context_llm_id),

View file

@ -56,20 +56,24 @@ const ROLE_DESCRIPTIONS = {
},
};
export function LLMRoleManager() {
/** Props for the LLMRoleManager component. */
interface LLMRoleManagerProps {
/** ID of the search space whose LLM configs and role preferences are managed (passed to useLLMConfigs / useLLMPreferences). */
searchSpaceId: number;
}
export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
const {
llmConfigs,
loading: configsLoading,
error: configsError,
refreshConfigs,
} = useLLMConfigs();
} = useLLMConfigs(searchSpaceId);
const {
preferences,
loading: preferencesLoading,
error: preferencesError,
updatePreferences,
refreshPreferences,
} = useLLMPreferences();
} = useLLMPreferences(searchSpaceId);
const [assignments, setAssignments] = useState({
long_context_llm_id: preferences.long_context_llm_id || "",

View file

@ -41,7 +41,11 @@ import { LLM_PROVIDERS } from "@/contracts/enums/llm-providers";
import { type CreateLLMConfig, type LLMConfig, useLLMConfigs } from "@/hooks/use-llm-configs";
import InferenceParamsEditor from "../inference-params-editor";
export function ModelConfigManager() {
/** Props for the ModelConfigManager component. */
interface ModelConfigManagerProps {
/** ID of the search space whose LLM configs are managed; forwarded to useLLMConfigs and stored as search_space_id on created/edited configs. */
searchSpaceId: number;
}
export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
const {
llmConfigs,
loading,
@ -50,7 +54,7 @@ export function ModelConfigManager() {
updateLLMConfig,
deleteLLMConfig,
refreshConfigs,
} = useLLMConfigs();
} = useLLMConfigs(searchSpaceId);
const [isAddingNew, setIsAddingNew] = useState(false);
const [editingConfig, setEditingConfig] = useState<LLMConfig | null>(null);
const [showApiKey, setShowApiKey] = useState<Record<number, boolean>>({});
@ -62,6 +66,7 @@ export function ModelConfigManager() {
api_key: "",
api_base: "",
litellm_params: {},
search_space_id: searchSpaceId,
});
const [isSubmitting, setIsSubmitting] = useState(false);
@ -76,9 +81,10 @@ export function ModelConfigManager() {
api_key: editingConfig.api_key,
api_base: editingConfig.api_base || "",
litellm_params: editingConfig.litellm_params || {},
search_space_id: searchSpaceId,
});
}
}, [editingConfig]);
}, [editingConfig, searchSpaceId]);
const handleInputChange = (field: keyof CreateLLMConfig, value: string) => {
setFormData((prev) => ({ ...prev, [field]: value }));
@ -113,6 +119,7 @@ export function ModelConfigManager() {
api_key: "",
api_base: "",
litellm_params: {},
search_space_id: searchSpaceId,
});
setIsAddingNew(false);
setEditingConfig(null);
@ -426,6 +433,7 @@ export function ModelConfigManager() {
api_key: "",
api_base: "",
litellm_params: {},
search_space_id: searchSpaceId,
});
}
}}
@ -462,18 +470,12 @@ export function ModelConfigManager() {
value={formData.provider}
onValueChange={(value) => handleInputChange("provider", value)}
>
<SelectTrigger className="h-auto min-h-[2.5rem] py-2">
<SelectTrigger>
<SelectValue placeholder="Select a provider">
{formData.provider && (
<div className="flex items-center space-x-2 py-1">
<div className="font-medium">
{LLM_PROVIDERS.find((p) => p.value === formData.provider)?.label}
</div>
<div className="text-xs text-muted-foreground"></div>
<div className="text-xs text-muted-foreground">
{LLM_PROVIDERS.find((p) => p.value === formData.provider)?.description}
</div>
</div>
<span className="font-medium">
{LLM_PROVIDERS.find((p) => p.value === formData.provider)?.label}
</span>
)}
</SelectValue>
</SelectTrigger>
@ -549,7 +551,7 @@ export function ModelConfigManager() {
<InferenceParamsEditor
params={formData.litellm_params || {}}
setParams={(newParams) =>
setFormData((prev) => ({ ...prev, litellm_params: newParams }))
setFormData((prev) => ({ ...prev, litellm_params: newParams }))
}
/>
</div>
@ -578,6 +580,7 @@ export function ModelConfigManager() {
api_key: "",
api_base: "",
litellm_params: {},
search_space_id: searchSpaceId,
});
}}
disabled={isSubmitting}