Add frontend vision LLM config types, API, atoms, and role manager wiring

This commit is contained in:
CREDO23 2026-04-07 19:24:43 +02:00
parent 4a675b64f4
commit 3369b8a832
7 changed files with 421 additions and 20 deletions

View file

@@ -0,0 +1,84 @@
import { atomWithMutation } from "jotai-tanstack-query";
import { toast } from "sonner";
import type {
CreateVisionLLMConfigRequest,
CreateVisionLLMConfigResponse,
DeleteVisionLLMConfigResponse,
GetVisionLLMConfigsResponse,
UpdateVisionLLMConfigRequest,
UpdateVisionLLMConfigResponse,
} from "@/contracts/types/new-llm-config.types";
import { visionLLMConfigApiService } from "@/lib/apis/vision-llm-config-api.service";
import { cacheKeys } from "@/lib/query-client/cache-keys";
import { queryClient } from "@/lib/query-client/client";
import { activeSearchSpaceIdAtom } from "../search-spaces/search-space-query.atoms";
/**
 * Mutation atom for creating a vision LLM config in the active search space.
 *
 * On success, shows a toast and invalidates the config-list cache so the UI
 * refetches. (The original passed `enabled` here, but `enabled` is a query
 * option, not a mutation option, so it was a silent no-op and is omitted.)
 */
export const createVisionLLMConfigMutationAtom = atomWithMutation((get) => {
  const searchSpaceId = get(activeSearchSpaceIdAtom);
  return {
    mutationKey: ["vision-llm-configs", "create"],
    mutationFn: async (request: CreateVisionLLMConfigRequest) => {
      return visionLLMConfigApiService.createConfig(request);
    },
    onSuccess: (_: CreateVisionLLMConfigResponse, request: CreateVisionLLMConfigRequest) => {
      toast.success(`${request.name} created`);
      // Only invalidate when a search space is active; Number(undefined)
      // would produce a NaN cache key that matches nothing.
      if (searchSpaceId != null) {
        queryClient.invalidateQueries({
          queryKey: cacheKeys.visionLLMConfigs.all(Number(searchSpaceId)),
        });
      }
    },
    onError: (error: Error) => {
      toast.error(error.message || "Failed to create vision model");
    },
  };
});
/**
 * Mutation atom for updating an existing vision LLM config.
 *
 * Invalidates both the per-search-space list cache and the per-config detail
 * cache on success. (`enabled` was removed: it is a query option, not a
 * mutation option, and had no effect.)
 */
export const updateVisionLLMConfigMutationAtom = atomWithMutation((get) => {
  const searchSpaceId = get(activeSearchSpaceIdAtom);
  return {
    mutationKey: ["vision-llm-configs", "update"],
    mutationFn: async (request: UpdateVisionLLMConfigRequest) => {
      return visionLLMConfigApiService.updateConfig(request);
    },
    onSuccess: (_: UpdateVisionLLMConfigResponse, request: UpdateVisionLLMConfigRequest) => {
      toast.success(`${request.data.name ?? "Configuration"} updated`);
      // Guard: Number(undefined) would yield a NaN key that matches nothing.
      if (searchSpaceId != null) {
        queryClient.invalidateQueries({
          queryKey: cacheKeys.visionLLMConfigs.all(Number(searchSpaceId)),
        });
      }
      queryClient.invalidateQueries({
        queryKey: cacheKeys.visionLLMConfigs.byId(request.id),
      });
    },
    onError: (error: Error) => {
      toast.error(error.message || "Failed to update vision model");
    },
  };
});
/**
 * Mutation atom for deleting a vision LLM config.
 *
 * Instead of invalidating and refetching, it removes the deleted entry from
 * the cached list in place. (`enabled` was removed: mutations have no such
 * option in TanStack Query, so it was a no-op.)
 */
export const deleteVisionLLMConfigMutationAtom = atomWithMutation((get) => {
  const searchSpaceId = get(activeSearchSpaceIdAtom);
  return {
    mutationKey: ["vision-llm-configs", "delete"],
    mutationFn: async (request: { id: number; name: string }) => {
      return visionLLMConfigApiService.deleteConfig(request.id);
    },
    onSuccess: (_: DeleteVisionLLMConfigResponse, request: { id: number; name: string }) => {
      toast.success(`${request.name} deleted`);
      // Guard against writing under a NaN cache key when no search space
      // is active (Number(undefined) === NaN).
      if (searchSpaceId != null) {
        queryClient.setQueryData(
          cacheKeys.visionLLMConfigs.all(Number(searchSpaceId)),
          (oldData: GetVisionLLMConfigsResponse | undefined) => {
            if (!oldData) return oldData;
            return oldData.filter((config) => config.id !== request.id);
          }
        );
      }
    },
    onError: (error: Error) => {
      toast.error(error.message || "Failed to delete vision model");
    },
  };
});

View file

@@ -0,0 +1,27 @@
import { atomWithQuery } from "jotai-tanstack-query";
import { visionLLMConfigApiService } from "@/lib/apis/vision-llm-config-api.service";
import { cacheKeys } from "@/lib/query-client/cache-keys";
import { activeSearchSpaceIdAtom } from "../search-spaces/search-space-query.atoms";
/**
 * Query atom: vision LLM configs scoped to the currently active search space.
 * Disabled (no fetch) until a search space is selected; results are considered
 * fresh for five minutes.
 */
export const visionLLMConfigsAtom = atomWithQuery((get) => {
  const activeId = get(activeSearchSpaceIdAtom);
  const numericId = Number(activeId);
  return {
    queryKey: cacheKeys.visionLLMConfigs.all(numericId),
    enabled: Boolean(activeId),
    staleTime: 5 * 60 * 1000,
    queryFn: () => visionLLMConfigApiService.getConfigs(numericId),
  };
});
/**
 * Query atom: globally provisioned vision LLM configs (not tied to a search
 * space). Always enabled; fresh for ten minutes.
 */
export const globalVisionLLMConfigsAtom = atomWithQuery(() => ({
  queryKey: cacheKeys.visionLLMConfigs.global(),
  staleTime: 10 * 60 * 1000,
  queryFn: () => visionLLMConfigApiService.getGlobalConfigs(),
}));

View file

@@ -24,6 +24,10 @@ import {
llmPreferencesAtom,
newLLMConfigsAtom,
} from "@/atoms/new-llm-config/new-llm-config-query.atoms";
import {
globalVisionLLMConfigsAtom,
visionLLMConfigsAtom,
} from "@/atoms/vision-llm-config/vision-llm-config-query.atoms";
import { Alert, AlertDescription } from "@/components/ui/alert";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
@@ -77,8 +81,8 @@ const ROLE_DESCRIPTIONS = {
description: "Vision-capable model for screenshot analysis and context extraction",
color: "text-amber-600 dark:text-amber-400",
bgColor: "bg-amber-500/10",
prefKey: "vision_llm_id" as const,
configType: "llm" as const,
prefKey: "vision_llm_config_id" as const,
configType: "vision" as const,
},
};
@@ -112,6 +116,18 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
error: globalImageConfigsError,
} = useAtomValue(globalImageGenConfigsAtom);
// Vision LLM configs
const {
data: userVisionConfigs = [],
isFetching: visionConfigsLoading,
error: visionConfigsError,
} = useAtomValue(visionLLMConfigsAtom);
const {
data: globalVisionConfigs = [],
isFetching: globalVisionConfigsLoading,
error: globalVisionConfigsError,
} = useAtomValue(globalVisionLLMConfigsAtom);
// Preferences
const {
data: preferences = {},
@@ -125,7 +141,7 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
agent_llm_id: preferences.agent_llm_id ?? "",
document_summary_llm_id: preferences.document_summary_llm_id ?? "",
image_generation_config_id: preferences.image_generation_config_id ?? "",
vision_llm_id: preferences.vision_llm_id ?? "",
vision_llm_config_id: preferences.vision_llm_config_id ?? "",
}));
const [savingRole, setSavingRole] = useState<string | null>(null);
@@ -137,14 +153,14 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
agent_llm_id: preferences.agent_llm_id ?? "",
document_summary_llm_id: preferences.document_summary_llm_id ?? "",
image_generation_config_id: preferences.image_generation_config_id ?? "",
vision_llm_id: preferences.vision_llm_id ?? "",
vision_llm_config_id: preferences.vision_llm_config_id ?? "",
});
}
}, [
preferences?.agent_llm_id,
preferences?.document_summary_llm_id,
preferences?.image_generation_config_id,
preferences?.vision_llm_id,
preferences?.vision_llm_config_id,
]);
const handleRoleAssignment = useCallback(
@@ -181,6 +197,14 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
...(userImageConfigs ?? []).filter((config) => config.id && config.id.toString().trim() !== ""),
];
// Combine global and custom vision LLM configs
const allVisionConfigs = [
...globalVisionConfigs.map((config) => ({ ...config, is_global: true })),
...(userVisionConfigs ?? []).filter(
(config) => config.id && config.id.toString().trim() !== ""
),
];
const isAssignmentComplete =
allLLMConfigs.some((c) => c.id === assignments.agent_llm_id) &&
allLLMConfigs.some((c) => c.id === assignments.document_summary_llm_id) &&
@@ -191,13 +215,17 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
preferencesLoading ||
globalConfigsLoading ||
imageConfigsLoading ||
globalImageConfigsLoading;
globalImageConfigsLoading ||
visionConfigsLoading ||
globalVisionConfigsLoading;
const hasError =
configsError ||
preferencesError ||
globalConfigsError ||
imageConfigsError ||
globalImageConfigsError;
globalImageConfigsError ||
visionConfigsError ||
globalVisionConfigsError;
const hasAnyConfigs = allLLMConfigs.length > 0 || allImageConfigs.length > 0;
return (
@@ -291,15 +319,27 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
<div className="grid gap-4 grid-cols-1 lg:grid-cols-2">
{Object.entries(ROLE_DESCRIPTIONS).map(([key, role]) => {
const IconComponent = role.icon;
const isImageRole = role.configType === "image";
const currentAssignment = assignments[role.prefKey as keyof typeof assignments];
// Pick the right config lists based on role type
const roleGlobalConfigs = isImageRole ? globalImageConfigs : globalConfigs;
const roleUserConfigs = isImageRole
? (userImageConfigs ?? []).filter((c) => c.id && c.id.toString().trim() !== "")
: newLLMConfigs.filter((c) => c.id && c.id.toString().trim() !== "");
const roleAllConfigs = isImageRole ? allImageConfigs : allLLMConfigs;
const roleGlobalConfigs =
role.configType === "image"
? globalImageConfigs
: role.configType === "vision"
? globalVisionConfigs
: globalConfigs;
const roleUserConfigs =
role.configType === "image"
? (userImageConfigs ?? []).filter((c) => c.id && c.id.toString().trim() !== "")
: role.configType === "vision"
? (userVisionConfigs ?? []).filter((c) => c.id && c.id.toString().trim() !== "")
: newLLMConfigs.filter((c) => c.id && c.id.toString().trim() !== "");
const roleAllConfigs =
role.configType === "image"
? allImageConfigs
: role.configType === "vision"
? allVisionConfigs
: allLLMConfigs;
const assignedConfig = roleAllConfigs.find((config) => config.id === currentAssignment);
const isAssigned = !!assignedConfig;

View file

@@ -0,0 +1,102 @@
/**
 * Display metadata for one vision-capable LLM provider option in the UI.
 */
export interface VisionProviderInfo {
  // Provider identifier sent to the backend; should match the values of the
  // vision provider enum in the contracts — confirm when adding a provider.
  value: string;
  // Human-readable provider name shown in the picker.
  label: string;
  // Example model name(s), e.g. for placeholder/help text.
  example: string;
  // Short description of the provider's vision models.
  description: string;
  // Default API base URL for providers that ship one (e.g. Ollama, DeepSeek).
  apiBase?: string;
}
/**
 * Catalog of vision-model providers offered in the config form.
 * Each entry carries UI metadata only (labels, examples, optional default
 * API base); `value` is the identifier persisted to the backend.
 */
export const VISION_PROVIDERS: VisionProviderInfo[] = [
  {
    value: "OPENAI",
    label: "OpenAI",
    example: "gpt-4o, gpt-4o-mini",
    description: "GPT-4o vision models",
  },
  {
    value: "ANTHROPIC",
    label: "Anthropic",
    example: "claude-sonnet-4-20250514",
    description: "Claude vision models",
  },
  {
    value: "GOOGLE",
    label: "Google AI Studio",
    example: "gemini-2.5-flash, gemini-2.0-flash",
    description: "Gemini vision models",
  },
  {
    value: "AZURE_OPENAI",
    label: "Azure OpenAI",
    example: "azure/gpt-4o",
    description: "OpenAI vision models on Azure",
  },
  {
    value: "VERTEX_AI",
    label: "Google Vertex AI",
    example: "vertex_ai/gemini-2.5-flash",
    description: "Gemini vision models on Vertex AI",
  },
  {
    value: "BEDROCK",
    label: "AWS Bedrock",
    example: "bedrock/anthropic.claude-sonnet-4-20250514-v1:0",
    description: "Vision models on AWS Bedrock",
  },
  {
    value: "XAI",
    label: "xAI",
    example: "grok-2-vision",
    description: "Grok vision models",
  },
  {
    value: "OPENROUTER",
    label: "OpenRouter",
    example: "openrouter/openai/gpt-4o",
    description: "Vision models via OpenRouter",
  },
  {
    value: "OLLAMA",
    label: "Ollama",
    example: "llava, bakllava",
    description: "Local vision models via Ollama",
    // Default endpoint of a local Ollama daemon.
    apiBase: "http://localhost:11434",
  },
  {
    value: "GROQ",
    label: "Groq",
    example: "llama-4-scout-17b-16e-instruct",
    description: "Vision models on Groq",
  },
  {
    value: "TOGETHER_AI",
    label: "Together AI",
    example: "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    description: "Vision models on Together AI",
  },
  {
    value: "FIREWORKS_AI",
    label: "Fireworks AI",
    example: "fireworks_ai/phi-3-vision-128k-instruct",
    description: "Vision models on Fireworks AI",
  },
  {
    value: "DEEPSEEK",
    label: "DeepSeek",
    example: "deepseek-chat",
    description: "DeepSeek vision models",
    apiBase: "https://api.deepseek.com",
  },
  {
    value: "MISTRAL",
    label: "Mistral",
    example: "pixtral-large-latest",
    description: "Pixtral vision models",
  },
  {
    value: "CUSTOM",
    label: "Custom Provider",
    example: "custom/my-vision-model",
    description: "Custom OpenAI-compatible vision endpoint",
  },
];

View file

@@ -252,23 +252,99 @@ export const globalImageGenConfig = z.object({
export const getGlobalImageGenConfigsResponse = z.array(globalImageGenConfig);
// =============================================================================
// Vision LLM Config (separate table for vision-capable models)
// =============================================================================
/**
 * Providers that may serve vision-capable models.
 * NOTE(review): this list appears to mirror the VISION_PROVIDERS UI catalog —
 * confirm both are updated together when adding a provider.
 */
export const visionProviderEnum = z.enum([
  "OPENAI",
  "ANTHROPIC",
  "GOOGLE",
  "AZURE_OPENAI",
  "VERTEX_AI",
  "BEDROCK",
  "XAI",
  "OPENROUTER",
  "OLLAMA",
  "GROQ",
  "TOGETHER_AI",
  "FIREWORKS_AI",
  "DEEPSEEK",
  "MISTRAL",
  "CUSTOM",
]);

export type VisionProvider = z.infer<typeof visionProviderEnum>;
/** Full user-owned vision LLM config record as exchanged with the API. */
export const visionLLMConfig = z.object({
  id: z.number(),
  name: z.string().max(100),
  description: z.string().max(500).nullable().optional(),
  provider: visionProviderEnum,
  // Free-form provider name; presumably used when provider is "CUSTOM" —
  // confirm against the backend contract.
  custom_provider: z.string().max(100).nullable().optional(),
  model_name: z.string().max(100),
  api_key: z.string(),
  api_base: z.string().max(500).nullable().optional(),
  api_version: z.string().max(50).nullable().optional(),
  // Arbitrary extra parameters; shape is provider-specific.
  litellm_params: z.record(z.string(), z.any()).nullable().optional(),
  created_at: z.string(),
  search_space_id: z.number(),
  user_id: z.string(),
});
/**
 * Create payload: everything in visionLLMConfig except the server-generated
 * id, created_at, and user_id.
 */
export const createVisionLLMConfigRequest = visionLLMConfig.omit({
  id: true,
  created_at: true,
  user_id: true,
});

/** Create returns the full persisted record. */
export const createVisionLLMConfigResponse = visionLLMConfig;

/** List endpoint returns an array of full records. */
export const getVisionLLMConfigsResponse = z.array(visionLLMConfig);
/**
 * Update payload: target id plus a partial set of the mutable fields
 * (immutable/server-owned fields are omitted before .partial()).
 */
export const updateVisionLLMConfigRequest = z.object({
  id: z.number(),
  data: visionLLMConfig
    .omit({ id: true, created_at: true, search_space_id: true, user_id: true })
    .partial(),
});

/** Update returns the full persisted record. */
export const updateVisionLLMConfigResponse = visionLLMConfig;

/** Delete returns a confirmation message and the deleted id. */
export const deleteVisionLLMConfigResponse = z.object({
  message: z.string(),
  id: z.number(),
});
/**
 * Globally provisioned vision config. Unlike visionLLMConfig, it has no
 * api_key, search_space_id, or user_id — presumably server-managed and
 * shared across users (confirm against the backend).
 */
export const globalVisionLLMConfig = z.object({
  id: z.number(),
  name: z.string(),
  description: z.string().nullable().optional(),
  provider: z.string(),
  custom_provider: z.string().nullable().optional(),
  model_name: z.string(),
  api_base: z.string().nullable().optional(),
  api_version: z.string().nullable().optional(),
  litellm_params: z.record(z.string(), z.any()).nullable().optional(),
  // Literal discriminator so global configs can be distinguished from user
  // configs when the two lists are merged in the UI.
  is_global: z.literal(true),
  is_auto_mode: z.boolean().optional().default(false),
});

export const getGlobalVisionLLMConfigsResponse = z.array(globalVisionLLMConfig);
// =============================================================================
// LLM Preferences (Role Assignments)
// =============================================================================
/**
* LLM Preferences schemas - for role assignments
* image_generation uses image_generation_config_id (not llm_id)
*/
export const llmPreferences = z.object({
  agent_llm_id: z.union([z.number(), z.null()]).optional(),
  document_summary_llm_id: z.union([z.number(), z.null()]).optional(),
  image_generation_config_id: z.union([z.number(), z.null()]).optional(),
  // Legacy field superseded by vision_llm_config_id below.
  // NOTE(review): kept alongside the new field — confirm whether the backend
  // still emits vision_llm_id before removing it.
  vision_llm_id: z.union([z.number(), z.null()]).optional(),
  vision_llm_config_id: z.union([z.number(), z.null()]).optional(),
  agent_llm: z.union([z.record(z.string(), z.unknown()), z.null()]).optional(),
  document_summary_llm: z.union([z.record(z.string(), z.unknown()), z.null()]).optional(),
  image_generation_config: z.union([z.record(z.string(), z.unknown()), z.null()]).optional(),
  // Legacy expanded object paired with vision_llm_id above.
  vision_llm: z.union([z.record(z.string(), z.unknown()), z.null()]).optional(),
  vision_llm_config: z.union([z.record(z.string(), z.unknown()), z.null()]).optional(),
});
/**
@@ -289,7 +365,7 @@ export const updateLLMPreferencesRequest = z.object({
agent_llm_id: true,
document_summary_llm_id: true,
image_generation_config_id: true,
vision_llm_id: true,
vision_llm_config_id: true,
}),
});
@@ -341,6 +417,15 @@ export type UpdateImageGenConfigResponse = z.infer<typeof updateImageGenConfigRe
export type DeleteImageGenConfigResponse = z.infer<typeof deleteImageGenConfigResponse>;
export type GlobalImageGenConfig = z.infer<typeof globalImageGenConfig>;
export type GetGlobalImageGenConfigsResponse = z.infer<typeof getGlobalImageGenConfigsResponse>;
// Vision LLM config types, inferred from the schemas above.
export type VisionLLMConfig = z.infer<typeof visionLLMConfig>;
export type CreateVisionLLMConfigRequest = z.infer<typeof createVisionLLMConfigRequest>;
export type CreateVisionLLMConfigResponse = z.infer<typeof createVisionLLMConfigResponse>;
export type GetVisionLLMConfigsResponse = z.infer<typeof getVisionLLMConfigsResponse>;
export type UpdateVisionLLMConfigRequest = z.infer<typeof updateVisionLLMConfigRequest>;
export type UpdateVisionLLMConfigResponse = z.infer<typeof updateVisionLLMConfigResponse>;
export type DeleteVisionLLMConfigResponse = z.infer<typeof deleteVisionLLMConfigResponse>;
export type GlobalVisionLLMConfig = z.infer<typeof globalVisionLLMConfig>;
export type GetGlobalVisionLLMConfigsResponse = z.infer<typeof getGlobalVisionLLMConfigsResponse>;
export type LLMPreferences = z.infer<typeof llmPreferences>;
export type GetLLMPreferencesRequest = z.infer<typeof getLLMPreferencesRequest>;
export type GetLLMPreferencesResponse = z.infer<typeof getLLMPreferencesResponse>;

View file

@@ -0,0 +1,58 @@
import {
type CreateVisionLLMConfigRequest,
createVisionLLMConfigRequest,
createVisionLLMConfigResponse,
deleteVisionLLMConfigResponse,
getGlobalVisionLLMConfigsResponse,
getVisionLLMConfigsResponse,
type UpdateVisionLLMConfigRequest,
updateVisionLLMConfigRequest,
updateVisionLLMConfigResponse,
} from "@/contracts/types/new-llm-config.types";
import { ValidationError } from "../error";
import { baseApiService } from "./base-api.service";
/**
 * Client for the vision LLM config REST endpoints.
 *
 * Request payloads are validated against the zod contracts before the network
 * call; response validation is delegated to baseApiService via the response
 * schemas. The duplicated "collect zod issues into a ValidationError" logic
 * from createConfig/updateConfig is factored into one private helper.
 */
class VisionLLMConfigApiService {
  /** Build a ValidationError listing every zod issue message. */
  private invalidRequestError(issues: readonly { message: string }[]): ValidationError {
    const msg = issues.map((i) => i.message).join(", ");
    return new ValidationError(`Invalid request: ${msg}`);
  }

  /** Fetch globally provisioned vision configs (not tied to a search space). */
  getGlobalConfigs = async () => {
    return baseApiService.get(
      `/api/v1/global-vision-llm-configs`,
      getGlobalVisionLLMConfigsResponse
    );
  };

  /** Create a config; throws ValidationError on a malformed payload. */
  createConfig = async (request: CreateVisionLLMConfigRequest) => {
    const parsed = createVisionLLMConfigRequest.safeParse(request);
    if (!parsed.success) {
      throw this.invalidRequestError(parsed.error.issues);
    }
    return baseApiService.post(`/api/v1/vision-llm-configs`, createVisionLLMConfigResponse, {
      body: parsed.data,
    });
  };

  /** List configs belonging to one search space. */
  getConfigs = async (searchSpaceId: number) => {
    const params = new URLSearchParams({
      search_space_id: String(searchSpaceId),
    }).toString();
    return baseApiService.get(`/api/v1/vision-llm-configs?${params}`, getVisionLLMConfigsResponse);
  };

  /** Partially update a config; only the `data` fields are sent in the body. */
  updateConfig = async (request: UpdateVisionLLMConfigRequest) => {
    const parsed = updateVisionLLMConfigRequest.safeParse(request);
    if (!parsed.success) {
      throw this.invalidRequestError(parsed.error.issues);
    }
    const { id, data } = parsed.data;
    return baseApiService.put(`/api/v1/vision-llm-configs/${id}`, updateVisionLLMConfigResponse, {
      body: data,
    });
  };

  /** Delete a config by id. */
  deleteConfig = async (id: number) => {
    return baseApiService.delete(`/api/v1/vision-llm-configs/${id}`, deleteVisionLLMConfigResponse);
  };
}

/** Shared singleton used by the atoms layer. */
export const visionLLMConfigApiService = new VisionLLMConfigApiService();

View file

@@ -39,6 +39,11 @@ export const cacheKeys = {
byId: (configId: number) => ["image-gen-configs", "detail", configId] as const,
global: () => ["image-gen-configs", "global"] as const,
},
  // Query cache keys for vision LLM configs; mirrors the imageGenConfigs
  // key structure above (list per search space, per-config detail, global).
  visionLLMConfigs: {
    all: (searchSpaceId: number) => ["vision-llm-configs", searchSpaceId] as const,
    byId: (configId: number) => ["vision-llm-configs", "detail", configId] as const,
    global: () => ["vision-llm-configs", "global"] as const,
  },
auth: {
user: ["auth", "user"] as const,
},