feat: simplify LLM config and onboarding

This commit is contained in:
Ramnique Singh 2026-02-04 01:12:06 +05:30
parent 948c6e7176
commit 10f94ce67e
10 changed files with 630 additions and 153 deletions

View file

@@ -1,6 +1,7 @@
import { PrefixLogger } from './prefix-logger.js';
export * as ipc from './ipc.js';
export * as models from './models.js';
export * as workspace from './workspace.js';
export * as mcp from './mcp.js';
export { PrefixLogger };
export { PrefixLogger };

View file

@@ -2,6 +2,7 @@ import { z } from 'zod';
import { RelPath, Encoding, Stat, DirEntry, ReaddirOptions, ReadFileResult, WorkspaceChangeEvent, WriteFileOptions, WriteFileResult, RemoveOptions } from './workspace.js';
import { ListToolsResponse } from './mcp.js';
import { AskHumanResponsePayload, CreateRunOptions, Run, ListRunsResponse, ToolPermissionAuthorizePayload } from './runs.js';
import { LlmModelConfig } from './models.js';
// ============================================================================
// Runtime Validation Schemas (Single Source of Truth)
@@ -173,6 +174,34 @@ const ipcSchemas = {
req: z.null(),
res: z.null(),
},
'models:list': {
req: z.null(),
res: z.object({
providers: z.array(z.object({
id: z.string(),
name: z.string(),
models: z.array(z.object({
id: z.string(),
name: z.string().optional(),
release_date: z.string().optional(),
})),
})),
lastUpdated: z.string().optional(),
}),
},
'models:test': {
req: LlmModelConfig,
res: z.object({
success: z.boolean(),
error: z.string().optional(),
}),
},
'models:saveConfig': {
req: LlmModelConfig,
res: z.object({
success: z.literal(true),
}),
},
'oauth:connect': {
req: z.object({
provider: z.string(),
@@ -373,4 +402,4 @@ export function validateResponse<K extends keyof IPCChannels>(
): IPCChannels[K]['res'] {
const schema = ipcSchemas[channel].res;
return schema.parse(data) as IPCChannels[K]['res'];
}
}

View file

@@ -0,0 +1,13 @@
import { z } from "zod";
// Backend protocol families this app knows how to talk to.
const llmProviderFlavors = [
  "openai",
  "anthropic",
  "google",
  "openrouter",
  "aigateway",
  "ollama",
  "openai-compatible",
] as const;

/**
 * Connection settings for an LLM backend.
 *
 * Only `flavor` is required; `apiKey`, `baseURL`, and `headers` are optional
 * overrides (NOTE(review): presumably for self-hosted or proxied endpoints —
 * confirm against the callers that build this config).
 */
export const LlmProvider = z.object({
  // Which wire protocol / provider family to use.
  flavor: z.enum(llmProviderFlavors),
  // Optional credential for the provider.
  apiKey: z.string().optional(),
  // Optional endpoint override.
  baseURL: z.string().optional(),
  // Optional extra HTTP headers (string keys to string values).
  headers: z.record(z.string(), z.string()).optional(),
});
/**
 * A concrete model choice: provider connection settings plus the model id.
 * Used as the request payload of the 'models:test' and 'models:saveConfig'
 * IPC channels (see the ipcSchemas entries in this change set).
 */
export const LlmModelConfig = z.object({
// Backend connection settings — see LlmProvider above.
provider: LlmProvider,
// Provider-specific model identifier (free-form string).
model: z.string(),
});