Model switch (#413)

Add ability to switch models in chat
This commit is contained in:
arkml 2026-03-02 21:42:46 +05:30 committed by GitHub
parent d7dc27a77e
commit 5a72ee06e1
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 444 additions and 93 deletions

View file

@ -2,6 +2,7 @@ import { useCallback, useEffect, useRef, useState } from 'react'
import {
ArrowUp,
AudioLines,
ChevronDown,
FileArchive,
FileCode2,
FileIcon,
@ -15,6 +16,13 @@ import {
} from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuRadioGroup,
DropdownMenuRadioItem,
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu'
import {
type AttachmentIconKind,
getAttachmentDisplayName,
@ -45,6 +53,25 @@ export type StagedAttachment = {
const MAX_ATTACHMENT_SIZE = 10 * 1024 * 1024 // 10MB
// Human-readable labels for provider flavors shown in the model-switch
// dropdown. Unknown flavors fall back to the raw flavor string at the
// render site (providerDisplayNames[m.flavor] || m.flavor).
const providerDisplayNames: Record<string, string> = {
openai: 'OpenAI',
anthropic: 'Anthropic',
google: 'Gemini',
ollama: 'Ollama',
openrouter: 'OpenRouter',
aigateway: 'AI Gateway',
'openai-compatible': 'OpenAI-Compatible',
}
// One selectable model entry parsed out of config/models.json.
// Provider credentials/endpoint are carried along so that switching to this
// model can re-save a complete provider configuration via IPC.
interface ConfiguredModel {
// Provider flavor key, e.g. 'openai' or 'openai-compatible'.
flavor: string
// Model identifier as understood by the provider.
model: string
apiKey?: string
baseURL?: string
headers?: Record<string, string>
knowledgeGraphModel?: string
}
function getAttachmentIcon(kind: AttachmentIconKind) {
switch (kind) {
case 'audio':
@ -96,6 +123,90 @@ function ChatInputInner({
const fileInputRef = useRef<HTMLInputElement>(null)
const canSubmit = (Boolean(message.trim()) || attachments.length > 0) && !isProcessing
const [configuredModels, setConfiguredModels] = useState<ConfiguredModel[]>([])
const [activeModelKey, setActiveModelKey] = useState('')
// Load model config from disk (on mount and whenever tab becomes active)
// Load the model configuration from config/models.json (workspace-relative)
// and flatten it into the list of selectable models for the dropdown.
// The entry matching the top-level default provider/model is bubbled to the
// front and selected as the active model. Silently no-ops when the file is
// missing or unparsable (no config yet).
const loadModelConfig = useCallback(async () => {
try {
const result = await window.ipc.invoke('workspace:readFile', { path: 'config/models.json' })
const parsed = JSON.parse(result.data)
const models: ConfiguredModel[] = []
if (parsed?.providers) {
for (const [flavor, entry] of Object.entries(parsed.providers)) {
const e = entry as Record<string, unknown>
// Newer configs store a 'models' array; older ones a single 'model'
// string. Prefer the array when it is non-empty.
const modelList: string[] = Array.isArray(e.models) ? e.models as string[] : []
const singleModel = typeof e.model === 'string' ? e.model : ''
const allModels = modelList.length > 0 ? modelList : singleModel ? [singleModel] : []
for (const model of allModels) {
if (model) {
models.push({
flavor,
model,
apiKey: (e.apiKey as string) || undefined,
baseURL: (e.baseURL as string) || undefined,
headers: (e.headers as Record<string, string>) || undefined,
knowledgeGraphModel: (e.knowledgeGraphModel as string) || undefined,
})
}
}
}
}
// 'flavor/model' key of the currently active (top-level) configuration.
const defaultKey = parsed?.provider?.flavor && parsed?.model
? `${parsed.provider.flavor}/${parsed.model}`
: ''
// Move the default entry to the front; all other pairs compare equal
// (return 0), so their relative order is preserved by a stable sort.
models.sort((a, b) => {
const aKey = `${a.flavor}/${a.model}`
const bKey = `${b.flavor}/${b.model}`
if (aKey === defaultKey) return -1
if (bKey === defaultKey) return 1
return 0
})
setConfiguredModels(models)
if (defaultKey) {
setActiveModelKey(defaultKey)
}
} catch {
// No config yet
}
}, [])
// Load the config on mount and re-load whenever this tab becomes active, so
// switches made elsewhere are picked up. (isActive is listed as a dependency
// purely to re-trigger the effect; its value is not read inside.)
useEffect(() => {
loadModelConfig()
}, [isActive, loadModelConfig])
// Reload when model config changes (e.g. from settings dialog)
useEffect(() => {
const handler = () => { loadModelConfig() }
window.addEventListener('models-config-changed', handler)
return () => window.removeEventListener('models-config-changed', handler)
}, [loadModelConfig])
// Switch the active model: persist the chosen provider/model pair via IPC
// and update the dropdown selection. The provider's full model list is
// re-sent so the saved config keeps every configured entry.
const handleModelChange = useCallback(async (key: string) => {
  const entry = configuredModels.find((m) => `${m.flavor}/${m.model}` === key)
  if (!entry) return
  // Collect all models for this provider so the full list is preserved
  const providerModels = configuredModels
    .filter((m) => m.flavor === entry.flavor)
    .map((m) => m.model)
  try {
    await window.ipc.invoke('models:saveConfig', {
      provider: {
        flavor: entry.flavor,
        apiKey: entry.apiKey,
        baseURL: entry.baseURL,
        headers: entry.headers,
      },
      model: entry.model,
      models: providerModels,
      knowledgeGraphModel: entry.knowledgeGraphModel,
    })
    // Only reflect the switch in the UI once the save has succeeded.
    // Setting the key optimistically (before the await) left the dropdown
    // showing a model that never actually activated when the save failed.
    setActiveModelKey(key)
  } catch {
    toast.error('Failed to switch model')
  }
}, [configuredModels])
// Restore the tab draft when this input mounts.
useEffect(() => {
if (initialDraft) {
@ -239,24 +350,33 @@ function ChatInputInner({
})}
</div>
)}
<div className="flex items-center gap-2 px-4 py-4">
<input
ref={fileInputRef}
type="file"
multiple
className="hidden"
onChange={(e) => {
const files = e.target.files
if (!files || files.length === 0) return
const paths = Array.from(files)
.map((file) => window.electronUtils?.getPathForFile(file))
.filter(Boolean) as string[]
if (paths.length > 0) {
void addFiles(paths)
}
e.target.value = ''
}}
<input
ref={fileInputRef}
type="file"
multiple
className="hidden"
onChange={(e) => {
const files = e.target.files
if (!files || files.length === 0) return
const paths = Array.from(files)
.map((file) => window.electronUtils?.getPathForFile(file))
.filter(Boolean) as string[]
if (paths.length > 0) {
void addFiles(paths)
}
e.target.value = ''
}}
/>
<div className="px-4 pt-4 pb-2">
<PromptInputTextarea
placeholder="Type your message..."
onKeyDown={handleKeyDown}
autoFocus={isActive}
focusTrigger={isActive ? `${runId ?? 'new'}:${focusNonce}` : undefined}
className="min-h-6 rounded-none border-0 py-0 shadow-none focus-visible:ring-0"
/>
</div>
<div className="flex items-center gap-2 px-4 pb-3">
<button
type="button"
onClick={() => fileInputRef.current?.click()}
@ -265,13 +385,35 @@ function ChatInputInner({
>
<Plus className="h-4 w-4" />
</button>
<PromptInputTextarea
placeholder="Type your message..."
onKeyDown={handleKeyDown}
autoFocus={isActive}
focusTrigger={isActive ? `${runId ?? 'new'}:${focusNonce}` : undefined}
className="min-h-6 rounded-none border-0 py-0 shadow-none focus-visible:ring-0"
/>
<div className="flex-1" />
{configuredModels.length > 0 && (
<DropdownMenu>
<DropdownMenuTrigger asChild>
<button
type="button"
className="flex h-7 shrink-0 items-center gap-1 rounded-full px-2 text-xs text-muted-foreground transition-colors hover:bg-muted hover:text-foreground"
>
<span className="max-w-[150px] truncate">
{configuredModels.find((m) => `${m.flavor}/${m.model}` === activeModelKey)?.model || 'Model'}
</span>
<ChevronDown className="h-3 w-3" />
</button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
<DropdownMenuRadioGroup value={activeModelKey} onValueChange={handleModelChange}>
{configuredModels.map((m) => {
const key = `${m.flavor}/${m.model}`
return (
<DropdownMenuRadioItem key={key} value={key}>
<span className="truncate">{m.model}</span>
<span className="ml-2 text-xs text-muted-foreground">{providerDisplayNames[m.flavor] || m.flavor}</span>
</DropdownMenuRadioItem>
)
})}
</DropdownMenuRadioGroup>
</DropdownMenuContent>
</DropdownMenu>
)}
{isProcessing ? (
<Button
size="icon"

View file

@ -2,7 +2,7 @@
import * as React from "react"
import { useState, useEffect, useCallback } from "react"
import { Server, Key, Shield, Palette, Monitor, Sun, Moon, Loader2, CheckCircle2 } from "lucide-react"
import { Server, Key, Shield, Palette, Monitor, Sun, Moon, Loader2, CheckCircle2, Plus, X } from "lucide-react"
import {
Dialog,
@ -167,14 +167,15 @@ const defaultBaseURLs: Partial<Record<LlmProviderFlavor, string>> = {
function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
const [provider, setProvider] = useState<LlmProviderFlavor>("openai")
const [providerConfigs, setProviderConfigs] = useState<Record<LlmProviderFlavor, { apiKey: string; baseURL: string; model: string; knowledgeGraphModel: string }>>({
openai: { apiKey: "", baseURL: "", model: "", knowledgeGraphModel: "" },
anthropic: { apiKey: "", baseURL: "", model: "", knowledgeGraphModel: "" },
google: { apiKey: "", baseURL: "", model: "", knowledgeGraphModel: "" },
openrouter: { apiKey: "", baseURL: "", model: "", knowledgeGraphModel: "" },
aigateway: { apiKey: "", baseURL: "", model: "", knowledgeGraphModel: "" },
ollama: { apiKey: "", baseURL: "http://localhost:11434", model: "", knowledgeGraphModel: "" },
"openai-compatible": { apiKey: "", baseURL: "http://localhost:1234/v1", model: "", knowledgeGraphModel: "" },
// Provider currently marked as the app-wide default (null until the saved
// config has been loaded).
const [defaultProvider, setDefaultProvider] = useState<LlmProviderFlavor | null>(null)
// Per-provider draft settings. Each provider keeps a non-empty 'models'
// list — a single empty string means "no model entered yet" so the UI
// always renders at least one input. Local providers get their
// conventional default base URLs.
const [providerConfigs, setProviderConfigs] = useState<Record<LlmProviderFlavor, { apiKey: string; baseURL: string; models: string[]; knowledgeGraphModel: string }>>({
openai: { apiKey: "", baseURL: "", models: [""], knowledgeGraphModel: "" },
anthropic: { apiKey: "", baseURL: "", models: [""], knowledgeGraphModel: "" },
google: { apiKey: "", baseURL: "", models: [""], knowledgeGraphModel: "" },
openrouter: { apiKey: "", baseURL: "", models: [""], knowledgeGraphModel: "" },
aigateway: { apiKey: "", baseURL: "", models: [""], knowledgeGraphModel: "" },
ollama: { apiKey: "", baseURL: "http://localhost:11434", models: [""], knowledgeGraphModel: "" },
"openai-compatible": { apiKey: "", baseURL: "http://localhost:1234/v1", models: [""], knowledgeGraphModel: "" },
})
const [modelsCatalog, setModelsCatalog] = useState<Record<string, LlmModelOption[]>>({})
const [modelsLoading, setModelsLoading] = useState(false)
@ -193,13 +194,14 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
// Free-text model input is shown for local providers or when the catalog
// returned no models to choose from.
const showModelInput = isLocalProvider || modelsForProvider.length === 0
const isMoreProvider = moreProviders.some(p => p.id === provider)
// The first entry in the models list is treated as the primary/active model.
const primaryModel = activeConfig.models[0] || ""
// A connection test needs a primary model plus whichever credentials the
// provider requires (API key and/or base URL). Note: the old condition
// referenced activeConfig.model, which no longer exists on the
// { models: string[] } config shape and would not compile.
const canTest =
  primaryModel.trim().length > 0 &&
  (!requiresApiKey || activeConfig.apiKey.trim().length > 0) &&
  (!requiresBaseURL || activeConfig.baseURL.trim().length > 0)
const updateConfig = useCallback(
(prov: LlmProviderFlavor, updates: Partial<{ apiKey: string; baseURL: string; model: string; knowledgeGraphModel: string }>) => {
(prov: LlmProviderFlavor, updates: Partial<{ apiKey: string; baseURL: string; models: string[]; knowledgeGraphModel: string }>) => {
setProviderConfigs(prev => ({
...prev,
[prov]: { ...prev[prov], ...updates },
@ -209,6 +211,39 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
[]
)
// Replace the model name at a given position in a provider's model list.
// Any previous connection-test result is invalidated by the edit.
const updateModelAt = useCallback(
  (prov: LlmProviderFlavor, index: number, value: string) => {
    setProviderConfigs(prev => {
      const updated = prev[prov].models.map((existing, i) =>
        i === index ? value : existing
      )
      return { ...prev, [prov]: { ...prev[prov], models: updated } }
    })
    setTestState({ status: "idle" })
  },
  []
)
// Append a blank model slot to the provider's list so the user can enter
// an additional assistant model.
const addModel = useCallback(
  (prov: LlmProviderFlavor) => {
    setProviderConfigs(prev => {
      const withBlank = { ...prev[prov], models: prev[prov].models.concat("") }
      return { ...prev, [prov]: withBlank }
    })
  },
  []
)
// Remove the model at the given index from a provider's list. The list is
// never allowed to become empty: a single blank entry is kept so the UI
// always renders at least one input. Clears any previous test result.
const removeModel = useCallback(
(prov: LlmProviderFlavor, index: number) => {
setProviderConfigs(prev => {
const models = prev[prov].models.filter((_, i) => i !== index)
return { ...prev, [prov]: { ...prev[prov], models: models.length > 0 ? models : [""] } }
})
setTestState({ status: "idle" })
},
[]
)
// Load current config from file
useEffect(() => {
if (!dialogOpen) return
@ -223,15 +258,42 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
if (parsed?.provider?.flavor && parsed?.model) {
const flavor = parsed.provider.flavor as LlmProviderFlavor
setProvider(flavor)
setProviderConfigs(prev => ({
...prev,
[flavor]: {
apiKey: parsed.provider.apiKey || "",
baseURL: parsed.provider.baseURL || (defaultBaseURLs[flavor] || ""),
model: parsed.model,
knowledgeGraphModel: parsed.knowledgeGraphModel || "",
},
}))
setDefaultProvider(flavor)
setProviderConfigs(prev => {
const next = { ...prev };
// Hydrate all saved providers from the providers map
if (parsed.providers) {
for (const [key, entry] of Object.entries(parsed.providers)) {
if (key in next) {
const e = entry as any;
const savedModels: string[] = Array.isArray(e.models) && e.models.length > 0
? e.models
: e.model ? [e.model] : [""];
next[key as LlmProviderFlavor] = {
apiKey: e.apiKey || "",
baseURL: e.baseURL || (defaultBaseURLs[key as LlmProviderFlavor] || ""),
models: savedModels,
knowledgeGraphModel: e.knowledgeGraphModel || "",
};
}
}
}
// Active provider takes precedence from top-level config,
// but only if it exists in the providers map (wasn't deleted)
if (parsed.providers?.[flavor]) {
const existingModels = next[flavor].models;
const activeModels = existingModels[0] === parsed.model
? existingModels
: [parsed.model, ...existingModels.filter((m: string) => m && m !== parsed.model)];
next[flavor] = {
apiKey: parsed.provider.apiKey || "",
baseURL: parsed.provider.baseURL || (defaultBaseURLs[flavor] || ""),
models: activeModels.length > 0 ? activeModels : [""],
knowledgeGraphModel: parsed.knowledgeGraphModel || "",
};
}
return next;
})
}
} catch {
// No existing config or parse error - use defaults
@ -275,11 +337,12 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
const next = { ...prev }
const cloudProviders: LlmProviderFlavor[] = ["openai", "anthropic", "google"]
for (const prov of cloudProviders) {
const models = modelsCatalog[prov]
if (models?.length && !next[prov].model) {
const catalog = modelsCatalog[prov]
if (catalog?.length && !next[prov].models[0]) {
const preferred = preferredDefaults[prov]
const hasPreferred = preferred && models.some(m => m.id === preferred)
next[prov] = { ...next[prov], model: hasPreferred ? preferred : (models[0]?.id || "") }
const hasPreferred = preferred && catalog.some(m => m.id === preferred)
const defaultModel = hasPreferred ? preferred! : (catalog[0]?.id || "")
next[prov] = { ...next[prov], models: [defaultModel] }
}
}
return next
@ -290,19 +353,23 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
if (!canTest) return
setTestState({ status: "testing" })
try {
const allModels = activeConfig.models.map(m => m.trim()).filter(Boolean)
const providerConfig = {
provider: {
flavor: provider,
apiKey: activeConfig.apiKey.trim() || undefined,
baseURL: activeConfig.baseURL.trim() || undefined,
},
model: activeConfig.model.trim(),
model: allModels[0] || "",
models: allModels,
knowledgeGraphModel: activeConfig.knowledgeGraphModel.trim() || undefined,
}
const result = await window.ipc.invoke("models:test", providerConfig)
if (result.success) {
await window.ipc.invoke("models:saveConfig", providerConfig)
setDefaultProvider(provider)
setTestState({ status: "success" })
window.dispatchEvent(new Event('models-config-changed'))
toast.success("Model configuration saved")
} else {
setTestState({ status: "error", error: result.error })
@ -314,24 +381,120 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
}
}, [canTest, provider, activeConfig])
const renderProviderCard = (p: { id: LlmProviderFlavor; name: string; description: string }) => (
<button
key={p.id}
onClick={() => {
setProvider(p.id)
setTestState({ status: "idle" })
}}
className={cn(
"rounded-md border px-3 py-2.5 text-left transition-colors",
provider === p.id
? "border-primary bg-primary/5"
: "border-border hover:bg-accent"
)}
>
<div className="text-sm font-medium">{p.name}</div>
<div className="text-xs text-muted-foreground mt-0.5">{p.description}</div>
</button>
)
// Persist the given provider as the app-wide default, without running a
// connection test first. Blank/whitespace model entries are dropped; bails
// out silently when no primary model remains. On success, other views are
// notified via the 'models-config-changed' window event.
const handleSetDefault = useCallback(async (prov: LlmProviderFlavor) => {
const config = providerConfigs[prov]
const allModels = config.models.map(m => m.trim()).filter(Boolean)
if (!allModels[0]) return
try {
await window.ipc.invoke("models:saveConfig", {
provider: {
flavor: prov,
apiKey: config.apiKey.trim() || undefined,
baseURL: config.baseURL.trim() || undefined,
},
// First non-blank entry is the active model; the full list is kept.
model: allModels[0],
models: allModels,
knowledgeGraphModel: config.knowledgeGraphModel.trim() || undefined,
})
setDefaultProvider(prov)
window.dispatchEvent(new Event('models-config-changed'))
toast.success("Default provider updated")
} catch {
toast.error("Failed to set default provider")
}
}, [providerConfigs])
// Remove a provider's saved entry from config/models.json (read-modify-write
// via IPC) and reset its local draft state back to defaults. The UI only
// offers "Remove" for non-default providers (see renderProviderCard), so the
// deleted provider is never the current default.
const handleDeleteProvider = useCallback(async (prov: LlmProviderFlavor) => {
try {
const result = await window.ipc.invoke("workspace:readFile", { path: "config/models.json" })
const parsed = JSON.parse(result.data)
if (parsed?.providers?.[prov]) {
delete parsed.providers[prov]
}
// If the deleted provider is the current top-level active one,
// switch top-level config to the current default provider
if (parsed?.provider?.flavor === prov && defaultProvider && defaultProvider !== prov) {
const defConfig = providerConfigs[defaultProvider]
const defModels = defConfig.models.map(m => m.trim()).filter(Boolean)
parsed.provider = {
flavor: defaultProvider,
apiKey: defConfig.apiKey.trim() || undefined,
baseURL: defConfig.baseURL.trim() || undefined,
}
parsed.model = defModels[0] || ""
parsed.models = defModels
parsed.knowledgeGraphModel = defConfig.knowledgeGraphModel.trim() || undefined
}
await window.ipc.invoke("workspace:writeFile", {
path: "config/models.json",
data: JSON.stringify(parsed, null, 2),
})
// Reset the local draft so the card shows a clean, unconfigured state.
setProviderConfigs(prev => ({
...prev,
[prov]: { apiKey: "", baseURL: defaultBaseURLs[prov] || "", models: [""], knowledgeGraphModel: "" },
}))
setTestState({ status: "idle" })
window.dispatchEvent(new Event('models-config-changed'))
toast.success("Provider configuration removed")
} catch {
toast.error("Failed to remove provider")
}
}, [defaultProvider, providerConfigs])
// Render one provider selection card. Clicking the card selects that
// provider for editing. The default provider shows a "Default" badge; the
// currently-selected, non-default provider with at least one configured
// model additionally exposes "Set as default" and "Remove" actions.
const renderProviderCard = (p: { id: LlmProviderFlavor; name: string; description: string }) => {
const isDefault = defaultProvider === p.id
const isSelected = provider === p.id
// True when the provider has a non-blank primary model configured.
const hasModel = providerConfigs[p.id].models[0]?.trim().length > 0
return (
<button
key={p.id}
onClick={() => {
setProvider(p.id)
setTestState({ status: "idle" })
}}
className={cn(
"rounded-md border px-3 py-2.5 text-left transition-colors relative",
isSelected
? "border-primary bg-primary/5"
: "border-border hover:bg-accent"
)}
>
<div className="flex items-center gap-1.5">
<span className="text-sm font-medium">{p.name}</span>
{isDefault && (
<span className="rounded-full bg-primary/10 px-1.5 py-0.5 text-[10px] font-medium leading-none text-primary">
Default
</span>
)}
</div>
<div className="text-xs text-muted-foreground mt-0.5">{p.description}</div>
{/* Inline actions rendered as role="button" spans (the card itself is a
<button>); stopPropagation keeps a click from also re-selecting the
card. Hidden for the default provider, so it cannot be removed. */}
{!isDefault && hasModel && isSelected && (
<div className="mt-1.5 flex items-center gap-3">
<span
role="button"
onClick={(e) => {
e.stopPropagation()
handleSetDefault(p.id)
}}
className="inline-flex text-[11px] text-muted-foreground hover:text-primary transition-colors cursor-pointer"
>
Set as default
</span>
<span
role="button"
onClick={(e) => {
e.stopPropagation()
handleDeleteProvider(p.id)
}}
className="inline-flex text-[11px] text-muted-foreground hover:text-destructive transition-colors cursor-pointer"
>
Remove
</span>
</div>
)}
</button>
)
}
if (configLoading) {
return (
@ -366,6 +529,7 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
{/* Model selection - side by side */}
<div className="grid grid-cols-2 gap-3">
{/* Assistant models (left column) */}
<div className="space-y-2">
<span className="text-xs font-medium text-muted-foreground uppercase tracking-wider">Assistant model</span>
{modelsLoading ? (
@ -373,34 +537,58 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
<Loader2 className="size-4 animate-spin" />
Loading...
</div>
) : showModelInput ? (
<Input
value={activeConfig.model}
onChange={(e) => updateConfig(provider, { model: e.target.value })}
placeholder="Enter model"
/>
) : (
<Select
value={activeConfig.model}
onValueChange={(value) => updateConfig(provider, { model: value })}
>
<SelectTrigger>
<SelectValue placeholder="Select a model" />
</SelectTrigger>
<SelectContent>
{modelsForProvider.map((model) => (
<SelectItem key={model.id} value={model.id}>
{model.name || model.id}
</SelectItem>
))}
</SelectContent>
</Select>
<div className="space-y-2">
{activeConfig.models.map((model, index) => (
<div key={index} className="group/model relative">
{showModelInput ? (
<Input
value={model}
onChange={(e) => updateModelAt(provider, index, e.target.value)}
placeholder="Enter model"
/>
) : (
<Select
value={model}
onValueChange={(value) => updateModelAt(provider, index, value)}
>
<SelectTrigger>
<SelectValue placeholder="Select a model" />
</SelectTrigger>
<SelectContent>
{modelsForProvider.map((m) => (
<SelectItem key={m.id} value={m.id}>
{m.name || m.id}
</SelectItem>
))}
</SelectContent>
</Select>
)}
{activeConfig.models.length > 1 && (
<button
onClick={() => removeModel(provider, index)}
className="absolute right-8 top-1/2 -translate-y-1/2 flex size-6 items-center justify-center rounded text-muted-foreground opacity-0 transition-opacity hover:text-foreground group-hover/model:opacity-100"
>
<X className="size-3.5" />
</button>
)}
</div>
))}
<button
onClick={() => addModel(provider)}
className="flex items-center gap-1.5 text-xs text-muted-foreground hover:text-foreground transition-colors"
>
<Plus className="size-3.5" />
Add assistant model
</button>
</div>
)}
{modelsError && (
<div className="text-xs text-destructive">{modelsError}</div>
)}
</div>
{/* Knowledge graph model (right column) */}
<div className="space-y-2">
<span className="text-xs font-medium text-muted-foreground uppercase tracking-wider">Knowledge graph model</span>
{modelsLoading ? (
@ -412,7 +600,7 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
<Input
value={activeConfig.knowledgeGraphModel}
onChange={(e) => updateConfig(provider, { knowledgeGraphModel: e.target.value })}
placeholder={activeConfig.model || "Enter model"}
placeholder={primaryModel || "Enter model"}
/>
) : (
<Select
@ -424,9 +612,9 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
</SelectTrigger>
<SelectContent>
<SelectItem value="__same__">Same as assistant</SelectItem>
{modelsForProvider.map((model) => (
<SelectItem key={model.id} value={model.id}>
{model.name || model.id}
{modelsForProvider.map((m) => (
<SelectItem key={m.id} value={m.id}>
{m.name || m.id}
</SelectItem>
))}
</SelectContent>

View file

@ -34,6 +34,26 @@ export class FSModelConfigRepo implements IModelConfigRepo {
}
// Persist the model config, merging the saved provider entry into the
// file's 'providers' map so previously configured providers survive.
async setConfig(config: z.infer<typeof ModelConfig>): Promise<void> {
  // Read the existing providers map BEFORE writing anything. Writing the
  // bare config first (as the old code did) clobbered the file, so the
  // subsequent read found no 'providers' key and every previously saved
  // provider entry was erased on each save.
  let existingProviders: Record<string, Record<string, unknown>> = {};
  try {
    const raw = await fs.readFile(this.configPath, "utf8");
    const existing = JSON.parse(raw);
    existingProviders = existing.providers || {};
  } catch {
    // No existing config on disk yet — start from an empty map.
  }
  // Upsert the entry for the provider being saved, preserving any extra
  // fields a previous save stored under this flavor.
  existingProviders[config.provider.flavor] = {
    ...existingProviders[config.provider.flavor],
    apiKey: config.provider.apiKey,
    baseURL: config.provider.baseURL,
    headers: config.provider.headers,
    model: config.model,
    models: config.models,
    knowledgeGraphModel: config.knowledgeGraphModel,
  };
  // Single write of the merged config (top-level fields + providers map).
  const toWrite = { ...config, providers: existingProviders };
  await fs.writeFile(this.configPath, JSON.stringify(toWrite, null, 2));
}
}

View file

@ -10,5 +10,6 @@ export const LlmProvider = z.object({
// Top-level LLM configuration: the active provider plus its selected model.
export const LlmModelConfig = z.object({
provider: LlmProvider,
// The currently active model for the provider.
model: z.string(),
// Full list of models configured for the provider. Optional so configs
// written before multi-model support still validate.
models: z.array(z.string()).optional(),
knowledgeGraphModel: z.string().optional(),
});