Add config option to specify a separate knowledge graph model

This commit is contained in:
Arjun 2026-02-27 09:38:00 +05:30
parent cccb7a8a65
commit 83fe81ef5a
3 changed files with 53 additions and 11 deletions

View file

@ -167,14 +167,14 @@ const defaultBaseURLs: Partial<Record<LlmProviderFlavor, string>> = {
function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) { function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
const [provider, setProvider] = useState<LlmProviderFlavor>("openai") const [provider, setProvider] = useState<LlmProviderFlavor>("openai")
const [providerConfigs, setProviderConfigs] = useState<Record<LlmProviderFlavor, { apiKey: string; baseURL: string; model: string }>>({ const [providerConfigs, setProviderConfigs] = useState<Record<LlmProviderFlavor, { apiKey: string; baseURL: string; model: string; knowledgeGraphModel: string }>>({
openai: { apiKey: "", baseURL: "", model: "" }, openai: { apiKey: "", baseURL: "", model: "", knowledgeGraphModel: "" },
anthropic: { apiKey: "", baseURL: "", model: "" }, anthropic: { apiKey: "", baseURL: "", model: "", knowledgeGraphModel: "" },
google: { apiKey: "", baseURL: "", model: "" }, google: { apiKey: "", baseURL: "", model: "", knowledgeGraphModel: "" },
openrouter: { apiKey: "", baseURL: "", model: "" }, openrouter: { apiKey: "", baseURL: "", model: "", knowledgeGraphModel: "" },
aigateway: { apiKey: "", baseURL: "", model: "" }, aigateway: { apiKey: "", baseURL: "", model: "", knowledgeGraphModel: "" },
ollama: { apiKey: "", baseURL: "http://localhost:11434", model: "" }, ollama: { apiKey: "", baseURL: "http://localhost:11434", model: "", knowledgeGraphModel: "" },
"openai-compatible": { apiKey: "", baseURL: "http://localhost:1234/v1", model: "" }, "openai-compatible": { apiKey: "", baseURL: "http://localhost:1234/v1", model: "", knowledgeGraphModel: "" },
}) })
const [modelsCatalog, setModelsCatalog] = useState<Record<string, LlmModelOption[]>>({}) const [modelsCatalog, setModelsCatalog] = useState<Record<string, LlmModelOption[]>>({})
const [modelsLoading, setModelsLoading] = useState(false) const [modelsLoading, setModelsLoading] = useState(false)
@ -199,7 +199,7 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
(!requiresBaseURL || activeConfig.baseURL.trim().length > 0) (!requiresBaseURL || activeConfig.baseURL.trim().length > 0)
const updateConfig = useCallback( const updateConfig = useCallback(
(prov: LlmProviderFlavor, updates: Partial<{ apiKey: string; baseURL: string; model: string }>) => { (prov: LlmProviderFlavor, updates: Partial<{ apiKey: string; baseURL: string; model: string; knowledgeGraphModel: string }>) => {
setProviderConfigs(prev => ({ setProviderConfigs(prev => ({
...prev, ...prev,
[prov]: { ...prev[prov], ...updates }, [prov]: { ...prev[prov], ...updates },
@ -229,6 +229,7 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
apiKey: parsed.provider.apiKey || "", apiKey: parsed.provider.apiKey || "",
baseURL: parsed.provider.baseURL || (defaultBaseURLs[flavor] || ""), baseURL: parsed.provider.baseURL || (defaultBaseURLs[flavor] || ""),
model: parsed.model, model: parsed.model,
knowledgeGraphModel: parsed.knowledgeGraphModel || "",
}, },
})) }))
} }
@ -296,6 +297,7 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
baseURL: activeConfig.baseURL.trim() || undefined, baseURL: activeConfig.baseURL.trim() || undefined,
}, },
model: activeConfig.model.trim(), model: activeConfig.model.trim(),
knowledgeGraphModel: activeConfig.knowledgeGraphModel.trim() || undefined,
} }
const result = await window.ipc.invoke("models:test", providerConfig) const result = await window.ipc.invoke("models:test", providerConfig)
if (result.success) { if (result.success) {
@ -364,7 +366,7 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
{/* Model selection */} {/* Model selection */}
<div className="space-y-2"> <div className="space-y-2">
<span className="text-xs font-medium text-muted-foreground uppercase tracking-wider">Model</span> <span className="text-xs font-medium text-muted-foreground uppercase tracking-wider">Assistant model</span>
{modelsLoading ? ( {modelsLoading ? (
<div className="flex items-center gap-2 text-sm text-muted-foreground"> <div className="flex items-center gap-2 text-sm text-muted-foreground">
<Loader2 className="size-4 animate-spin" /> <Loader2 className="size-4 animate-spin" />
@ -398,6 +400,40 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
)} )}
</div> </div>
{/* Knowledge graph model selection */}
<div className="space-y-2">
<span className="text-xs font-medium text-muted-foreground uppercase tracking-wider">Knowledge graph model</span>
<p className="text-xs text-muted-foreground">Used for note creation, email drafts, and meeting prep. Defaults to assistant model if empty.</p>
{modelsLoading ? (
<div className="flex items-center gap-2 text-sm text-muted-foreground">
<Loader2 className="size-4 animate-spin" />
Loading models...
</div>
) : showModelInput ? (
<Input
value={activeConfig.knowledgeGraphModel}
onChange={(e) => updateConfig(provider, { knowledgeGraphModel: e.target.value })}
placeholder={activeConfig.model || "Enter model"}
/>
) : (
<Select
value={activeConfig.knowledgeGraphModel}
onValueChange={(value) => updateConfig(provider, { knowledgeGraphModel: value })}
>
<SelectTrigger>
<SelectValue placeholder={activeConfig.model || "Same as assistant model"} />
</SelectTrigger>
<SelectContent>
{modelsForProvider.map((model) => (
<SelectItem key={model.id} value={model.id}>
{model.name || model.id}
</SelectItem>
))}
</SelectContent>
</Select>
)}
</div>
{/* API Key */} {/* API Key */}
{showApiKey && ( {showApiKey && (
<div className="space-y-2"> <div className="space-y-2">

View file

@ -706,7 +706,12 @@ export async function* streamAgent({
// set up provider + model // set up provider + model
const provider = createProvider(modelConfig.provider); const provider = createProvider(modelConfig.provider);
const model = provider.languageModel(modelConfig.model); const knowledgeGraphAgents = ["note_creation", "email-draft", "meeting-prep"];
const modelId = (knowledgeGraphAgents.includes(state.agentName!) && modelConfig.knowledgeGraphModel)
? modelConfig.knowledgeGraphModel
: modelConfig.model;
const model = provider.languageModel(modelId);
console.log(`[main] [GraphBuilder] Agent "${state.agentName}" using model: ${modelId}`);
let loopCounter = 0; let loopCounter = 0;
while (true) { while (true) {

View file

@ -10,4 +10,5 @@ export const LlmProvider = z.object({
export const LlmModelConfig = z.object({ export const LlmModelConfig = z.object({
provider: LlmProvider, provider: LlmProvider,
model: z.string(), model: z.string(),
knowledgeGraphModel: z.string().optional(),
}); });