mirror of
https://github.com/rowboatlabs/rowboat.git
synced 2026-05-06 13:52:44 +02:00
freeze model + provider per run at creation time
The model dropdown was broken in two ways: it wrote to ~/.rowboat/config/models.json
(the BYOK creds file, stamped with a fake `flavor: 'openrouter'` to satisfy zod
when signed in), and the runtime ignored that write entirely for signed-in users
because `streamAgent` hard-coded `gpt-5.4`. Model selection was also globally
scoped, so every chat shared one brain.
This change moves model + provider out of the global config and onto the run
itself, resolved once at runs:create and frozen for the run's lifetime.
## Resolution
`runsCore.createRun` resolves per-field, falling through:
run.model = opts.model ?? agent.model ?? defaults.model
run.provider = opts.provider ?? agent.provider ?? defaults.provider
A new `core/models/defaults.ts` is the only place in the codebase that branches
on signed-in state. `getDefaultModelAndProvider()` returns name strings;
`resolveProviderConfig(name)` does the name → full LlmProvider lookup at
runtime. `createProvider` learns about `flavor: 'rowboat'` so the gateway is
just another flavor.
`provider` is stored as a name (e.g. `"rowboat"`, `"openai"`), not a full
LlmProvider object. API keys never get written into the JSONL log; rotating a
key in models.json applies to existing runs without re-creation. Cost: deleting
a provider from settings breaks runs that referenced it (clear error surfaced
via `resolveProviderConfig`).
## Runtime
`streamAgent` no longer resolves anything — it reads `state.runModel` /
`state.runProvider`, looks up the provider config, and instantiates the provider. Subflows
inherit the parent run's pair, so KG / inline-task subagents run on whatever
the main run resolved to at creation. The `knowledgeGraphAgents` array,
`isKgAgent`, and the per-agent default constants are gone.
KG / inline-task / pre-built agents declare their preferred model in YAML
frontmatter (claude-haiku-4.5 / claude-sonnet-4.6) — used at resolution time
when those agents are themselves the top-level agent of a run (background
triggers, scheduled tasks, etc.).
## Standalone callers
Non-run LLM call sites (summarize_meeting, track/routing, builtin-tools
parseFile) and `agent-schedule/runner` were branching on signed-in
independently. They all route through `getDefaultModelAndProvider` +
`resolveProviderConfig` + `createProvider` now; `agent-schedule/runner`
switched from raw `runsRepo.create` to `runsCore.createRun` so resolution
applies to scheduled-agent runs too.
## UI
`chat-input-with-mentions` stops calling `models:saveConfig`. The dropdown
notifies the parent via `onSelectedModelChange` ({provider, model} as names);
App.tsx stashes selection per-tab and passes it to the next `runs:create`.
When a run already exists, the input fetches it and renders a static label —
model can't change mid-run.
## Legacy runs
A lenient zod schema in `repo.ts` (`StartEvent.extend(...optional)` plus
`RunEvent.or(LegacyStartEvent)`) parses pre-existing runs. `repo.fetch` fills
missing model/provider from current defaults and returns the strict canonical
`Run` type. No file-rewriting migration; no impact on the canonical schema in
`@x/shared`.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
51f2ad6e8a
commit
5c4aa77255
22 changed files with 256 additions and 179 deletions
|
|
@ -817,6 +817,7 @@ function App() {
|
|||
const chatTabIdCounterRef = useRef(0)
|
||||
const newChatTabId = () => `chat-tab-${++chatTabIdCounterRef.current}`
|
||||
const chatDraftsRef = useRef(new Map<string, string>())
|
||||
const selectedModelByTabRef = useRef(new Map<string, { provider: string; model: string }>())
|
||||
const chatScrollTopByTabRef = useRef(new Map<string, number>())
|
||||
const [toolOpenByTab, setToolOpenByTab] = useState<Record<string, Record<string, boolean>>>({})
|
||||
const [chatViewportAnchorByTab, setChatViewportAnchorByTab] = useState<Record<string, ChatViewportAnchorState>>({})
|
||||
|
|
@ -2165,8 +2166,10 @@ function App() {
|
|||
let isNewRun = false
|
||||
let newRunCreatedAt: string | null = null
|
||||
if (!currentRunId) {
|
||||
const selected = selectedModelByTabRef.current.get(submitTabId)
|
||||
const run = await window.ipc.invoke('runs:create', {
|
||||
agentId,
|
||||
...(selected ? { model: selected.model, provider: selected.provider } : {}),
|
||||
})
|
||||
currentRunId = run.id
|
||||
newRunCreatedAt = run.createdAt
|
||||
|
|
@ -2471,6 +2474,7 @@ function App() {
|
|||
return next
|
||||
})
|
||||
chatDraftsRef.current.delete(tabId)
|
||||
selectedModelByTabRef.current.delete(tabId)
|
||||
chatScrollTopByTabRef.current.delete(tabId)
|
||||
setToolOpenByTab((prev) => {
|
||||
if (!(tabId in prev)) return prev
|
||||
|
|
@ -4644,6 +4648,13 @@ function App() {
|
|||
runId={tabState.runId}
|
||||
initialDraft={chatDraftsRef.current.get(tab.id)}
|
||||
onDraftChange={(text) => setChatDraftForTab(tab.id, text)}
|
||||
onSelectedModelChange={(m) => {
|
||||
if (m) {
|
||||
selectedModelByTabRef.current.set(tab.id, m)
|
||||
} else {
|
||||
selectedModelByTabRef.current.delete(tab.id)
|
||||
}
|
||||
}}
|
||||
isRecording={isActive && isRecording}
|
||||
recordingText={isActive ? voice.interimText : undefined}
|
||||
recordingState={isActive ? (voice.state === 'connecting' ? 'connecting' : 'listening') : undefined}
|
||||
|
|
@ -4697,6 +4708,13 @@ function App() {
|
|||
onPresetMessageConsumed={() => setPresetMessage(undefined)}
|
||||
getInitialDraft={(tabId) => chatDraftsRef.current.get(tabId)}
|
||||
onDraftChangeForTab={setChatDraftForTab}
|
||||
onSelectedModelChangeForTab={(tabId, m) => {
|
||||
if (m) {
|
||||
selectedModelByTabRef.current.set(tabId, m)
|
||||
} else {
|
||||
selectedModelByTabRef.current.delete(tabId)
|
||||
}
|
||||
}}
|
||||
pendingAskHumanRequests={pendingAskHumanRequests}
|
||||
allPermissionRequests={allPermissionRequests}
|
||||
permissionResponses={permissionResponses}
|
||||
|
|
|
|||
|
|
@ -69,13 +69,16 @@ const providerDisplayNames: Record<string, string> = {
|
|||
rowboat: 'Rowboat',
|
||||
}
|
||||
|
||||
type ProviderName = "openai" | "anthropic" | "google" | "openrouter" | "aigateway" | "ollama" | "openai-compatible" | "rowboat"
|
||||
|
||||
interface ConfiguredModel {
|
||||
flavor: "openai" | "anthropic" | "google" | "openrouter" | "aigateway" | "ollama" | "openai-compatible" | "rowboat"
|
||||
provider: ProviderName
|
||||
model: string
|
||||
}
|
||||
|
||||
export interface SelectedModel {
|
||||
provider: string
|
||||
model: string
|
||||
apiKey?: string
|
||||
baseURL?: string
|
||||
headers?: Record<string, string>
|
||||
knowledgeGraphModel?: string
|
||||
}
|
||||
|
||||
function getAttachmentIcon(kind: AttachmentIconKind) {
|
||||
|
|
@ -120,6 +123,8 @@ interface ChatInputInnerProps {
|
|||
ttsMode?: 'summary' | 'full'
|
||||
onToggleTts?: () => void
|
||||
onTtsModeChange?: (mode: 'summary' | 'full') => void
|
||||
/** Fired when the user picks a different model in the dropdown (only when no run exists yet). */
|
||||
onSelectedModelChange?: (model: SelectedModel | null) => void
|
||||
}
|
||||
|
||||
function ChatInputInner({
|
||||
|
|
@ -145,6 +150,7 @@ function ChatInputInner({
|
|||
ttsMode,
|
||||
onToggleTts,
|
||||
onTtsModeChange,
|
||||
onSelectedModelChange,
|
||||
}: ChatInputInnerProps) {
|
||||
const controller = usePromptInputController()
|
||||
const message = controller.textInput.value
|
||||
|
|
@ -155,10 +161,27 @@ function ChatInputInner({
|
|||
|
||||
const [configuredModels, setConfiguredModels] = useState<ConfiguredModel[]>([])
|
||||
const [activeModelKey, setActiveModelKey] = useState('')
|
||||
const [lockedModel, setLockedModel] = useState<SelectedModel | null>(null)
|
||||
const [searchEnabled, setSearchEnabled] = useState(false)
|
||||
const [searchAvailable, setSearchAvailable] = useState(false)
|
||||
const [isRowboatConnected, setIsRowboatConnected] = useState(false)
|
||||
|
||||
// When a run exists, freeze the dropdown to the run's resolved model+provider.
|
||||
useEffect(() => {
|
||||
if (!runId) {
|
||||
setLockedModel(null)
|
||||
return
|
||||
}
|
||||
let cancelled = false
|
||||
window.ipc.invoke('runs:fetch', { runId }).then((run) => {
|
||||
if (cancelled) return
|
||||
if (run.provider && run.model) {
|
||||
setLockedModel({ provider: run.provider, model: run.model })
|
||||
}
|
||||
}).catch(() => { /* legacy run or fetch failure — leave unlocked */ })
|
||||
return () => { cancelled = true }
|
||||
}, [runId])
|
||||
|
||||
// Check Rowboat sign-in state
|
||||
useEffect(() => {
|
||||
window.ipc.invoke('oauth:getState', null).then((result) => {
|
||||
|
|
@ -176,42 +199,20 @@ function ChatInputInner({
|
|||
return cleanup
|
||||
}, [])
|
||||
|
||||
// Load model config (gateway when signed in, local config when BYOK)
|
||||
// Load the list of models the user can choose from.
|
||||
// Signed-in: gateway model list. Signed-out: providers configured in models.json.
|
||||
const loadModelConfig = useCallback(async () => {
|
||||
try {
|
||||
if (isRowboatConnected) {
|
||||
// Fetch gateway models
|
||||
const listResult = await window.ipc.invoke('models:list', null)
|
||||
const rowboatProvider = listResult.providers?.find(
|
||||
(p: { id: string }) => p.id === 'rowboat'
|
||||
)
|
||||
const models: ConfiguredModel[] = (rowboatProvider?.models || []).map(
|
||||
(m: { id: string }) => ({ flavor: 'rowboat', model: m.id })
|
||||
(m: { id: string }) => ({ provider: 'rowboat', model: m.id })
|
||||
)
|
||||
|
||||
// Read current default from config
|
||||
let defaultModel = ''
|
||||
try {
|
||||
const result = await window.ipc.invoke('workspace:readFile', { path: 'config/models.json' })
|
||||
const parsed = JSON.parse(result.data)
|
||||
defaultModel = parsed?.model || ''
|
||||
} catch { /* no config yet */ }
|
||||
|
||||
if (defaultModel) {
|
||||
models.sort((a, b) => {
|
||||
if (a.model === defaultModel) return -1
|
||||
if (b.model === defaultModel) return 1
|
||||
return 0
|
||||
})
|
||||
}
|
||||
|
||||
setConfiguredModels(models)
|
||||
const activeKey = defaultModel
|
||||
? `rowboat/${defaultModel}`
|
||||
: models[0] ? `rowboat/${models[0].model}` : ''
|
||||
if (activeKey) setActiveModelKey(activeKey)
|
||||
} else {
|
||||
// BYOK: read from local models.json
|
||||
const result = await window.ipc.invoke('workspace:readFile', { path: 'config/models.json' })
|
||||
const parsed = JSON.parse(result.data)
|
||||
const models: ConfiguredModel[] = []
|
||||
|
|
@ -223,32 +224,12 @@ function ChatInputInner({
|
|||
const allModels = modelList.length > 0 ? modelList : singleModel ? [singleModel] : []
|
||||
for (const model of allModels) {
|
||||
if (model) {
|
||||
models.push({
|
||||
flavor: flavor as ConfiguredModel['flavor'],
|
||||
model,
|
||||
apiKey: (e.apiKey as string) || undefined,
|
||||
baseURL: (e.baseURL as string) || undefined,
|
||||
headers: (e.headers as Record<string, string>) || undefined,
|
||||
knowledgeGraphModel: (e.knowledgeGraphModel as string) || undefined,
|
||||
})
|
||||
models.push({ provider: flavor as ProviderName, model })
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const defaultKey = parsed?.provider?.flavor && parsed?.model
|
||||
? `${parsed.provider.flavor}/${parsed.model}`
|
||||
: ''
|
||||
models.sort((a, b) => {
|
||||
const aKey = `${a.flavor}/${a.model}`
|
||||
const bKey = `${b.flavor}/${b.model}`
|
||||
if (aKey === defaultKey) return -1
|
||||
if (bKey === defaultKey) return 1
|
||||
return 0
|
||||
})
|
||||
setConfiguredModels(models)
|
||||
if (defaultKey) {
|
||||
setActiveModelKey(defaultKey)
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// No config yet
|
||||
|
|
@ -284,40 +265,15 @@ function ChatInputInner({
|
|||
checkSearch()
|
||||
}, [isActive, isRowboatConnected])
|
||||
|
||||
const handleModelChange = useCallback(async (key: string) => {
|
||||
const entry = configuredModels.find((m) => `${m.flavor}/${m.model}` === key)
|
||||
// Selecting a model affects only the *next* run created from this tab.
|
||||
// Once a run exists, model is frozen on the run and the dropdown is read-only.
|
||||
const handleModelChange = useCallback((key: string) => {
|
||||
if (lockedModel) return
|
||||
const entry = configuredModels.find((m) => `${m.provider}/${m.model}` === key)
|
||||
if (!entry) return
|
||||
setActiveModelKey(key)
|
||||
|
||||
try {
|
||||
if (entry.flavor === 'rowboat') {
|
||||
// Gateway model — save with valid Zod flavor, no credentials
|
||||
await window.ipc.invoke('models:saveConfig', {
|
||||
provider: { flavor: 'openrouter' as const },
|
||||
model: entry.model,
|
||||
knowledgeGraphModel: entry.knowledgeGraphModel,
|
||||
})
|
||||
} else {
|
||||
// BYOK — preserve full provider config
|
||||
const providerModels = configuredModels
|
||||
.filter((m) => m.flavor === entry.flavor)
|
||||
.map((m) => m.model)
|
||||
await window.ipc.invoke('models:saveConfig', {
|
||||
provider: {
|
||||
flavor: entry.flavor,
|
||||
apiKey: entry.apiKey,
|
||||
baseURL: entry.baseURL,
|
||||
headers: entry.headers,
|
||||
},
|
||||
model: entry.model,
|
||||
models: providerModels,
|
||||
knowledgeGraphModel: entry.knowledgeGraphModel,
|
||||
})
|
||||
}
|
||||
} catch {
|
||||
toast.error('Failed to switch model')
|
||||
}
|
||||
}, [configuredModels])
|
||||
onSelectedModelChange?.({ provider: entry.provider, model: entry.model })
|
||||
}, [configuredModels, lockedModel, onSelectedModelChange])
|
||||
|
||||
// Restore the tab draft when this input mounts.
|
||||
useEffect(() => {
|
||||
|
|
@ -555,7 +511,14 @@ function ChatInputInner({
|
|||
)
|
||||
)}
|
||||
<div className="flex-1" />
|
||||
{configuredModels.length > 0 && (
|
||||
{lockedModel ? (
|
||||
<span
|
||||
className="flex h-7 shrink-0 items-center gap-1 rounded-full px-2 text-xs text-muted-foreground"
|
||||
title={`${providerDisplayNames[lockedModel.provider] || lockedModel.provider} — fixed for this chat`}
|
||||
>
|
||||
<span className="max-w-[150px] truncate">{lockedModel.model}</span>
|
||||
</span>
|
||||
) : configuredModels.length > 0 ? (
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<button
|
||||
|
|
@ -563,7 +526,7 @@ function ChatInputInner({
|
|||
className="flex h-7 shrink-0 items-center gap-1 rounded-full px-2 text-xs text-muted-foreground transition-colors hover:bg-muted hover:text-foreground"
|
||||
>
|
||||
<span className="max-w-[150px] truncate">
|
||||
{configuredModels.find((m) => `${m.flavor}/${m.model}` === activeModelKey)?.model || configuredModels[0]?.model || 'Model'}
|
||||
{configuredModels.find((m) => `${m.provider}/${m.model}` === activeModelKey)?.model || configuredModels[0]?.model || 'Model'}
|
||||
</span>
|
||||
<ChevronDown className="h-3 w-3" />
|
||||
</button>
|
||||
|
|
@ -571,18 +534,18 @@ function ChatInputInner({
|
|||
<DropdownMenuContent align="end">
|
||||
<DropdownMenuRadioGroup value={activeModelKey} onValueChange={handleModelChange}>
|
||||
{configuredModels.map((m) => {
|
||||
const key = `${m.flavor}/${m.model}`
|
||||
const key = `${m.provider}/${m.model}`
|
||||
return (
|
||||
<DropdownMenuRadioItem key={key} value={key}>
|
||||
<span className="truncate">{m.model}</span>
|
||||
<span className="ml-2 text-xs text-muted-foreground">{providerDisplayNames[m.flavor] || m.flavor}</span>
|
||||
<span className="ml-2 text-xs text-muted-foreground">{providerDisplayNames[m.provider] || m.provider}</span>
|
||||
</DropdownMenuRadioItem>
|
||||
)
|
||||
})}
|
||||
</DropdownMenuRadioGroup>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
)}
|
||||
) : null}
|
||||
{onToggleTts && ttsAvailable && (
|
||||
<div className="flex shrink-0 items-center">
|
||||
<Tooltip>
|
||||
|
|
@ -729,6 +692,7 @@ export interface ChatInputWithMentionsProps {
|
|||
ttsMode?: 'summary' | 'full'
|
||||
onToggleTts?: () => void
|
||||
onTtsModeChange?: (mode: 'summary' | 'full') => void
|
||||
onSelectedModelChange?: (model: SelectedModel | null) => void
|
||||
}
|
||||
|
||||
export function ChatInputWithMentions({
|
||||
|
|
@ -757,6 +721,7 @@ export function ChatInputWithMentions({
|
|||
ttsMode,
|
||||
onToggleTts,
|
||||
onTtsModeChange,
|
||||
onSelectedModelChange,
|
||||
}: ChatInputWithMentionsProps) {
|
||||
return (
|
||||
<PromptInputProvider knowledgeFiles={knowledgeFiles} recentFiles={recentFiles} visibleFiles={visibleFiles}>
|
||||
|
|
@ -783,6 +748,7 @@ export function ChatInputWithMentions({
|
|||
ttsMode={ttsMode}
|
||||
onToggleTts={onToggleTts}
|
||||
onTtsModeChange={onTtsModeChange}
|
||||
onSelectedModelChange={onSelectedModelChange}
|
||||
/>
|
||||
</PromptInputProvider>
|
||||
)
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ import { type PromptInputMessage, type FileMention } from '@/components/ai-eleme
|
|||
import { FileCardProvider } from '@/contexts/file-card-context'
|
||||
import { MarkdownPreOverride } from '@/components/ai-elements/markdown-code-override'
|
||||
import { TabBar, type ChatTab } from '@/components/tab-bar'
|
||||
import { ChatInputWithMentions, type StagedAttachment } from '@/components/chat-input-with-mentions'
|
||||
import { ChatInputWithMentions, type StagedAttachment, type SelectedModel } from '@/components/chat-input-with-mentions'
|
||||
import { ChatMessageAttachments } from '@/components/chat-message-attachments'
|
||||
import { wikiLabel } from '@/lib/wiki-links'
|
||||
import {
|
||||
|
|
@ -158,6 +158,7 @@ interface ChatSidebarProps {
|
|||
onPresetMessageConsumed?: () => void
|
||||
getInitialDraft?: (tabId: string) => string | undefined
|
||||
onDraftChangeForTab?: (tabId: string, text: string) => void
|
||||
onSelectedModelChangeForTab?: (tabId: string, model: SelectedModel | null) => void
|
||||
pendingAskHumanRequests?: ChatTabViewState['pendingAskHumanRequests']
|
||||
allPermissionRequests?: ChatTabViewState['allPermissionRequests']
|
||||
permissionResponses?: ChatTabViewState['permissionResponses']
|
||||
|
|
@ -211,6 +212,7 @@ export function ChatSidebar({
|
|||
onPresetMessageConsumed,
|
||||
getInitialDraft,
|
||||
onDraftChangeForTab,
|
||||
onSelectedModelChangeForTab,
|
||||
pendingAskHumanRequests = new Map(),
|
||||
allPermissionRequests = new Map(),
|
||||
permissionResponses = new Map(),
|
||||
|
|
@ -662,6 +664,7 @@ export function ChatSidebar({
|
|||
runId={tabState.runId}
|
||||
initialDraft={getInitialDraft?.(tab.id)}
|
||||
onDraftChange={onDraftChangeForTab ? (text) => onDraftChangeForTab(tab.id, text) : undefined}
|
||||
onSelectedModelChange={onSelectedModelChangeForTab ? (m) => onSelectedModelChangeForTab(tab.id, m) : undefined}
|
||||
isRecording={isActive && isRecording}
|
||||
recordingText={isActive ? recordingText : undefined}
|
||||
recordingState={isActive ? recordingState : undefined}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue