Merge branch 'dev' of github.com:rowboatlabs/rowboat into dev

This commit is contained in:
tusharmagar 2026-02-16 16:41:09 +05:30
commit 96e2625c6e
15 changed files with 648 additions and 124 deletions

View file

@ -33,9 +33,10 @@ import {
usePromptInputController,
type FileMention,
} from '@/components/ai-elements/prompt-input';
import { Reasoning, ReasoningContent, ReasoningTrigger } from '@/components/ai-elements/reasoning';
import { Shimmer } from '@/components/ai-elements/shimmer';
import { Tool, ToolContent, ToolHeader, ToolInput, ToolOutput } from '@/components/ai-elements/tool';
import { WebSearchResult } from '@/components/ai-elements/web-search-result';
import { PermissionRequest } from '@/components/ai-elements/permission-request';
import { AskHumanRequest } from '@/components/ai-elements/ask-human-request';
import { Suggestions } from '@/components/ai-elements/suggestions';
@ -54,6 +55,7 @@ import { FileCardProvider } from '@/contexts/file-card-context'
import { MarkdownPreOverride } from '@/components/ai-elements/markdown-code-override'
import { AgentScheduleConfig } from '@x/shared/dist/agent-schedule.js'
import { AgentScheduleState } from '@x/shared/dist/agent-schedule-state.js'
import { toast } from "sonner"
type DirEntry = z.infer<typeof workspace.DirEntry>
type RunEventType = z.infer<typeof RunEvent>
@ -80,20 +82,20 @@ interface ToolCall {
timestamp: number;
}
interface ReasoningBlock {
interface ErrorMessage {
id: string;
content: string;
kind: 'error';
message: string;
timestamp: number;
}
type ConversationItem = ChatMessage | ToolCall | ReasoningBlock;
type ConversationItem = ChatMessage | ToolCall | ErrorMessage;
type ToolState = 'input-streaming' | 'input-available' | 'output-available' | 'output-error';
const isChatMessage = (item: ConversationItem): item is ChatMessage => 'role' in item
const isToolCall = (item: ConversationItem): item is ToolCall => 'name' in item
const isReasoningBlock = (item: ConversationItem): item is ReasoningBlock =>
'content' in item && !('role' in item) && !('name' in item)
const isErrorMessage = (item: ConversationItem): item is ErrorMessage => 'kind' in item && item.kind === 'error'
const toToolState = (status: ToolCall['status']): ToolState => {
switch (status) {
@ -642,7 +644,6 @@ function App() {
const [message, setMessage] = useState<string>('')
const [conversation, setConversation] = useState<ConversationItem[]>([])
const [currentAssistantMessage, setCurrentAssistantMessage] = useState<string>('')
const [currentReasoning, setCurrentReasoning] = useState<string>('')
const [, setModelUsage] = useState<LanguageModelUsage | null>(null)
const [runId, setRunId] = useState<string | null>(null)
const runIdRef = useRef<string | null>(null)
@ -650,7 +651,7 @@ function App() {
const [isProcessing, setIsProcessing] = useState(false)
const [processingRunIds, setProcessingRunIds] = useState<Set<string>>(new Set())
const processingRunIdsRef = useRef<Set<string>>(new Set())
const streamingBuffersRef = useRef<Map<string, { assistant: string; reasoning: string }>>(new Map())
const streamingBuffersRef = useRef<Map<string, { assistant: string }>>(new Map())
const [isStopping, setIsStopping] = useState(false)
const [stopClickedAt, setStopClickedAt] = useState<number | null>(null)
const [agentId] = useState<string>('copilot')
@ -722,7 +723,6 @@ function App() {
if (!runId) {
setIsProcessing(false)
setCurrentAssistantMessage('')
setCurrentReasoning('')
return
}
const isRunProcessing = processingRunIdsRef.current.has(runId)
@ -730,10 +730,8 @@ function App() {
if (isRunProcessing) {
const buffer = streamingBuffersRef.current.get(runId)
setCurrentAssistantMessage(buffer?.assistant ?? '')
setCurrentReasoning(buffer?.reasoning ?? '')
} else {
setCurrentAssistantMessage('')
setCurrentReasoning('')
streamingBuffersRef.current.delete(runId)
}
}, [runId])
@ -1113,6 +1111,15 @@ function App() {
}
break
}
case 'error': {
items.push({
id: `error-${Date.now()}-${Math.random()}`,
kind: 'error',
message: event.error,
timestamp: event.ts ? new Date(event.ts).getTime() : Date.now(),
})
break
}
case 'llm-stream-event': {
// We don't need to reconstruct streaming events for history
// Reasoning is captured in the final message
@ -1182,15 +1189,15 @@ function App() {
const getStreamingBuffer = (id: string) => {
const existing = streamingBuffersRef.current.get(id)
if (existing) return existing
const next = { assistant: '', reasoning: '' }
const next = { assistant: '' }
streamingBuffersRef.current.set(id, next)
return next
}
const appendStreamingBuffer = (id: string, field: 'assistant' | 'reasoning', delta: string) => {
const appendStreamingBuffer = (id: string, delta: string) => {
if (!delta) return
const buffer = getStreamingBuffer(id)
buffer[field] += delta
buffer.assistant += delta
}
const clearStreamingBuffer = (id: string) => {
@ -1231,7 +1238,6 @@ function App() {
case 'start':
if (!isActiveRun) return
setCurrentAssistantMessage('')
setCurrentReasoning('')
setModelUsage(null)
break
@ -1239,29 +1245,13 @@ function App() {
{
const llmEvent = event.event
if (!isActiveRun) {
if (llmEvent.type === 'reasoning-delta' && llmEvent.delta) {
appendStreamingBuffer(event.runId, 'reasoning', llmEvent.delta)
} else if (llmEvent.type === 'text-delta' && llmEvent.delta) {
appendStreamingBuffer(event.runId, 'assistant', llmEvent.delta)
if (llmEvent.type === 'text-delta' && llmEvent.delta) {
appendStreamingBuffer(event.runId, llmEvent.delta)
}
return
}
if (llmEvent.type === 'reasoning-delta' && llmEvent.delta) {
appendStreamingBuffer(event.runId, 'reasoning', llmEvent.delta)
setCurrentReasoning(prev => prev + llmEvent.delta)
} else if (llmEvent.type === 'reasoning-end') {
setCurrentReasoning(reasoning => {
if (reasoning) {
setConversation(prev => [...prev, {
id: `reasoning-${Date.now()}`,
content: reasoning,
timestamp: Date.now(),
}])
}
return ''
})
} else if (llmEvent.type === 'text-delta' && llmEvent.delta) {
appendStreamingBuffer(event.runId, 'assistant', llmEvent.delta)
if (llmEvent.type === 'text-delta' && llmEvent.delta) {
appendStreamingBuffer(event.runId, llmEvent.delta)
setCurrentAssistantMessage(prev => prev + llmEvent.delta)
} else if (llmEvent.type === 'tool-call') {
setConversation(prev => [...prev, {
@ -1454,7 +1444,6 @@ function App() {
}
return ''
})
setCurrentReasoning('')
break
case 'error':
@ -1468,6 +1457,13 @@ function App() {
setIsProcessing(false)
setIsStopping(false)
setStopClickedAt(null)
setConversation(prev => [...prev, {
id: `error-${Date.now()}`,
kind: 'error',
message: event.error,
timestamp: Date.now(),
}])
toast.error(event.error.split('\n')[0] || 'Model error')
console.error('Run error:', event.error)
break
}
@ -1602,7 +1598,6 @@ function App() {
loadRunRequestIdRef.current += 1
setConversation([])
setCurrentAssistantMessage('')
setCurrentReasoning('')
setRunId(null)
setMessage('')
setModelUsage(null)
@ -2203,6 +2198,39 @@ function App() {
}
if (isToolCall(item)) {
if (item.name === 'web-search') {
const input = normalizeToolInput(item.input) as Record<string, unknown> | undefined
const result = item.result as Record<string, unknown> | undefined
return (
<WebSearchResult
key={item.id}
query={(input?.query as string) || ''}
results={(result?.results as Array<{ title: string; url: string; description: string }>) || []}
status={item.status}
/>
)
}
if (item.name === 'research-search') {
const input = normalizeToolInput(item.input) as Record<string, unknown> | undefined
const result = item.result as Record<string, unknown> | undefined
const rawResults = (result?.results as Array<{ title: string; url: string; highlights?: string[]; text?: string }>) || []
const mapped = rawResults.map(r => ({
title: r.title,
url: r.url,
description: r.highlights?.[0] || (r.text ? r.text.slice(0, 200) : ''),
}))
const category = input?.category as string | undefined
const cardTitle = category ? `${category.charAt(0).toUpperCase() + category.slice(1)} search` : 'Researched the web'
return (
<WebSearchResult
key={item.id}
query={(input?.query as string) || ''}
results={mapped}
status={item.status}
title={cardTitle}
/>
)
}
const errorText = item.status === 'error' ? 'Tool error' : ''
const output = normalizeToolOutput(item.result, item.status)
const input = normalizeToolInput(item.input)
@ -2223,19 +2251,20 @@ function App() {
)
}
if (isReasoningBlock(item)) {
if (isErrorMessage(item)) {
return (
<Reasoning key={item.id}>
<ReasoningTrigger />
<ReasoningContent>{item.content}</ReasoningContent>
</Reasoning>
<Message key={item.id} from="assistant">
<MessageContent className="rounded-lg border border-destructive/30 bg-destructive/10 px-4 py-3 text-destructive">
<pre className="whitespace-pre-wrap font-mono text-xs">{item.message}</pre>
</MessageContent>
</Message>
)
}
return null
}
const hasConversation = conversation.length > 0 || currentAssistantMessage || currentReasoning
const hasConversation = conversation.length > 0 || currentAssistantMessage
const conversationContentClassName = hasConversation
? "mx-auto w-full max-w-4xl pb-28"
: "mx-auto w-full max-w-4xl min-h-full items-center justify-center pb-0"
@ -2447,13 +2476,6 @@ function App() {
/>
))}
{currentReasoning && (
<Reasoning isStreaming>
<ReasoningTrigger />
<ReasoningContent>{currentReasoning}</ReasoningContent>
</Reasoning>
)}
{currentAssistantMessage && (
<Message from="assistant">
<MessageContent>
@ -2462,7 +2484,7 @@ function App() {
</Message>
)}
{isProcessing && !currentAssistantMessage && !currentReasoning && (
{isProcessing && !currentAssistantMessage && (
<Message from="assistant">
<MessageContent>
<Shimmer duration={1}>Thinking...</Shimmer>
@ -2508,7 +2530,6 @@ function App() {
onOpenFullScreen={navigateToFullScreenChat}
conversation={conversation}
currentAssistantMessage={currentAssistantMessage}
currentReasoning={currentReasoning}
isProcessing={isProcessing}
isStopping={isStopping}
onStop={handleStop}

View file

@ -0,0 +1,109 @@
"use client";
import {
Collapsible,
CollapsibleContent,
CollapsibleTrigger,
} from "@/components/ui/collapsible";
import {
CheckCircleIcon,
ChevronDownIcon,
GlobeIcon,
LoaderIcon,
} from "lucide-react";
/**
 * Props for the WebSearchResult card rendered for `web-search` /
 * `research-search` tool calls in the chat transcript.
 */
interface WebSearchResultProps {
// The search query string shown in the card header row.
query: string;
// Normalized result entries; description is the snippet line (may be '').
results: Array<{ title: string; url: string; description: string }>;
// Tool-call lifecycle state; "pending"/"running" show the spinner footer.
status: "pending" | "running" | "completed" | "error";
// Card heading; callers override it (e.g. "Researched the web") — defaults to "Searched the web".
title?: string;
}
/**
 * Extract the hostname from an absolute URL for display (domain label and
 * favicon lookup). Falls back to returning the raw input unchanged when it
 * cannot be parsed as a URL.
 */
function getDomain(url: string): string {
  let hostname: string;
  try {
    hostname = new URL(url).hostname;
  } catch {
    // Not a parseable absolute URL — show the raw string instead.
    hostname = url;
  }
  return hostname;
}
export function WebSearchResult({ query, results, status, title = "Searched the web" }: WebSearchResultProps) {
const isRunning = status === "pending" || status === "running";
return (
<Collapsible defaultOpen className="not-prose mb-4 w-full rounded-md border">
<CollapsibleTrigger className="flex w-full items-center justify-between gap-4 p-3">
<div className="flex items-center gap-2">
<GlobeIcon className="size-4 text-muted-foreground" />
<span className="font-medium text-sm">{title}</span>
</div>
<ChevronDownIcon className="size-4 text-muted-foreground transition-transform group-data-[state=open]:rotate-180" />
</CollapsibleTrigger>
<CollapsibleContent>
<div className="px-3 pb-3 space-y-3">
{/* Query + result count */}
<div className="flex items-center justify-between gap-2">
<div className="flex items-center gap-2 text-sm text-muted-foreground min-w-0">
<GlobeIcon className="size-3.5 shrink-0" />
<span className="truncate">{query}</span>
</div>
{results.length > 0 && (
<span className="text-xs text-muted-foreground whitespace-nowrap">
{results.length} result{results.length !== 1 ? "s" : ""}
</span>
)}
</div>
{/* Results list */}
{results.length > 0 && (
<div className="rounded-md border max-h-64 overflow-y-auto">
{results.map((result, index) => {
const domain = getDomain(result.url);
return (
<a
key={index}
href={result.url}
target="_blank"
rel="noopener noreferrer"
onClick={(e) => {
e.preventDefault();
window.open(result.url, "_blank");
}}
className="flex items-center justify-between gap-3 px-3 py-2 text-sm hover:bg-muted/50 transition-colors border-b last:border-b-0"
>
<div className="flex items-center gap-2 min-w-0">
<img
src={`https://www.google.com/s2/favicons?domain=${domain}&sz=16`}
alt=""
className="size-4 shrink-0"
/>
<span className="truncate">{result.title}</span>
</div>
<span className="text-xs text-muted-foreground whitespace-nowrap shrink-0">
{domain}
</span>
</a>
);
})}
</div>
)}
{/* Status */}
<div className="flex items-center gap-1.5 text-xs text-muted-foreground">
{isRunning ? (
<>
<LoaderIcon className="size-3.5 animate-spin" />
<span>Searching...</span>
</>
) : (
<>
<CheckCircleIcon className="size-3.5 text-green-600" />
<span>Done</span>
</>
)}
</div>
</div>
</CollapsibleContent>
</Collapsible>
);
}

View file

@ -19,7 +19,7 @@ import {
MessageContent,
MessageResponse,
} from '@/components/ai-elements/message'
import { Reasoning, ReasoningContent, ReasoningTrigger } from '@/components/ai-elements/reasoning'
import { Shimmer } from '@/components/ai-elements/shimmer'
import { Tool, ToolContent, ToolHeader, ToolInput, ToolOutput } from '@/components/ai-elements/tool'
import { PermissionRequest } from '@/components/ai-elements/permission-request'
@ -52,20 +52,20 @@ interface ToolCall {
timestamp: number
}
interface ReasoningBlock {
interface ErrorMessage {
id: string
content: string
kind: 'error'
message: string
timestamp: number
}
type ConversationItem = ChatMessage | ToolCall | ReasoningBlock
type ConversationItem = ChatMessage | ToolCall | ErrorMessage
type ToolState = 'input-streaming' | 'input-available' | 'output-available' | 'output-error'
const isChatMessage = (item: ConversationItem): item is ChatMessage => 'role' in item
const isToolCall = (item: ConversationItem): item is ToolCall => 'name' in item
const isReasoningBlock = (item: ConversationItem): item is ReasoningBlock =>
'content' in item && !('role' in item) && !('name' in item)
const isErrorMessage = (item: ConversationItem): item is ErrorMessage => 'kind' in item && item.kind === 'error'
const toToolState = (status: ToolCall['status']): ToolState => {
switch (status) {
@ -118,7 +118,6 @@ interface ChatSidebarProps {
onOpenFullScreen?: () => void
conversation: ConversationItem[]
currentAssistantMessage: string
currentReasoning: string
isProcessing: boolean
isStopping?: boolean
onStop?: () => void
@ -145,7 +144,6 @@ export function ChatSidebar({
onOpenFullScreen,
conversation,
currentAssistantMessage,
currentReasoning,
isProcessing,
isStopping,
onStop,
@ -326,7 +324,7 @@ export function ChatSidebar({
autoMentionRef.current = { path: selectedPath, displayName }
}, [selectedPath, message, onMessageChange])
const hasConversation = conversation.length > 0 || currentAssistantMessage || currentReasoning
const hasConversation = conversation.length > 0 || currentAssistantMessage
const canSubmit = Boolean(message.trim()) && !isProcessing
const handleSubmit = () => {
@ -427,12 +425,13 @@ export function ChatSidebar({
)
}
if (isReasoningBlock(item)) {
if (isErrorMessage(item)) {
return (
<Reasoning key={item.id}>
<ReasoningTrigger />
<ReasoningContent>{item.content}</ReasoningContent>
</Reasoning>
<Message key={item.id} from="assistant">
<MessageContent className="rounded-lg border border-destructive/30 bg-destructive/10 px-4 py-3 text-destructive">
<pre className="whitespace-pre-wrap font-mono text-xs">{item.message}</pre>
</MessageContent>
</Message>
)
}
@ -535,13 +534,6 @@ export function ChatSidebar({
/>
))}
{currentReasoning && (
<Reasoning isStreaming>
<ReasoningTrigger />
<ReasoningContent>{currentReasoning}</ReasoningContent>
</Reasoning>
)}
{currentAssistantMessage && (
<Message from="assistant">
<MessageContent>
@ -550,7 +542,7 @@ export function ChatSidebar({
</Message>
)}
{isProcessing && !currentAssistantMessage && !currentReasoning && (
{isProcessing && !currentAssistantMessage && (
<Message from="assistant">
<MessageContent>
<Shimmer duration={1}>Thinking...</Shimmer>

View file

@ -98,6 +98,7 @@ export function OnboardingModal({ open, onComplete }: OnboardingModalProps) {
)
const activeConfig = providerConfigs[llmProvider]
const showApiKey = llmProvider === "openai" || llmProvider === "anthropic" || llmProvider === "google" || llmProvider === "openrouter" || llmProvider === "aigateway" || llmProvider === "openai-compatible"
const requiresApiKey = llmProvider === "openai" || llmProvider === "anthropic" || llmProvider === "google" || llmProvider === "openrouter" || llmProvider === "aigateway"
const requiresBaseURL = llmProvider === "ollama" || llmProvider === "openai-compatible"
const showBaseURL = llmProvider === "ollama" || llmProvider === "openai-compatible" || llmProvider === "aigateway"
@ -690,9 +691,11 @@ export function OnboardingModal({ open, onComplete }: OnboardingModalProps) {
)}
</div>
{requiresApiKey && (
{showApiKey && (
<div className="space-y-2">
<span className="text-xs font-medium text-muted-foreground uppercase tracking-wider">API Key</span>
<span className="text-xs font-medium text-muted-foreground uppercase tracking-wider">
{llmProvider === "openai-compatible" ? "API Key (optional)" : "API Key"}
</span>
<Input
type="password"
value={activeConfig.apiKey}

View file

@ -184,6 +184,7 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
const [showMoreProviders, setShowMoreProviders] = useState(false)
const activeConfig = providerConfigs[provider]
const showApiKey = provider === "openai" || provider === "anthropic" || provider === "google" || provider === "openrouter" || provider === "aigateway" || provider === "openai-compatible"
const requiresApiKey = provider === "openai" || provider === "anthropic" || provider === "google" || provider === "openrouter" || provider === "aigateway"
const showBaseURL = provider === "ollama" || provider === "openai-compatible" || provider === "aigateway"
const requiresBaseURL = provider === "ollama" || provider === "openai-compatible"
@ -398,9 +399,11 @@ function ModelSettings({ dialogOpen }: { dialogOpen: boolean }) {
</div>
{/* API Key */}
{requiresApiKey && (
{showApiKey && (
<div className="space-y-2">
<span className="text-xs font-medium text-muted-foreground uppercase tracking-wider">API Key</span>
<span className="text-xs font-medium text-muted-foreground uppercase tracking-wider">
{provider === "openai-compatible" ? "API Key (optional)" : "API Key"}
</span>
<Input
type="password"
value={activeConfig.apiKey}

View file

@ -8,9 +8,7 @@ import {
ChevronsDownUp,
ChevronsUpDown,
Copy,
File,
FilePlus,
Folder,
FolderPlus,
HelpCircle,
Mic,