From 63a1d4952b4d80195f1772637fb2cddbdcdc4e29 Mon Sep 17 00:00:00 2001
From: Ramnique Singh <30795890+ramnique@users.noreply.github.com>
Date: Thu, 19 Mar 2026 18:46:08 +0530
Subject: [PATCH 1/5] remove default exa server
---
apps/x/packages/core/src/mcp/repo.ts | 4 ----
1 file changed, 4 deletions(-)
diff --git a/apps/x/packages/core/src/mcp/repo.ts b/apps/x/packages/core/src/mcp/repo.ts
index 66162895..59bc975b 100644
--- a/apps/x/packages/core/src/mcp/repo.ts
+++ b/apps/x/packages/core/src/mcp/repo.ts
@@ -5,10 +5,6 @@ import path from "path";
import z from "zod";
const DEFAULT_MCP_SERVERS = {
- exa: {
- type: "http" as const,
- url: "https://mcp.exa.ai/mcp",
- },
};
export interface IMcpConfigRepo {
From 2e8a3580c25f97d486f684907f29857f9dae0a56 Mon Sep 17 00:00:00 2001
From: Ramnique Singh <30795890+ramnique@users.noreply.github.com>
Date: Thu, 19 Mar 2026 18:46:54 +0530
Subject: [PATCH 2/5] hardcode prod urls
---
apps/x/packages/core/src/config/env.ts | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/apps/x/packages/core/src/config/env.ts b/apps/x/packages/core/src/config/env.ts
index 2ec62881..441d5bb7 100644
--- a/apps/x/packages/core/src/config/env.ts
+++ b/apps/x/packages/core/src/config/env.ts
@@ -1,5 +1,5 @@
export const API_URL =
- process.env.API_URL || 'http://localhost:3002/v1';
+ process.env.API_URL || 'https://api.x.rowboatlabs.com/v1';
export const SUPABASE_PROJECT_URL =
- process.env.SUPABASE_PROJECT_URL || 'http://127.0.0.1:54321';
+ process.env.SUPABASE_PROJECT_URL || 'https://jpxoiuhlshgwixajvsbu.supabase.co';
From 1c63ee571c79f1d9bd75b7e22387a5061de67780 Mon Sep 17 00:00:00 2001
From: Arjun <6592213+arkml@users.noreply.github.com>
Date: Thu, 19 Mar 2026 19:20:08 +0530
Subject: [PATCH 3/5] fix build error
---
apps/x/apps/renderer/src/components/markdown-editor.tsx | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/apps/x/apps/renderer/src/components/markdown-editor.tsx b/apps/x/apps/renderer/src/components/markdown-editor.tsx
index 590b6585..ac065e3d 100644
--- a/apps/x/apps/renderer/src/components/markdown-editor.tsx
+++ b/apps/x/apps/renderer/src/components/markdown-editor.tsx
@@ -952,7 +952,8 @@ export function MarkdownEditor({
setRowboatAnchorTop(null)
// Get editor content for the agent
- const editorContent = editor.storage.markdown?.getMarkdown?.() ?? ''
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ const editorContent = (editor.storage as any).markdown?.getMarkdown?.() ?? ''
// Helper to find the processing block
const findProcessingBlock = (): number | null => {
From 7966501a79b0e6f6cb315b19f236adbd516b4d1e Mon Sep 17 00:00:00 2001
From: Ramnique Singh <30795890+ramnique@users.noreply.github.com>
Date: Thu, 19 Mar 2026 21:50:37 +0530
Subject: [PATCH 4/5] fix api url
---
apps/x/packages/core/src/config/env.ts | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/apps/x/packages/core/src/config/env.ts b/apps/x/packages/core/src/config/env.ts
index 441d5bb7..b8c0e3d7 100644
--- a/apps/x/packages/core/src/config/env.ts
+++ b/apps/x/packages/core/src/config/env.ts
@@ -1,5 +1,5 @@
export const API_URL =
- process.env.API_URL || 'https://api.x.rowboatlabs.com/v1';
+ process.env.API_URL || 'https://api.x.rowboatlabs.com';
export const SUPABASE_PROJECT_URL =
process.env.SUPABASE_PROJECT_URL || 'https://jpxoiuhlshgwixajvsbu.supabase.co';
From c0138af3abcf91d56072a7a7516b065293614bc9 Mon Sep 17 00:00:00 2001
From: arkml <6592213+arkml@users.noreply.github.com>
Date: Thu, 19 Mar 2026 22:12:42 +0530
Subject: [PATCH 5/5] Ola (#438)
Native meeting transcription that captures mic and system audio, transcribes
live via Deepgram, and generates AI-powered meeting notes.
- Toggle button in toolbar to start/stop meeting transcription
- Dual-stream audio capture: mic (You) + system audio (They) via getDisplayMedia
loopback
- Multichannel Deepgram transcription with diarization for speaker
identification
- Headphone detection with mic gating when using speakers to prevent echo bleed
- Live transcript saved to knowledge/Meetings/rowboat/{date}/ as markdown
- Auto-stop after 2 minutes of silence
- LLM-generated meeting notes prepended above raw transcript on stop
- Calendar event matching: pulls nearby events from calendar_sync to identify
meeting title and participant names
- First-time permissions setup modal on macOS for Screen Recording
- Button only visible when Deepgram is available (logged in or API key
configured)
---
apps/x/apps/main/entitlements.plist | 10 +
apps/x/apps/main/forge.config.cjs | 4 +
apps/x/apps/main/src/ipc.ts | 5 +
apps/x/apps/main/src/main.ts | 18 +-
apps/x/apps/renderer/src/App.tsx | 143 ++++++-
.../src/hooks/useMeetingTranscription.ts | 374 ++++++++++++++++++
.../core/src/knowledge/summarize_meeting.ts | 103 +++++
apps/x/packages/shared/src/ipc.ts | 9 +
8 files changed, 662 insertions(+), 4 deletions(-)
create mode 100644 apps/x/apps/main/entitlements.plist
create mode 100644 apps/x/apps/renderer/src/hooks/useMeetingTranscription.ts
create mode 100644 apps/x/packages/core/src/knowledge/summarize_meeting.ts
diff --git a/apps/x/apps/main/entitlements.plist b/apps/x/apps/main/entitlements.plist
new file mode 100644
index 00000000..db2dbd7e
--- /dev/null
+++ b/apps/x/apps/main/entitlements.plist
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+  <dict>
+    <key>com.apple.security.device.audio-input</key>
+    <true/>
+    <key>com.apple.security.device.screen-capture</key>
+    <true/>
+  </dict>
+</plist>
diff --git a/apps/x/apps/main/forge.config.cjs b/apps/x/apps/main/forge.config.cjs
index 57f733f2..c79a8c43 100644
--- a/apps/x/apps/main/forge.config.cjs
+++ b/apps/x/apps/main/forge.config.cjs
@@ -13,6 +13,10 @@ module.exports = {
appCategoryType: 'public.app-category.productivity',
osxSign: {
batchCodesignCalls: true,
+ optionsForFile: () => ({
+ entitlements: path.join(__dirname, 'entitlements.plist'),
+ 'entitlements-inherit': path.join(__dirname, 'entitlements.plist'),
+ }),
},
osxNotarize: {
appleId: process.env.APPLE_ID,
diff --git a/apps/x/apps/main/src/ipc.ts b/apps/x/apps/main/src/ipc.ts
index 0596f1a5..3ace4359 100644
--- a/apps/x/apps/main/src/ipc.ts
+++ b/apps/x/apps/main/src/ipc.ts
@@ -41,6 +41,7 @@ import { search } from '@x/core/dist/search/search.js';
import { versionHistory, voice } from '@x/core';
import { classifySchedule, processRowboatInstruction } from '@x/core/dist/knowledge/inline_tasks.js';
import { getBillingInfo } from '@x/core/dist/billing/billing.js';
+import { summarizeMeeting } from '@x/core/dist/knowledge/summarize_meeting.js';
/**
* Convert markdown to a styled HTML document for PDF/DOCX export.
@@ -701,6 +702,10 @@ export function setupIpcHandlers() {
return { success: false, error: 'Unknown format' };
},
+ 'meeting:summarize': async (_event, args) => {
+ const notes = await summarizeMeeting(args.transcript, args.meetingStartTime);
+ return { notes };
+ },
'inline-task:classifySchedule': async (_event, args) => {
const schedule = await classifySchedule(args.instruction);
return { schedule };
diff --git a/apps/x/apps/main/src/main.ts b/apps/x/apps/main/src/main.ts
index 579fdbfa..060f0433 100644
--- a/apps/x/apps/main/src/main.ts
+++ b/apps/x/apps/main/src/main.ts
@@ -1,4 +1,4 @@
-import { app, BrowserWindow, protocol, net, shell, session } from "electron";
+import { app, BrowserWindow, desktopCapturer, protocol, net, shell, session } from "electron";
import path from "node:path";
import {
setupIpcHandlers,
@@ -92,15 +92,27 @@ function createWindow() {
},
});
- // Grant microphone permission for voice mode
+ // Grant microphone and display-capture permissions
session.defaultSession.setPermissionRequestHandler((_webContents, permission, callback) => {
- if (permission === 'media') {
+ if (permission === 'media' || permission === 'display-capture') {
callback(true);
} else {
callback(false);
}
});
+ // Auto-approve display media requests and route system audio as loopback.
+ // Electron requires a video source in the callback even if we only want audio.
+ // We pass the first available screen source; the renderer discards the video track.
+ session.defaultSession.setDisplayMediaRequestHandler(async (_request, callback) => {
+ const sources = await desktopCapturer.getSources({ types: ['screen'] });
+ if (sources.length === 0) {
+ callback({});
+ return;
+ }
+ callback({ video: sources[0], audio: 'loopback' });
+ });
+
// Show window when content is ready to prevent blank screen
win.once("ready-to-show", () => {
win.maximize();
diff --git a/apps/x/apps/renderer/src/App.tsx b/apps/x/apps/renderer/src/App.tsx
index b37b8559..1a60dcff 100644
--- a/apps/x/apps/renderer/src/App.tsx
+++ b/apps/x/apps/renderer/src/App.tsx
@@ -5,7 +5,7 @@ import { RunEvent, ListRunsResponse } from '@x/shared/src/runs.js';
import type { LanguageModelUsage, ToolUIPart } from 'ai';
import './App.css'
import z from 'zod';
-import { CheckIcon, LoaderIcon, PanelLeftIcon, Maximize2, Minimize2, ChevronLeftIcon, ChevronRightIcon, SquarePen, SearchIcon, HistoryIcon } from 'lucide-react';
+import { CheckIcon, LoaderIcon, PanelLeftIcon, Maximize2, Minimize2, ChevronLeftIcon, ChevronRightIcon, SquarePen, SearchIcon, HistoryIcon, RadioIcon, SquareIcon } from 'lucide-react';
import { cn } from '@/lib/utils';
import { MarkdownEditor } from './components/markdown-editor';
import { ChatSidebar } from './components/chat-sidebar';
@@ -46,6 +46,8 @@ import {
useSidebar,
} from "@/components/ui/sidebar"
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"
+import { Dialog, DialogContent, DialogHeader, DialogTitle, DialogDescription, DialogFooter } from "@/components/ui/dialog"
+import { Button } from "@/components/ui/button"
import { Toaster } from "@/components/ui/sonner"
import { stripKnowledgePrefix, toKnowledgePath, wikiLabel } from '@/lib/wiki-links'
import { splitFrontmatter, joinFrontmatter } from '@/lib/frontmatter'
@@ -78,6 +80,7 @@ import { AgentScheduleState } from '@x/shared/dist/agent-schedule-state.js'
import { toast } from "sonner"
import { useVoiceMode } from '@/hooks/useVoiceMode'
import { useVoiceTTS } from '@/hooks/useVoiceTTS'
+import { useMeetingTranscription, type MeetingTranscriptionState } from '@/hooks/useMeetingTranscription'
type DirEntry = z.infer
type RunEventType = z.infer
@@ -383,6 +386,10 @@ function FixedSidebarToggle({
canNavigateForward,
onNewChat,
onOpenSearch,
+ meetingState,
+ meetingSummarizing,
+ meetingAvailable,
+ onToggleMeeting,
leftInsetPx,
}: {
onNavigateBack: () => void
@@ -391,6 +398,10 @@ function FixedSidebarToggle({
canNavigateForward: boolean
onNewChat: () => void
onOpenSearch: () => void
+ meetingState: MeetingTranscriptionState
+ meetingSummarizing: boolean
+ meetingAvailable: boolean
+ onToggleMeeting: () => void
leftInsetPx: number
}) {
const { toggleSidebar, state } = useSidebar()
@@ -426,6 +437,37 @@ function FixedSidebarToggle({
>
+ {meetingAvailable && (
+
+
+
+
+
+ {meetingSummarizing ? 'Generating meeting notes...' : meetingState === 'recording' ? 'Stop meeting notes' : 'Take new meeting notes'}
+
+
+ )}
{/* Back / Forward navigation */}
{isCollapsed && (
<>
@@ -619,6 +661,11 @@ function App() {
const voiceRef = useRef(voice)
voiceRef.current = voice
+ const handleToggleMeetingRef = useRef<(() => void) | undefined>(undefined)
+ const meetingTranscription = useMeetingTranscription(() => {
+ handleToggleMeetingRef.current?.()
+ })
+
// Check if voice is available on mount and when OAuth state changes
const refreshVoiceAvailability = useCallback(() => {
Promise.all([
@@ -3314,6 +3361,73 @@ function App() {
navigateToFile(notePath)
}, [loadDirectory, navigateToFile, fileTabs])
+ const meetingNotePathRef = useRef(null)
+ const [meetingSummarizing, setMeetingSummarizing] = useState(false)
+ const [showMeetingPermissions, setShowMeetingPermissions] = useState(false)
+
+ const startMeetingAfterPermissions = useCallback(async () => {
+ setShowMeetingPermissions(false)
+ localStorage.setItem('meeting-permissions-acknowledged', '1')
+ const notePath = await meetingTranscription.start()
+ if (notePath) {
+ meetingNotePathRef.current = notePath
+ await handleVoiceNoteCreated(notePath)
+ }
+ }, [meetingTranscription, handleVoiceNoteCreated])
+
+ const handleToggleMeeting = useCallback(async () => {
+ if (meetingTranscription.state === 'recording') {
+ await meetingTranscription.stop()
+
+ // Read the final transcript and generate meeting notes via LLM
+ const notePath = meetingNotePathRef.current
+ if (notePath) {
+ setMeetingSummarizing(true)
+ try {
+ const result = await window.ipc.invoke('workspace:readFile', { path: notePath, encoding: 'utf8' })
+ const fileContent = result.data
+ if (fileContent && fileContent.trim()) {
+ // Extract meeting start time from frontmatter for calendar matching
+ const dateMatch = fileContent.match(/^date:\s*"(.+)"$/m)
+ const meetingStartTime = dateMatch?.[1]
+ const { notes } = await window.ipc.invoke('meeting:summarize', { transcript: fileContent, meetingStartTime })
+ if (notes) {
+ // Prepend meeting notes below the title but above the transcript
+ const { raw: fm, body: transcriptBody } = splitFrontmatter(fileContent)
+ // Strip the "# Meeting note" title from transcript body — we'll put it first
+ const bodyWithoutTitle = transcriptBody.replace(/^#\s+Meeting note\s*\n*/, '')
+ const newBody = '# Meeting note\n\n' + notes + '\n\n---\n\n## Raw transcript\n\n' + bodyWithoutTitle
+ const newContent = fm ? `${fm}\n${newBody}` : newBody
+ await window.ipc.invoke('workspace:writeFile', {
+ path: notePath,
+ data: newContent,
+ opts: { encoding: 'utf8' },
+ })
+ // Refresh the file view
+ await handleVoiceNoteCreated(notePath)
+ }
+ }
+ } catch (err) {
+ console.error('[meeting] Failed to generate meeting notes:', err)
+ }
+ setMeetingSummarizing(false)
+ meetingNotePathRef.current = null
+ }
+ } else if (meetingTranscription.state === 'idle') {
+ // Show permissions modal on first use (macOS only — Windows works out of the box)
+ if (isMac && !localStorage.getItem('meeting-permissions-acknowledged')) {
+ setShowMeetingPermissions(true)
+ return
+ }
+ const notePath = await meetingTranscription.start()
+ if (notePath) {
+ meetingNotePathRef.current = notePath
+ await handleVoiceNoteCreated(notePath)
+ }
+ }
+ }, [meetingTranscription, handleVoiceNoteCreated])
+ handleToggleMeetingRef.current = handleToggleMeeting
+
const ensureWikiFile = useCallback(async (wikiPath: string) => {
const resolvedPath = toKnowledgePath(wikiPath)
if (!resolvedPath) return null
@@ -4176,6 +4290,10 @@ function App() {
canNavigateForward={canNavigateForward}
onNewChat={handleNewChatTab}
onOpenSearch={() => setIsSearchOpen(true)}
+ meetingState={meetingTranscription.state}
+ meetingSummarizing={meetingSummarizing}
+ meetingAvailable={voiceAvailable}
+ onToggleMeeting={() => { void handleToggleMeeting() }}
leftInsetPx={isMac ? MACOS_TRAFFIC_LIGHTS_RESERVED_PX : 0}
/>
@@ -4192,6 +4310,29 @@ function App() {
open={showOnboarding}
onComplete={handleOnboardingComplete}
/>
+
)
}
diff --git a/apps/x/apps/renderer/src/hooks/useMeetingTranscription.ts b/apps/x/apps/renderer/src/hooks/useMeetingTranscription.ts
new file mode 100644
index 00000000..3fc40cce
--- /dev/null
+++ b/apps/x/apps/renderer/src/hooks/useMeetingTranscription.ts
@@ -0,0 +1,374 @@
+import { useCallback, useRef, useState } from 'react';
+
+export type MeetingTranscriptionState = 'idle' | 'connecting' | 'recording' | 'stopping';
+
+const DEEPGRAM_PARAMS = new URLSearchParams({
+ model: 'nova-3',
+ encoding: 'linear16',
+ sample_rate: '16000',
+ channels: '2',
+ multichannel: 'true',
+ diarize: 'true',
+ interim_results: 'true',
+ smart_format: 'true',
+ punctuate: 'true',
+ language: 'en',
+});
+const DEEPGRAM_LISTEN_URL = `wss://api.deepgram.com/v1/listen?${DEEPGRAM_PARAMS.toString()}`;
+
+// RMS threshold: system audio above this = "active" (speakers playing)
+const SYSTEM_AUDIO_GATE_THRESHOLD = 0.005;
+
+// Auto-stop after 2 minutes of silence (no transcript from Deepgram)
+const SILENCE_AUTO_STOP_MS = 2 * 60 * 1000;
+
+// ---------------------------------------------------------------------------
+// Headphone detection
+// ---------------------------------------------------------------------------
+async function detectHeadphones(): Promise<boolean> {
+ try {
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ const outputs = devices.filter(d => d.kind === 'audiooutput');
+ const defaultOutput = outputs.find(d => d.deviceId === 'default');
+ const label = (defaultOutput?.label ?? '').toLowerCase();
+ // Heuristic: built-in speakers won't match these patterns
+ const headphonePatterns = ['headphone', 'airpod', 'earpod', 'earphone', 'earbud', 'bluetooth', 'bt_', 'jabra', 'bose', 'sony wh', 'sony wf'];
+ return headphonePatterns.some(p => label.includes(p));
+ } catch {
+ return false;
+ }
+}
+
+// ---------------------------------------------------------------------------
+// Transcript formatting
+// ---------------------------------------------------------------------------
+interface TranscriptEntry {
+ speaker: string;
+ text: string;
+}
+
+function formatTranscript(entries: TranscriptEntry[], date: string): string {
+ const lines = [
+ '---',
+ 'type: meeting',
+ 'source: rowboat',
+ 'title: Meeting note',
+ `date: "${date}"`,
+ '---',
+ '',
+ '# Meeting note',
+ '',
+ ];
+ for (let i = 0; i < entries.length; i++) {
+ if (i > 0 && entries[i].speaker !== entries[i - 1].speaker) {
+ lines.push('');
+ }
+ lines.push(`**${entries[i].speaker}:** ${entries[i].text}`);
+ lines.push('');
+ }
+ return lines.join('\n');
+}
+
+// ---------------------------------------------------------------------------
+// Hook
+// ---------------------------------------------------------------------------
+export function useMeetingTranscription(onAutoStop?: () => void) {
+  const [state, setState] = useState<MeetingTranscriptionState>('idle');
+  const wsRef = useRef<WebSocket | null>(null);
+  const micStreamRef = useRef<MediaStream | null>(null);
+  const systemStreamRef = useRef<MediaStream | null>(null);
+ const processorRef = useRef(null);
+  const audioCtxRef = useRef<AudioContext | null>(null);
+  const transcriptRef = useRef<TranscriptEntry[]>([]);
+ const interimRef = useRef