diff --git a/apps/x/apps/main/entitlements.plist b/apps/x/apps/main/entitlements.plist
new file mode 100644
index 00000000..db2dbd7e
--- /dev/null
+++ b/apps/x/apps/main/entitlements.plist
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+  <dict>
+    <key>com.apple.security.device.audio-input</key>
+    <true/>
+    <key>com.apple.security.device.screen-capture</key>
+    <true/>
+  </dict>
+</plist>
diff --git a/apps/x/apps/main/forge.config.cjs b/apps/x/apps/main/forge.config.cjs
index 57f733f2..c79a8c43 100644
--- a/apps/x/apps/main/forge.config.cjs
+++ b/apps/x/apps/main/forge.config.cjs
@@ -13,6 +13,10 @@ module.exports = {
appCategoryType: 'public.app-category.productivity',
osxSign: {
batchCodesignCalls: true,
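+ // Apply the entitlements.plist above (mic + screen capture) to the app and all
+ // nested binaries so the hardened runtime permits those capabilities.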
+ optionsForFile: () => ({
+ entitlements: path.join(__dirname, 'entitlements.plist'),
+ 'entitlements-inherit': path.join(__dirname, 'entitlements.plist'),
+ }),
},
osxNotarize: {
appleId: process.env.APPLE_ID,
diff --git a/apps/x/apps/main/src/ipc.ts b/apps/x/apps/main/src/ipc.ts
index 0596f1a5..3ace4359 100644
--- a/apps/x/apps/main/src/ipc.ts
+++ b/apps/x/apps/main/src/ipc.ts
@@ -41,6 +41,7 @@ import { search } from '@x/core/dist/search/search.js';
import { versionHistory, voice } from '@x/core';
import { classifySchedule, processRowboatInstruction } from '@x/core/dist/knowledge/inline_tasks.js';
import { getBillingInfo } from '@x/core/dist/billing/billing.js';
+import { summarizeMeeting } from '@x/core/dist/knowledge/summarize_meeting.js';
/**
* Convert markdown to a styled HTML document for PDF/DOCX export.
@@ -701,6 +702,10 @@ export function setupIpcHandlers() {
return { success: false, error: 'Unknown format' };
},
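+ // Generate meeting notes from a raw transcript via the core summarizeMeeting helper;
+ // meetingStartTime is passed through for calendar matching.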
+ 'meeting:summarize': async (_event, args) => {
+ const notes = await summarizeMeeting(args.transcript, args.meetingStartTime);
+ return { notes };
+ },
'inline-task:classifySchedule': async (_event, args) => {
const schedule = await classifySchedule(args.instruction);
return { schedule };
diff --git a/apps/x/apps/main/src/main.ts b/apps/x/apps/main/src/main.ts
index 579fdbfa..060f0433 100644
--- a/apps/x/apps/main/src/main.ts
+++ b/apps/x/apps/main/src/main.ts
@@ -1,4 +1,4 @@
-import { app, BrowserWindow, protocol, net, shell, session } from "electron";
+import { app, BrowserWindow, desktopCapturer, protocol, net, shell, session } from "electron";
import path from "node:path";
import {
setupIpcHandlers,
@@ -92,15 +92,27 @@ function createWindow() {
},
});
- // Grant microphone permission for voice mode
+ // Grant microphone and display-capture permissions
session.defaultSession.setPermissionRequestHandler((_webContents, permission, callback) => {
- if (permission === 'media') {
+ if (permission === 'media' || permission === 'display-capture') {
callback(true);
} else {
callback(false);
}
});
+ // Auto-approve display media requests and route system audio as loopback.
+ // Electron requires a video source in the callback even if we only want audio.
+ // We pass the first available screen source; the renderer discards the video track.
+ session.defaultSession.setDisplayMediaRequestHandler(async (_request, callback) => {
+ const sources = await desktopCapturer.getSources({ types: ['screen'] });
+ if (sources.length === 0) {
+ callback({});
+ return;
+ }
+ callback({ video: sources[0], audio: 'loopback' });
+ });
+
// Show window when content is ready to prevent blank screen
win.once("ready-to-show", () => {
win.maximize();
diff --git a/apps/x/apps/renderer/src/App.tsx b/apps/x/apps/renderer/src/App.tsx
index b37b8559..1a60dcff 100644
--- a/apps/x/apps/renderer/src/App.tsx
+++ b/apps/x/apps/renderer/src/App.tsx
@@ -5,7 +5,7 @@ import { RunEvent, ListRunsResponse } from '@x/shared/src/runs.js';
import type { LanguageModelUsage, ToolUIPart } from 'ai';
import './App.css'
import z from 'zod';
-import { CheckIcon, LoaderIcon, PanelLeftIcon, Maximize2, Minimize2, ChevronLeftIcon, ChevronRightIcon, SquarePen, SearchIcon, HistoryIcon } from 'lucide-react';
+import { CheckIcon, LoaderIcon, PanelLeftIcon, Maximize2, Minimize2, ChevronLeftIcon, ChevronRightIcon, SquarePen, SearchIcon, HistoryIcon, RadioIcon, SquareIcon } from 'lucide-react';
import { cn } from '@/lib/utils';
import { MarkdownEditor } from './components/markdown-editor';
import { ChatSidebar } from './components/chat-sidebar';
@@ -46,6 +46,8 @@ import {
useSidebar,
} from "@/components/ui/sidebar"
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"
+import { Dialog, DialogContent, DialogHeader, DialogTitle, DialogDescription, DialogFooter } from "@/components/ui/dialog"
+import { Button } from "@/components/ui/button"
import { Toaster } from "@/components/ui/sonner"
import { stripKnowledgePrefix, toKnowledgePath, wikiLabel } from '@/lib/wiki-links'
import { splitFrontmatter, joinFrontmatter } from '@/lib/frontmatter'
@@ -78,6 +80,7 @@ import { AgentScheduleState } from '@x/shared/dist/agent-schedule-state.js'
import { toast } from "sonner"
import { useVoiceMode } from '@/hooks/useVoiceMode'
import { useVoiceTTS } from '@/hooks/useVoiceTTS'
+import { useMeetingTranscription, type MeetingTranscriptionState } from '@/hooks/useMeetingTranscription'
type DirEntry = z.infer
type RunEventType = z.infer
@@ -383,6 +386,10 @@ function FixedSidebarToggle({
canNavigateForward,
onNewChat,
onOpenSearch,
+ meetingState,
+ meetingSummarizing,
+ meetingAvailable,
+ onToggleMeeting,
leftInsetPx,
}: {
onNavigateBack: () => void
@@ -391,6 +398,10 @@ function FixedSidebarToggle({
canNavigateForward: boolean
onNewChat: () => void
onOpenSearch: () => void
+ meetingState: MeetingTranscriptionState
+ meetingSummarizing: boolean
+ meetingAvailable: boolean
+ onToggleMeeting: () => void
leftInsetPx: number
}) {
const { toggleSidebar, state } = useSidebar()
@@ -426,6 +437,37 @@ function FixedSidebarToggle({
>
+        {meetingAvailable && (
+          <Tooltip>
+            <TooltipTrigger asChild>
+              <button onClick={onToggleMeeting} disabled={meetingSummarizing} aria-label="Toggle meeting notes">
+                {meetingSummarizing ? <LoaderIcon className="animate-spin" /> : meetingState === 'recording' ? <SquareIcon /> : <RadioIcon />}
+              </button>
+            </TooltipTrigger>
+            <TooltipContent>
+              {meetingSummarizing ? 'Generating meeting notes...' : meetingState === 'recording' ? 'Stop meeting notes' : 'Take new meeting notes'}
+            </TooltipContent>
+          </Tooltip>
+        )}
{/* Back / Forward navigation */}
{isCollapsed && (
<>
@@ -619,6 +661,11 @@ function App() {
const voiceRef = useRef(voice)
voiceRef.current = voice
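+ // The transcription hook can stop itself (e.g. after prolonged silence); route its
+ // callback through a ref so it can reach handleToggleMeeting, which is defined later.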
+ const handleToggleMeetingRef = useRef<(() => void) | undefined>(undefined)
+ const meetingTranscription = useMeetingTranscription(() => {
+ handleToggleMeetingRef.current?.()
+ })
+
// Check if voice is available on mount and when OAuth state changes
const refreshVoiceAvailability = useCallback(() => {
Promise.all([
@@ -3314,6 +3361,73 @@ function App() {
navigateToFile(notePath)
}, [loadDirectory, navigateToFile, fileTabs])
+ const meetingNotePathRef = useRef<string | null>(null)
+ const [meetingSummarizing, setMeetingSummarizing] = useState(false)
+ const [showMeetingPermissions, setShowMeetingPermissions] = useState(false)
+
+ const startMeetingAfterPermissions = useCallback(async () => {
+ setShowMeetingPermissions(false)
+ localStorage.setItem('meeting-permissions-acknowledged', '1')
+ const notePath = await meetingTranscription.start()
+ if (notePath) {
+ meetingNotePathRef.current = notePath
+ await handleVoiceNoteCreated(notePath)
+ }
+ }, [meetingTranscription, handleVoiceNoteCreated])
+
+ const handleToggleMeeting = useCallback(async () => {
+ if (meetingTranscription.state === 'recording') {
+ await meetingTranscription.stop()
+
+ // Read the final transcript and generate meeting notes via LLM
+ const notePath = meetingNotePathRef.current
+ if (notePath) {
+ setMeetingSummarizing(true)
+ try {
+ const result = await window.ipc.invoke('workspace:readFile', { path: notePath, encoding: 'utf8' })
+ const fileContent = result.data
+ if (fileContent && fileContent.trim()) {
+ // Extract meeting start time from frontmatter for calendar matching
+ const dateMatch = fileContent.match(/^date:\s*"(.+)"$/m)
+ const meetingStartTime = dateMatch?.[1]
+ const { notes } = await window.ipc.invoke('meeting:summarize', { transcript: fileContent, meetingStartTime })
+ if (notes) {
+ // Prepend meeting notes below the title but above the transcript
+ const { raw: fm, body: transcriptBody } = splitFrontmatter(fileContent)
+ // Strip the "# Meeting note" title from transcript body — we'll put it first
+ const bodyWithoutTitle = transcriptBody.replace(/^#\s+Meeting note\s*\n*/, '')
+ const newBody = '# Meeting note\n\n' + notes + '\n\n---\n\n## Raw transcript\n\n' + bodyWithoutTitle
+ const newContent = fm ? `${fm}\n${newBody}` : newBody
+ await window.ipc.invoke('workspace:writeFile', {
+ path: notePath,
+ data: newContent,
+ opts: { encoding: 'utf8' },
+ })
+ // Refresh the file view
+ await handleVoiceNoteCreated(notePath)
+ }
+ }
+ } catch (err) {
+ console.error('[meeting] Failed to generate meeting notes:', err)
+ }
+ setMeetingSummarizing(false)
+ meetingNotePathRef.current = null
+ }
+ } else if (meetingTranscription.state === 'idle') {
+ // Show permissions modal on first use (macOS only — Windows works out of the box)
+ if (isMac && !localStorage.getItem('meeting-permissions-acknowledged')) {
+ setShowMeetingPermissions(true)
+ return
+ }
+ const notePath = await meetingTranscription.start()
+ if (notePath) {
+ meetingNotePathRef.current = notePath
+ await handleVoiceNoteCreated(notePath)
+ }
+ }
+ }, [meetingTranscription, handleVoiceNoteCreated])
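+ // Keep the ref pointing at the latest handler so the hook's auto-stop callback
+ // always invokes the current toggle logic.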
+ handleToggleMeetingRef.current = handleToggleMeeting
+
const ensureWikiFile = useCallback(async (wikiPath: string) => {
const resolvedPath = toKnowledgePath(wikiPath)
if (!resolvedPath) return null
@@ -4176,6 +4290,10 @@ function App() {
canNavigateForward={canNavigateForward}
onNewChat={handleNewChatTab}
onOpenSearch={() => setIsSearchOpen(true)}
+ meetingState={meetingTranscription.state}
+ meetingSummarizing={meetingSummarizing}
+ meetingAvailable={voiceAvailable}
+ onToggleMeeting={() => { void handleToggleMeeting() }}
leftInsetPx={isMac ? MACOS_TRAFFIC_LIGHTS_RESERVED_PX : 0}
/>
@@ -4192,6 +4310,29 @@ function App() {
open={showOnboarding}
onComplete={handleOnboardingComplete}
/>
+      <Dialog open={showMeetingPermissions} onOpenChange={setShowMeetingPermissions}>
+        <DialogContent>
+          <DialogHeader>
+            <DialogTitle>Meeting notes permissions</DialogTitle>
+            <DialogDescription>
+              Meeting notes record your microphone and system audio. macOS will ask for
+              Microphone and Screen Recording permission the first time you start a recording.
+            </DialogDescription>
+          </DialogHeader>
+          <DialogFooter>
+            <Button onClick={() => { void startMeetingAfterPermissions() }}>Start recording</Button>
+          </DialogFooter>
+        </DialogContent>
+      </Dialog>
)
}
diff --git a/apps/x/apps/renderer/src/hooks/useMeetingTranscription.ts b/apps/x/apps/renderer/src/hooks/useMeetingTranscription.ts
new file mode 100644
index 00000000..3fc40cce
--- /dev/null
+++ b/apps/x/apps/renderer/src/hooks/useMeetingTranscription.ts
@@ -0,0 +1,374 @@
+import { useCallback, useRef, useState } from 'react';
+
+export type MeetingTranscriptionState = 'idle' | 'connecting' | 'recording' | 'stopping';
+
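+// Deepgram live-transcription settings: raw 16 kHz linear16 PCM, two channels
+// transcribed independently (multichannel) with diarization and smart formatting.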
+const DEEPGRAM_PARAMS = new URLSearchParams({
+ model: 'nova-3',
+ encoding: 'linear16',
+ sample_rate: '16000',
+ channels: '2',
+ multichannel: 'true',
+ diarize: 'true',
+ interim_results: 'true',
+ smart_format: 'true',
+ punctuate: 'true',
+ language: 'en',
+});
+const DEEPGRAM_LISTEN_URL = `wss://api.deepgram.com/v1/listen?${DEEPGRAM_PARAMS.toString()}`;
+
+// RMS threshold: system audio above this = "active" (speakers playing)
+const SYSTEM_AUDIO_GATE_THRESHOLD = 0.005;
+
+// Auto-stop after 2 minutes of silence (no transcript from Deepgram)
+const SILENCE_AUTO_STOP_MS = 2 * 60 * 1000;
+
+// ---------------------------------------------------------------------------
+// Headphone detection
+// ---------------------------------------------------------------------------
+async function detectHeadphones(): Promise<boolean> {
+ try {
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ const outputs = devices.filter(d => d.kind === 'audiooutput');
+ const defaultOutput = outputs.find(d => d.deviceId === 'default');
+ const label = (defaultOutput?.label ?? '').toLowerCase();
+ // Heuristic: built-in speakers won't match these patterns
+ const headphonePatterns = ['headphone', 'airpod', 'earpod', 'earphone', 'earbud', 'bluetooth', 'bt_', 'jabra', 'bose', 'sony wh', 'sony wf'];
+ return headphonePatterns.some(p => label.includes(p));
+ } catch {
+ return false;
+ }
+}
+
+// ---------------------------------------------------------------------------
+// Transcript formatting
+// ---------------------------------------------------------------------------
+interface TranscriptEntry {
+ speaker: string;
+ text: string;
+}
+
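+// Render transcript entries as a markdown note: YAML frontmatter, a "# Meeting note"
+// title, and speaker-labelled lines with a blank line whenever the speaker changes.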
+function formatTranscript(entries: TranscriptEntry[], date: string): string {
+ const lines = [
+ '---',
+ 'type: meeting',
+ 'source: rowboat',
+ 'title: Meeting note',
+ `date: "${date}"`,
+ '---',
+ '',
+ '# Meeting note',
+ '',
+ ];
+ for (let i = 0; i < entries.length; i++) {
+ if (i > 0 && entries[i].speaker !== entries[i - 1].speaker) {
+ lines.push('');
+ }
+ lines.push(`**${entries[i].speaker}:** ${entries[i].text}`);
+ lines.push('');
+ }
+ return lines.join('\n');
+}
+
+// ---------------------------------------------------------------------------
+// Hook
+// ---------------------------------------------------------------------------
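+// Manages a meeting-transcription session: streams microphone and system audio to
+// Deepgram over a WebSocket, accumulates speaker-labelled transcript entries, and
+// fires the onAutoStop callback after SILENCE_AUTO_STOP_MS without any transcript.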
+export function useMeetingTranscription(onAutoStop?: () => void) {
+ const [state, setState] = useState<MeetingTranscriptionState>('idle');
+ const wsRef = useRef<WebSocket | null>(null);
+ const micStreamRef = useRef<MediaStream | null>(null);
+ const systemStreamRef = useRef<MediaStream | null>(null);
+ const processorRef = useRef(null);
+ const audioCtxRef = useRef<AudioContext | null>(null);
+ const transcriptRef = useRef<TranscriptEntry[]>([]);
+ const interimRef = useRef