Meeting notes2 (#454)

Improve meeting transcription: screen recording permissions, collapsible transcript block
arkml 2026-03-30 22:31:49 +05:30 committed by GitHub
parent 30e1785fe2
commit 86cc2aaf73
11 changed files with 885 additions and 116 deletions


@@ -11,6 +11,9 @@ module.exports = {
icon: './icons/icon', // .icns extension added automatically
appBundleId: 'com.rowboat.app',
appCategoryType: 'public.app-category.productivity',
extendInfo: {
NSAudioCaptureUsageDescription: 'Rowboat needs access to system audio to transcribe meetings from other apps (Zoom, Meet, etc.)',
},
osxSign: {
batchCodesignCalls: true,
optionsForFile: () => ({
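For reference, a minimal sketch of how `extendInfo` behaves: Electron Forge merges these keys into the packaged app's Info.plist at package time. The `NSMicrophoneUsageDescription` key below is a hypothetical companion for the mic capture done via `getUserMedia` elsewhere in this commit; only the audio-capture key is actually added here.

```ts
// Sketch only: extendInfo keys are merged into the packaged Info.plist.
const config = {
  packagerConfig: {
    extendInfo: {
      NSAudioCaptureUsageDescription:
        'Rowboat needs access to system audio to transcribe meetings.',
      // Hypothetical companion key for getUserMedia mic capture (not in this commit):
      NSMicrophoneUsageDescription: 'Rowboat records your microphone during meetings.',
    },
  },
};
module.exports = config;
```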


@@ -1,4 +1,4 @@
import { ipcMain, BrowserWindow, shell, dialog } from 'electron';
import { ipcMain, BrowserWindow, shell, dialog, systemPreferences, desktopCapturer } from 'electron';
import { ipc } from '@x/shared';
import path from 'node:path';
import os from 'node:os';
@@ -719,6 +719,24 @@ export function setupIpcHandlers() {
return { success: false, error: 'Unknown format' };
},
'meeting:checkScreenPermission': async () => {
if (process.platform !== 'darwin') return { granted: true };
const status = systemPreferences.getMediaAccessStatus('screen');
console.log('[meeting] Screen recording permission status:', status);
if (status === 'granted') return { granted: true };
// Not granted — call desktopCapturer.getSources() to register the app
// in the macOS Screen Recording list. On first call this shows the
// native permission prompt (signed apps are remembered across restarts).
try { await desktopCapturer.getSources({ types: ['screen'] }); } catch { /* ignore */ }
// Re-check after the native prompt was dismissed
const statusAfter = systemPreferences.getMediaAccessStatus('screen');
console.log('[meeting] Screen recording permission status after prompt:', statusAfter);
return { granted: statusAfter === 'granted' };
},
'meeting:openScreenRecordingSettings': async () => {
await shell.openExternal('x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenCapture');
return { success: true };
},
'meeting:summarize': async (_event, args) => {
const notes = await summarizeMeeting(args.transcript, args.meetingStartTime, args.calendarEventJson);
return { notes };
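A condensed sketch of the check, prompt, re-check flow that `meeting:checkScreenPermission` implements above. On macOS, `systemPreferences.getMediaAccessStatus('screen')` returns `'not-determined'`, `'granted'`, `'denied'`, `'restricted'`, or `'unknown'`, and only `'granted'` allows capture:

```ts
import { systemPreferences, desktopCapturer } from 'electron';

// Minimal sketch of the handler's flow (main process, macOS only).
async function ensureScreenPermission(): Promise<boolean> {
  if (process.platform !== 'darwin') return true;
  if (systemPreferences.getMediaAccessStatus('screen') === 'granted') return true;
  // getSources() registers the app in the Screen Recording list and
  // triggers the native prompt on first call.
  try {
    await desktopCapturer.getSources({ types: ['screen'] });
  } catch {
    /* ignore; we only re-check the status below */
  }
  return systemPreferences.getMediaAccessStatus('screen') === 'granted';
}
```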


@@ -484,7 +484,7 @@ function FixedSidebarToggle({
)}
style={{ marginLeft: TITLEBAR_BUTTON_GAP_PX }}
>
{meetingSummarizing ? (
{meetingSummarizing || meetingState === 'connecting' ? (
<LoaderIcon className="size-4 animate-spin" />
) : meetingState === 'recording' ? (
<SquareIcon className="size-4 animate-pulse" />
@@ -494,7 +494,7 @@
</button>
</TooltipTrigger>
<TooltipContent side="bottom">
{meetingSummarizing ? 'Generating meeting notes...' : meetingState === 'recording' ? 'Stop meeting notes' : 'Take new meeting notes'}
{meetingSummarizing ? 'Generating meeting notes...' : meetingState === 'connecting' ? 'Starting transcription...' : meetingState === 'recording' ? 'Stop meeting notes' : 'Take new meeting notes'}
</TooltipContent>
</Tooltip>
)}
@@ -3417,9 +3417,9 @@ function App() {
const [meetingSummarizing, setMeetingSummarizing] = useState(false)
const [showMeetingPermissions, setShowMeetingPermissions] = useState(false)
const startMeetingAfterPermissions = useCallback(async () => {
setShowMeetingPermissions(false)
localStorage.setItem('meeting-permissions-acknowledged', '1')
const [checkingPermission, setCheckingPermission] = useState(false)
const startMeetingNow = useCallback(async () => {
const calEvent = pendingCalendarEventRef.current
pendingCalendarEventRef.current = undefined
const notePath = await meetingTranscription.start(calEvent)
@@ -3429,6 +3429,23 @@
}
}, [meetingTranscription, handleVoiceNoteCreated])
const handleCheckPermissionAndRetry = useCallback(async () => {
setCheckingPermission(true)
try {
const { granted } = await window.ipc.invoke('meeting:checkScreenPermission', null)
if (granted) {
setShowMeetingPermissions(false)
await startMeetingNow()
}
} finally {
setCheckingPermission(false)
}
}, [startMeetingNow])
const handleOpenScreenRecordingSettings = useCallback(async () => {
await window.ipc.invoke('meeting:openScreenRecordingSettings', null)
}, [])
const handleToggleMeeting = useCallback(async () => {
if (meetingTranscription.state === 'recording') {
await meetingTranscription.stop()
@@ -3450,16 +3467,15 @@
const calendarEventJson = calEventMatch?.[1]?.replace(/''/g, "'")
const { notes } = await window.ipc.invoke('meeting:summarize', { transcript: fileContent, meetingStartTime, calendarEventJson })
if (notes) {
// Prepend meeting notes below the title but above the transcript
const { raw: fm, body: transcriptBody } = splitFrontmatter(fileContent)
// Use frontmatter title as the heading (set from calendar event summary)
// Prepend meeting notes above the existing transcript block
const { raw: fm, body } = splitFrontmatter(fileContent)
const fmTitleMatch = fileContent.match(/^title:\s*(.+)$/m)
const noteTitle = fmTitleMatch?.[1]?.trim() || 'Meeting note'
// Strip any existing top-level heading from body
const bodyWithoutTitle = transcriptBody.replace(/^#\s+.+\s*\n*/, '')
// Also strip any title/heading the LLM may have generated
const noteTitle = fmTitleMatch?.[1]?.trim() || 'Meeting Notes'
const cleanedNotes = notes.replace(/^#{1,2}\s+.+\n+/, '')
const newBody = `# ${noteTitle}\n\n` + cleanedNotes + '\n\n---\n\n## Raw transcript\n\n' + bodyWithoutTitle
// Extract the existing transcript block and preserve it as-is
const transcriptBlockMatch = body.match(/(```transcript\n[\s\S]*?\n```)/)
const transcriptBlock = transcriptBlockMatch?.[1] || ''
const newBody = `# ${noteTitle}\n\n` + cleanedNotes + (transcriptBlock ? '\n\n' + transcriptBlock : '')
const newContent = fm ? `${fm}\n${newBody}` : newBody
await window.ipc.invoke('workspace:writeFile', {
path: notePath,
@@ -3477,20 +3493,18 @@ function App() {
meetingNotePathRef.current = null
}
} else if (meetingTranscription.state === 'idle') {
// Show permissions modal on first use (macOS only — Windows works out of the box)
if (isMac && !localStorage.getItem('meeting-permissions-acknowledged')) {
setShowMeetingPermissions(true)
return
}
const calEvent = pendingCalendarEventRef.current
pendingCalendarEventRef.current = undefined
const notePath = await meetingTranscription.start(calEvent)
if (notePath) {
meetingNotePathRef.current = notePath
await handleVoiceNoteCreated(notePath)
// On macOS, check screen recording permission before starting
if (isMac) {
const result = await window.ipc.invoke('meeting:checkScreenPermission', null)
console.log('[meeting] Permission check result:', result)
if (!result.granted) {
setShowMeetingPermissions(true)
return
}
}
await startMeetingNow()
}
}, [meetingTranscription, handleVoiceNoteCreated])
}, [meetingTranscription, handleVoiceNoteCreated, startMeetingNow])
handleToggleMeetingRef.current = handleToggleMeeting
// Listen for calendar block "join meeting & take notes" events
@@ -4421,23 +4435,25 @@ function App() {
<Dialog open={showMeetingPermissions} onOpenChange={setShowMeetingPermissions}>
<DialogContent showCloseButton={false}>
<DialogHeader>
<DialogTitle>Meeting transcription setup</DialogTitle>
<DialogTitle>Screen recording permission required</DialogTitle>
<DialogDescription>
Rowboat needs <strong>Screen Recording</strong> permission to capture meeting audio from other apps (Zoom, Meet, etc.).
Rowboat needs <strong>Screen Recording</strong> permission to capture meeting audio from other apps (Zoom, Meet, etc.). This feature won't work without it.
</DialogDescription>
</DialogHeader>
<div className="space-y-3 text-sm text-muted-foreground">
<p>To enable this:</p>
<ol className="list-decimal list-inside space-y-1.5">
<li>Open <strong>System Settings</strong> → <strong>Privacy & Security</strong></li>
<li>Click <strong>Screen Recording</strong></li>
<li>Open <strong>System Settings</strong> → <strong>Privacy & Security</strong> → <strong>Screen Recording</strong></li>
<li>Toggle on <strong>Rowboat</strong></li>
<li>You may need to restart the app after granting permission</li>
</ol>
</div>
<DialogFooter>
<Button variant="outline" onClick={() => setShowMeetingPermissions(false)}>Cancel</Button>
<Button onClick={() => { void startMeetingAfterPermissions() }}>Continue</Button>
<Button variant="outline" onClick={() => { void handleOpenScreenRecordingSettings() }}>Open System Settings</Button>
<Button onClick={() => { void handleCheckPermissionAndRetry() }} disabled={checkingPermission}>
{checkingPermission ? 'Checking...' : 'Check Again'}
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
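The summarize path earlier in this file preserves the raw transcript fence by extracting it with a regex before rebuilding the note body. A quick sketch with illustrative content:

```ts
// The transcript fence survives the rewrite verbatim; notes go above it.
const body = '# Standup\n\n```transcript\n{"transcript":"**You:** hi"}\n```';
const transcriptBlock = body.match(/(```transcript\n[\s\S]*?\n```)/)?.[1] ?? '';
const newBody = '# Standup\n\n<generated notes>\n\n' + transcriptBlock;
```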


@@ -15,6 +15,7 @@ import { ChartBlockExtension } from '@/extensions/chart-block'
import { TableBlockExtension } from '@/extensions/table-block'
import { CalendarBlockExtension } from '@/extensions/calendar-block'
import { EmailBlockExtension } from '@/extensions/email-block'
import { TranscriptBlockExtension } from '@/extensions/transcript-block'
import { Markdown } from 'tiptap-markdown'
import { useEffect, useCallback, useMemo, useRef, useState } from 'react'
import { Calendar, ChevronDown, ExternalLink } from 'lucide-react'
@@ -155,6 +156,8 @@ function getMarkdownWithBlankLines(editor: Editor): string {
blocks.push('```calendar\n' + (node.attrs?.data as string || '{}') + '\n```')
} else if (node.type === 'emailBlock') {
blocks.push('```email\n' + (node.attrs?.data as string || '{}') + '\n```')
} else if (node.type === 'transcriptBlock') {
blocks.push('```transcript\n' + (node.attrs?.data as string || '{}') + '\n```')
} else if (node.type === 'codeBlock') {
const lang = (node.attrs?.language as string) || ''
blocks.push('```' + lang + '\n' + nodeToText(node) + '\n```')
@@ -567,6 +570,7 @@ export function MarkdownEditor({
TableBlockExtension,
CalendarBlockExtension,
EmailBlockExtension,
TranscriptBlockExtension,
WikiLink.configure({
onCreate: wikiLinks?.onCreate
? (path) => {
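Round-tripping works in two halves: `getMarkdownWithBlankLines` serializes the node back to a fenced `transcript` block, and the new extension's `parseHTML` picks up the rendered `<pre><code class="language-transcript">` on load. The serialized form looks roughly like this (content illustrative):

```ts
// Serialized form in the markdown file. The JSON payload is what
// blocks.TranscriptBlockSchema validates when the note is reopened.
const serialized =
  '```transcript\n' +
  '{"transcript":"**You:** Hello\\n\\n**Alex:** Hi"}\n' +
  '```';
```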


@@ -0,0 +1,177 @@
import { mergeAttributes, Node } from '@tiptap/react'
import { ReactNodeViewRenderer, NodeViewWrapper } from '@tiptap/react'
import { ChevronDown, FileText } from 'lucide-react'
import { blocks } from '@x/shared'
import { useState, useMemo } from 'react'
interface TranscriptEntry {
speaker: string
text: string
}
function parseTranscript(raw: string): TranscriptEntry[] {
const entries: TranscriptEntry[] = []
const lines = raw.split('\n')
for (const line of lines) {
const trimmed = line.trim()
if (!trimmed) continue
// Match **Speaker Name:** text or **You:** text
const match = trimmed.match(/^\*\*(.+?):\*\*\s*(.*)$/)
if (match) {
entries.push({ speaker: match[1], text: match[2] })
} else if (entries.length > 0) {
// Continuation line — append to last entry
entries[entries.length - 1].text += ' ' + trimmed
}
}
return entries
}
function speakerColor(speaker: string): string {
// Simple hash to pick a consistent color per speaker
let hash = 0
for (let i = 0; i < speaker.length; i++) {
hash = speaker.charCodeAt(i) + ((hash << 5) - hash) // hash * 31 + charCode
}
const colors = [
'#3b82f6', // blue
'#06b6d4', // cyan
'#6366f1', // indigo
'#8b5cf6', // purple
'#0ea5e9', // sky
'#2563eb', // blue darker
'#7c3aed', // violet
]
return colors[Math.abs(hash) % colors.length]
}
function TranscriptBlockView({ node, getPos, editor }: {
node: { attrs: Record<string, unknown> }
getPos: () => number | undefined
// eslint-disable-next-line @typescript-eslint/no-explicit-any
editor: any
}) {
const raw = node.attrs.data as string
let config: blocks.TranscriptBlock | null = null
try {
config = blocks.TranscriptBlockSchema.parse(JSON.parse(raw))
} catch {
// fallback below
}
// Auto-detect: expand if this is the first real block (live recording),
// collapse if there's other content above (notes have been generated)
const isFirstBlock = useMemo(() => {
try {
const pos = getPos()
if (pos === undefined) return false
const firstChild = editor?.state?.doc?.firstChild
if (!firstChild) return true
// If the transcript block is right after the first node (heading), it's the main content
return pos <= (firstChild.nodeSize ?? 0) + 1
} catch {
return false
}
}, [getPos, editor])
const [expanded, setExpanded] = useState(isFirstBlock)
const entries = useMemo(() => {
if (!config) return []
return parseTranscript(config.transcript)
}, [config])
if (!config) {
return (
<NodeViewWrapper className="transcript-block-wrapper" data-type="transcript-block">
<div className="transcript-block-card transcript-block-error">
<FileText size={16} />
<span>Invalid transcript block</span>
</div>
</NodeViewWrapper>
)
}
return (
<NodeViewWrapper className="transcript-block-wrapper" data-type="transcript-block">
<div className="transcript-block-card" onMouseDown={(e) => e.stopPropagation()}>
<button
className="transcript-block-toggle"
onClick={(e) => { e.stopPropagation(); setExpanded(!expanded) }}
onMouseDown={(e) => e.stopPropagation()}
>
<ChevronDown size={14} className={`transcript-block-chevron ${expanded ? 'transcript-block-chevron-open' : ''}`} />
<FileText size={14} />
<span>Raw transcript</span>
</button>
{expanded && (
<div className="transcript-block-content">
{entries.length > 0 ? (
entries.map((entry, i) => (
<div key={i} className="transcript-entry">
<span className="transcript-speaker" style={{ color: speakerColor(entry.speaker) }}>
{entry.speaker}
</span>
<span className="transcript-text">{entry.text}</span>
</div>
))
) : (
<div className="transcript-raw">{config.transcript}</div>
)}
</div>
)}
</div>
</NodeViewWrapper>
)
}
export const TranscriptBlockExtension = Node.create({
name: 'transcriptBlock',
group: 'block',
atom: true,
selectable: true,
draggable: false,
addAttributes() {
return {
data: { default: '{}' },
}
},
parseHTML() {
return [{
tag: 'pre',
priority: 60,
getAttrs(element) {
const code = element.querySelector('code')
if (!code) return false
const cls = code.className || ''
if (cls.includes('language-transcript')) {
return { data: code.textContent || '{}' }
}
return false
},
}]
},
renderHTML({ HTMLAttributes }: { HTMLAttributes: Record<string, unknown> }) {
return ['div', mergeAttributes(HTMLAttributes, { 'data-type': 'transcript-block' })]
},
addNodeView() {
return ReactNodeViewRenderer(TranscriptBlockView)
},
addStorage() {
return {
markdown: {
serialize(state: { write: (text: string) => void; closeBlock: (node: unknown) => void }, node: { attrs: { data: string } }) {
state.write('```transcript\n' + node.attrs.data + '\n```')
state.closeBlock(node)
},
parse: {},
},
}
},
})
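A usage sketch for `parseTranscript` with illustrative sample text: a bold `**Speaker:**` prefix starts a new entry, and a bare line is appended to the previous entry as a continuation.

```ts
const sample = [
  '**You:** Hi everyone, can you hear me?',
  '**Alex:** Loud and clear.',
  'Let me share my screen.', // no speaker prefix: appended to Alex's entry
].join('\n');

parseTranscript(sample);
// => [
//   { speaker: 'You', text: 'Hi everyone, can you hear me?' },
//   { speaker: 'Alex', text: 'Loud and clear. Let me share my screen.' },
// ]
```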


@@ -60,7 +60,7 @@ export interface CalendarEventMeta {
}
function formatTranscript(entries: TranscriptEntry[], date: string, calendarEvent?: CalendarEventMeta): string {
const noteTitle = calendarEvent?.summary || 'Meeting note';
const noteTitle = calendarEvent?.summary || 'Meeting Notes';
const lines = [
'---',
'type: meeting',
@@ -89,13 +89,18 @@ function formatTranscript(entries: TranscriptEntry[], date: string, calendarEven
`# ${noteTitle}`,
'',
);
// Build the raw transcript text
const transcriptLines: string[] = [];
for (let i = 0; i < entries.length; i++) {
if (i > 0 && entries[i].speaker !== entries[i - 1].speaker) {
lines.push('');
transcriptLines.push('');
}
lines.push(`**${entries[i].speaker}:** ${entries[i].text}`);
lines.push('');
transcriptLines.push(`**${entries[i].speaker}:** ${entries[i].text}`);
transcriptLines.push('');
}
const transcriptText = transcriptLines.join('\n').trim();
const transcriptData = JSON.stringify({ transcript: transcriptText });
lines.push('```transcript', transcriptData, '```');
return lines.join('\n');
}
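Taken together, a note emitted by `formatTranscript` now looks roughly like this (frontmatter abridged, content illustrative):

```ts
const exampleNote = [
  '---',
  'type: meeting',
  'title: Meeting Notes',
  '---',
  '',
  '# Meeting Notes',
  '',
  '```transcript',
  '{"transcript":"**You:** Hi everyone\\n\\n**Alex:** Hello"}',
  '```',
].join('\n');
```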
@@ -187,52 +192,83 @@ export function useMeetingTranscription(onAutoStop?: () => void) {
if (state !== 'idle') return null;
setState('connecting');
// Detect headphones vs speakers
const usingHeadphones = await detectHeadphones();
console.log(`[meeting] Audio output mode: ${usingHeadphones ? 'headphones' : 'speakers'}`);
// Rowboat WebSocket + bearer token when signed in; else local Deepgram API key
let ws: WebSocket;
try {
const account = await refreshRowboatAccount();
if (
account?.signedIn &&
account.accessToken &&
account.config?.websocketApiUrl
) {
const listenUrl = buildDeepgramListenUrl(account.config.websocketApiUrl, DEEPGRAM_PARAMS);
console.log('[meeting] Using Rowboat WebSocket');
ws = new WebSocket(listenUrl, ['bearer', account.accessToken]);
} else {
const config = await window.ipc.invoke('voice:getConfig', null);
if (!config?.deepgram) {
console.error('[meeting] No Deepgram config available');
setState('idle');
return null;
// Run independent setup steps in parallel for faster startup
const [headphoneResult, wsResult, micResult, systemResult] = await Promise.allSettled([
// 1. Detect headphones vs speakers
detectHeadphones(),
// 2. Set up Deepgram WebSocket (account refresh + connect + wait for open)
(async () => {
const account = await refreshRowboatAccount();
let ws: WebSocket;
if (
account?.signedIn &&
account.accessToken &&
account.config?.websocketApiUrl
) {
const listenUrl = buildDeepgramListenUrl(account.config.websocketApiUrl, DEEPGRAM_PARAMS);
console.log('[meeting] Using Rowboat WebSocket');
ws = new WebSocket(listenUrl, ['bearer', account.accessToken]);
} else {
const config = await window.ipc.invoke('voice:getConfig', null);
if (!config?.deepgram) {
throw new Error('No Deepgram config available');
}
console.log('[meeting] Using Deepgram API key');
ws = new WebSocket(DEEPGRAM_LISTEN_URL, ['token', config.deepgram.apiKey]);
}
console.log('[meeting] Using Deepgram API key');
ws = new WebSocket(DEEPGRAM_LISTEN_URL, ['token', config.deepgram.apiKey]);
}
} catch (err) {
console.error('[meeting] Failed to connect Deepgram:', err);
setState('idle');
return null;
}
wsRef.current = ws;
const ok = await new Promise<boolean>((resolve) => {
ws.onopen = () => resolve(true);
ws.onerror = () => resolve(false);
setTimeout(() => resolve(false), 5000);
});
if (!ok) throw new Error('WebSocket failed to connect');
console.log('[meeting] WebSocket connected');
return ws;
})(),
// 3. Get mic stream
navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true,
},
}),
// 4. Get system audio via getDisplayMedia (loopback)
(async () => {
const stream = await navigator.mediaDevices.getDisplayMedia({ audio: true, video: true });
stream.getVideoTracks().forEach(t => t.stop());
if (stream.getAudioTracks().length === 0) {
stream.getTracks().forEach(t => t.stop());
throw new Error('No audio track from getDisplayMedia');
}
console.log('[meeting] System audio captured');
return stream;
})(),
]);
// Wait for WS open
const wsOk = await new Promise<boolean>((resolve) => {
ws.onopen = () => resolve(true);
ws.onerror = () => resolve(false);
setTimeout(() => resolve(false), 5000);
});
if (!wsOk) {
console.error('[meeting] WebSocket failed to connect');
// Check for failures — clean up any successful resources if something failed
const failed = wsResult.status === 'rejected'
|| micResult.status === 'rejected'
|| systemResult.status === 'rejected';
if (failed) {
if (wsResult.status === 'rejected') console.error('[meeting] WebSocket setup failed:', wsResult.reason);
if (micResult.status === 'rejected') console.error('[meeting] Microphone access denied:', micResult.reason);
if (systemResult.status === 'rejected') console.error('[meeting] System audio access denied:', systemResult.reason);
// Clean up any resources that did succeed
if (wsResult.status === 'fulfilled') { wsResult.value.close(); }
if (micResult.status === 'fulfilled') { micResult.value.getTracks().forEach(t => t.stop()); }
if (systemResult.status === 'fulfilled') { systemResult.value.getTracks().forEach(t => t.stop()); }
cleanup();
setState('idle');
return null;
}
console.log('[meeting] WebSocket connected');
const usingHeadphones = headphoneResult.status === 'fulfilled' ? headphoneResult.value : false;
console.log(`[meeting] Audio output mode: ${usingHeadphones ? 'headphones' : 'speakers'}`);
const ws = wsResult.value;
wsRef.current = ws;
// Set up WS message handler
transcriptRef.current = [];
@@ -283,43 +319,10 @@ export function useMeetingTranscription(onAutoStop?: () => void) {
wsRef.current = null;
};
// Get mic stream
let micStream: MediaStream;
try {
micStream = await navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true,
},
});
} catch (err) {
console.error('[meeting] Microphone access denied:', err);
cleanup();
setState('idle');
return null;
}
const micStream = micResult.value;
micStreamRef.current = micStream;
// Get system audio via getDisplayMedia (loopback)
let systemStream: MediaStream;
try {
systemStream = await navigator.mediaDevices.getDisplayMedia({ audio: true, video: true });
systemStream.getVideoTracks().forEach(t => t.stop());
} catch (err) {
console.error('[meeting] System audio access denied:', err);
cleanup();
setState('idle');
return null;
}
if (systemStream.getAudioTracks().length === 0) {
console.error('[meeting] No audio track from getDisplayMedia');
systemStream.getTracks().forEach(t => t.stop());
cleanup();
setState('idle');
return null;
}
console.log('[meeting] System audio captured');
const systemStream = systemResult.value;
systemStreamRef.current = systemStream;
// ----- Audio pipeline -----
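The setup above follows a general acquire-all-or-none pattern: run independent async steps with `Promise.allSettled`, and if any step rejects, release whatever the fulfilled steps acquired before bailing out. A standalone sketch:

```ts
// Acquire several resources in parallel; on any failure, release the
// ones that succeeded and return null.
async function acquireAllOrNone<T>(
  steps: Array<{ run: () => Promise<T>; release: (value: T) => void }>,
): Promise<T[] | null> {
  const results = await Promise.allSettled(steps.map((s) => s.run()));
  if (results.some((r) => r.status === 'rejected')) {
    results.forEach((r, i) => {
      if (r.status === 'fulfilled') steps[i].release(r.value);
    });
    return null;
  }
  return (results as PromiseFulfilledResult<T>[]).map((r) => r.value);
}
```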


@@ -618,7 +618,8 @@
.tiptap-editor .ProseMirror .chart-block-wrapper,
.tiptap-editor .ProseMirror .table-block-wrapper,
.tiptap-editor .ProseMirror .calendar-block-wrapper,
.tiptap-editor .ProseMirror .email-block-wrapper {
.tiptap-editor .ProseMirror .email-block-wrapper,
.tiptap-editor .ProseMirror .transcript-block-wrapper {
margin: 8px 0;
}
@@ -628,7 +629,8 @@
.tiptap-editor .ProseMirror .table-block-card,
.tiptap-editor .ProseMirror .calendar-block-card,
.tiptap-editor .ProseMirror .email-block-card,
.tiptap-editor .ProseMirror .email-draft-block-card {
.tiptap-editor .ProseMirror .email-draft-block-card,
.tiptap-editor .ProseMirror .transcript-block-card {
position: relative;
padding: 12px 14px;
border: 1px solid var(--border);
@@ -644,7 +646,8 @@
.tiptap-editor .ProseMirror .table-block-card:hover,
.tiptap-editor .ProseMirror .calendar-block-card:hover,
.tiptap-editor .ProseMirror .email-block-card:hover,
.tiptap-editor .ProseMirror .email-draft-block-card:hover {
.tiptap-editor .ProseMirror .email-draft-block-card:hover,
.tiptap-editor .ProseMirror .transcript-block-card:hover {
background-color: color-mix(in srgb, var(--muted) 70%, transparent);
}
@@ -1488,6 +1491,74 @@
margin-left: 4px;
}
/* Transcript block */
.tiptap-editor .ProseMirror .transcript-block-toggle {
display: flex;
align-items: center;
gap: 6px;
width: 100%;
padding: 0;
font-size: 13px;
font-weight: 500;
color: color-mix(in srgb, var(--foreground) 60%, transparent);
background: none;
border: none;
cursor: pointer;
transition: color 0.12s ease;
}
.tiptap-editor .ProseMirror .transcript-block-toggle:hover {
color: var(--foreground);
}
.tiptap-editor .ProseMirror .transcript-block-chevron {
transition: transform 0.15s ease;
flex-shrink: 0;
}
.tiptap-editor .ProseMirror .transcript-block-chevron-open {
transform: rotate(180deg);
}
.tiptap-editor .ProseMirror .transcript-block-content {
margin-top: 10px;
padding-top: 10px;
border-top: 1px solid color-mix(in srgb, var(--foreground) 10%, transparent);
display: flex;
flex-direction: column;
gap: 6px;
}
.tiptap-editor .ProseMirror .transcript-entry {
font-size: 13px;
line-height: 1.5;
}
.tiptap-editor .ProseMirror .transcript-speaker {
font-weight: 600;
margin-right: 6px;
}
.tiptap-editor .ProseMirror .transcript-text {
color: color-mix(in srgb, var(--foreground) 75%, transparent);
}
.tiptap-editor .ProseMirror .transcript-raw {
font-size: 13px;
line-height: 1.6;
color: color-mix(in srgb, var(--foreground) 70%, transparent);
white-space: pre-wrap;
word-break: break-word;
}
.tiptap-editor .ProseMirror .transcript-block-error {
display: flex;
align-items: center;
gap: 6px;
color: color-mix(in srgb, var(--foreground) 55%, transparent);
font-size: 13px;
}
/* Meeting event banner */
.meeting-event-banner {
position: relative;