Create a new note with the transcript immediately

This commit is contained in:
Arjun 2026-02-03 13:47:59 +05:30
parent 5fc05f279b
commit 81cbd1a891
2 changed files with 126 additions and 18 deletions

View file

@@ -1441,6 +1441,29 @@ function App() {
}, },
}), [tree, selectedPath, workspaceRoot, collectDirPaths]) }), [tree, selectedPath, workspaceRoot, collectDirPaths])
// Called after a voice note has been written to disk: refresh the tree,
// reveal the note's location, and open it in the editor.
const handleVoiceNoteCreated = useCallback(async (notePath: string) => {
  // Reload the workspace tree so the freshly written file/folder shows up.
  setTree(await loadDirectory())
  // Build the list of ancestor directory paths ("a", "a/b", ...) so each
  // parent folder of the note can be expanded in the sidebar.
  const segments = notePath.split('/')
  const ancestors = segments
    .slice(0, -1)
    .map((_, idx) => segments.slice(0, idx + 1).join('/'))
  setExpandedPaths(prev => new Set([...prev, ...ancestors]))
  // Leave the graph view and focus the note in the editor.
  setIsGraphOpen(false)
  setSelectedPath(notePath)
}, [loadDirectory])
const ensureWikiFile = useCallback(async (wikiPath: string) => { const ensureWikiFile = useCallback(async (wikiPath: string) => {
const resolvedPath = toKnowledgePath(wikiPath) const resolvedPath = toKnowledgePath(wikiPath)
if (!resolvedPath) return null if (!resolvedPath) return null
@@ -1687,6 +1710,7 @@ function App() {
expandedPaths={expandedPaths} expandedPaths={expandedPaths}
onSelectFile={toggleExpand} onSelectFile={toggleExpand}
knowledgeActions={knowledgeActions} knowledgeActions={knowledgeActions}
onVoiceNoteCreated={handleVoiceNoteCreated}
runs={runs} runs={runs}
currentRunId={runId} currentRunId={runId}
tasksActions={{ tasksActions={{

View file

@@ -90,6 +90,7 @@ type SidebarContentPanelProps = {
expandedPaths: Set<string> expandedPaths: Set<string>
onSelectFile: (path: string, kind: "file" | "dir") => void onSelectFile: (path: string, kind: "file" | "dir") => void
knowledgeActions: KnowledgeActions knowledgeActions: KnowledgeActions
onVoiceNoteCreated?: (path: string) => void
runs?: RunListItem[] runs?: RunListItem[]
currentRunId?: string | null currentRunId?: string | null
tasksActions?: TasksActions tasksActions?: TasksActions
@@ -106,6 +107,7 @@ export function SidebarContentPanel({
expandedPaths, expandedPaths,
onSelectFile, onSelectFile,
knowledgeActions, knowledgeActions,
onVoiceNoteCreated,
runs = [], runs = [],
currentRunId, currentRunId,
tasksActions, tasksActions,
@@ -128,6 +130,7 @@ export function SidebarContentPanel({
expandedPaths={expandedPaths} expandedPaths={expandedPaths}
onSelectFile={onSelectFile} onSelectFile={onSelectFile}
actions={knowledgeActions} actions={knowledgeActions}
onVoiceNoteCreated={onVoiceNoteCreated}
/> />
)} )}
{activeSection === "tasks" && ( {activeSection === "tasks" && (
@@ -174,13 +177,50 @@ async function transcribeWithDeepgram(audioBlob: Blob): Promise<string | null> {
} }
// Voice Note Recording Button // Voice Note Recording Button
function VoiceNoteButton() { function VoiceNoteButton({ onNoteCreated }: { onNoteCreated?: (path: string) => void }) {
const [isRecording, setIsRecording] = React.useState(false) const [isRecording, setIsRecording] = React.useState(false)
const mediaRecorderRef = React.useRef<MediaRecorder | null>(null) const mediaRecorderRef = React.useRef<MediaRecorder | null>(null)
const chunksRef = React.useRef<Blob[]>([]) const chunksRef = React.useRef<Blob[]>([])
const notePathRef = React.useRef<string | null>(null)
const timestampRef = React.useRef<string | null>(null)
const startRecording = async () => { const startRecording = async () => {
try { try {
// Generate timestamp and paths immediately
const now = new Date()
const timestamp = now.toISOString().replace(/[:.]/g, '-')
const dateStr = now.toISOString().split('T')[0] // YYYY-MM-DD
const noteName = `voice-memo-${timestamp}`
const notePath = `knowledge/Voice Memos/${dateStr}/${noteName}.md`
timestampRef.current = timestamp
notePathRef.current = notePath
// Create the note immediately with a "Recording..." placeholder
await window.ipc.invoke('workspace:mkdir', {
path: `knowledge/Voice Memos/${dateStr}`,
recursive: true,
})
const initialContent = `# Voice Memo
**Type:** voice memo
**Recorded:** ${now.toLocaleString()}
## Transcript
*Recording in progress...*
`
await window.ipc.invoke('workspace:writeFile', {
path: notePath,
data: initialContent,
opts: { encoding: 'utf8' },
})
// Select the note so the user can see it
onNoteCreated?.(notePath)
// Start actual recording
const stream = await navigator.mediaDevices.getUserMedia({ audio: true }) const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
const mimeType = MediaRecorder.isTypeSupported('audio/mp4') const mimeType = MediaRecorder.isTypeSupported('audio/mp4')
? 'audio/mp4' ? 'audio/mp4'
@@ -196,9 +236,9 @@ function VoiceNoteButton() {
stream.getTracks().forEach((t) => t.stop()) stream.getTracks().forEach((t) => t.stop())
const blob = new Blob(chunksRef.current, { type: mimeType }) const blob = new Blob(chunksRef.current, { type: mimeType })
const ext = mimeType === 'audio/mp4' ? 'm4a' : 'webm' const ext = mimeType === 'audio/mp4' ? 'm4a' : 'webm'
const timestamp = new Date().toISOString().replace(/[:.]/g, '-') const audioFilename = `voice-memo-${timestampRef.current}.${ext}`
const filename = `voice-memo-${timestamp}.${ext}`
// Save audio file to voice_memos folder (for backup/reference)
try { try {
await window.ipc.invoke('workspace:mkdir', { await window.ipc.invoke('workspace:mkdir', {
path: 'voice_memos', path: 'voice_memos',
@@ -214,34 +254,76 @@ function VoiceNoteButton() {
) )
await window.ipc.invoke('workspace:writeFile', { await window.ipc.invoke('workspace:writeFile', {
path: `voice_memos/${filename}`, path: `voice_memos/${audioFilename}`,
data: base64, data: base64,
opts: { encoding: 'base64' }, opts: { encoding: 'base64' },
}) })
toast('Voice memo saved', 'success')
} catch { } catch {
toast('Failed to save voice memo', 'error') console.error('Failed to save audio file')
return
} }
// Transcribe and save transcript alongside the audio file // Update note to show transcribing status
const transcript = await transcribeWithDeepgram(blob) const currentNotePath = notePathRef.current
if (transcript) { if (currentNotePath) {
const txtFilename = filename.replace(/\.[^.]+$/, '.txt') const transcribingContent = `# Voice Memo
**Type:** voice memo
**Recorded:** ${new Date().toLocaleString()}
## Transcript
*Transcribing...*
`
await window.ipc.invoke('workspace:writeFile', { await window.ipc.invoke('workspace:writeFile', {
path: `voice_memos/${txtFilename}`, path: currentNotePath,
data: transcript, data: transcribingContent,
opts: { encoding: 'utf8' }, opts: { encoding: 'utf8' },
}) })
toast('Transcription saved', 'success') }
} else {
toast('Transcription failed', 'error') // Transcribe and update the note with the transcript
const transcript = await transcribeWithDeepgram(blob)
if (currentNotePath) {
const finalContent = transcript
? `# Voice Memo
**Type:** voice memo
**Recorded:** ${new Date().toLocaleString()}
## Transcript
${transcript}
`
: `# Voice Memo
**Type:** voice memo
**Recorded:** ${new Date().toLocaleString()}
## Transcript
*Transcription failed. Please try again.*
`
await window.ipc.invoke('workspace:writeFile', {
path: currentNotePath,
data: finalContent,
opts: { encoding: 'utf8' },
})
// Re-select to trigger refresh
onNoteCreated?.(currentNotePath)
if (transcript) {
toast('Voice note transcribed', 'success')
} else {
toast('Transcription failed', 'error')
}
} }
} }
recorder.start() recorder.start()
mediaRecorderRef.current = recorder mediaRecorderRef.current = recorder
setIsRecording(true) setIsRecording(true)
toast('Recording started', 'success')
} catch { } catch {
toast('Could not access microphone', 'error') toast('Could not access microphone', 'error')
} }
@@ -283,15 +365,17 @@ function KnowledgeSection({
expandedPaths, expandedPaths,
onSelectFile, onSelectFile,
actions, actions,
onVoiceNoteCreated,
}: { }: {
tree: TreeNode[] tree: TreeNode[]
selectedPath: string | null selectedPath: string | null
expandedPaths: Set<string> expandedPaths: Set<string>
onSelectFile: (path: string, kind: "file" | "dir") => void onSelectFile: (path: string, kind: "file" | "dir") => void
actions: KnowledgeActions actions: KnowledgeActions
onVoiceNoteCreated?: (path: string) => void
}) { }) {
const isExpanded = expandedPaths.size > 0 const isExpanded = expandedPaths.size > 0
const quickActions = [ const quickActions = [
{ icon: FilePlus, label: "New Note", action: () => actions.createNote() }, { icon: FilePlus, label: "New Note", action: () => actions.createNote() },
{ icon: FolderPlus, label: "New Folder", action: () => actions.createFolder() }, { icon: FolderPlus, label: "New Folder", action: () => actions.createFolder() },
@@ -316,7 +400,7 @@ function KnowledgeSection({
<TooltipContent side="bottom">{action.label}</TooltipContent> <TooltipContent side="bottom">{action.label}</TooltipContent>
</Tooltip> </Tooltip>
))} ))}
<VoiceNoteButton /> <VoiceNoteButton onNoteCreated={onVoiceNoteCreated} />
<Tooltip> <Tooltip>
<TooltipTrigger asChild> <TooltipTrigger asChild>
<button <button