Fix voice memos and move metadata to properties

- Fix duplicate tab bug via onNoteCreatedRef (stale closure in recorder.onstop)
- Fix transcription not showing: read file from disk and update editor directly
- Move voice memo type/recorded/path from body to YAML frontmatter (Properties)
- Update note creation agent to detect voice memos via frontmatter

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Arjun 2026-03-16 23:02:24 +05:30
parent 2992196bb3
commit 98dfd0f159
3 changed files with 62 additions and 26 deletions

View file

@ -3263,9 +3263,38 @@ function App() {
return newSet
})
// Select the file to show it in the editor
// If tab already exists for this path (e.g. second call after transcription),
// force a content reload instead of creating a duplicate tab.
const existingTab = fileTabs.find(tab => tab.path === notePath)
if (existingTab) {
setActiveFileTabId(existingTab.id)
// Read fresh content from disk and update the editor
try {
const result = await window.ipc.invoke('workspace:readFile', { path: notePath, encoding: 'utf8' })
const { raw: fm, body } = splitFrontmatter(result.data)
frontmatterByPathRef.current.set(notePath, fm)
setFileContent(body)
setEditorContent(body)
editorContentRef.current = body
editorPathRef.current = notePath
initialContentRef.current = body
initialContentByPathRef.current.set(notePath, body)
setEditorContentByPath(prev => ({ ...prev, [notePath]: body }))
editorContentByPathRef.current.set(notePath, body)
// Bump editor session to force TipTap to pick up the new content
setEditorSessionByTabId(prev => ({
...prev,
[existingTab.id]: (prev[existingTab.id] ?? 0) + 1,
}))
} catch {
// File read failed — ignore
}
return
}
// First call — open the file in a tab
navigateToFile(notePath)
}, [loadDirectory, navigateToFile])
}, [loadDirectory, navigateToFile, fileTabs])
const ensureWikiFile = useCallback(async (wikiPath: string) => {
const resolvedPath = toKnowledgePath(wikiPath)

View file

@ -608,6 +608,9 @@ function VoiceNoteButton({ onNoteCreated }: { onNoteCreated?: (path: string) =>
const notePathRef = React.useRef<string | null>(null)
const timestampRef = React.useRef<string | null>(null)
const relativePathRef = React.useRef<string | null>(null)
// Keep a ref to always call the latest onNoteCreated (avoids stale closure in recorder.onstop)
const onNoteCreatedRef = React.useRef(onNoteCreated)
React.useEffect(() => { onNoteCreatedRef.current = onNoteCreated }, [onNoteCreated])
React.useEffect(() => {
window.ipc.invoke('workspace:readFile', {
@ -642,11 +645,12 @@ function VoiceNoteButton({ onNoteCreated }: { onNoteCreated?: (path: string) =>
recursive: true,
})
const initialContent = `# Voice Memo
**Type:** voice memo
**Recorded:** ${now.toLocaleString()}
**Path:** ${relativePath}
const initialContent = `---
type: voice memo
recorded: "${now.toISOString()}"
path: ${relativePath}
---
# Voice Memo
## Transcript
@ -659,7 +663,7 @@ function VoiceNoteButton({ onNoteCreated }: { onNoteCreated?: (path: string) =>
})
// Select the note so the user can see it
onNoteCreated?.(notePath)
onNoteCreatedRef.current?.(notePath)
// Start actual recording
const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
@ -707,11 +711,12 @@ function VoiceNoteButton({ onNoteCreated }: { onNoteCreated?: (path: string) =>
const currentNotePath = notePathRef.current
const currentRelativePath = relativePathRef.current
if (currentNotePath && currentRelativePath) {
const transcribingContent = `# Voice Memo
**Type:** voice memo
**Recorded:** ${new Date().toLocaleString()}
**Path:** ${currentRelativePath}
const transcribingContent = `---
type: voice memo
recorded: "${new Date().toISOString()}"
path: ${currentRelativePath}
---
# Voice Memo
## Transcript
@ -728,21 +733,23 @@ function VoiceNoteButton({ onNoteCreated }: { onNoteCreated?: (path: string) =>
const transcript = await transcribeWithDeepgram(blob)
if (currentNotePath && currentRelativePath) {
const finalContent = transcript
? `# Voice Memo
**Type:** voice memo
**Recorded:** ${new Date().toLocaleString()}
**Path:** ${currentRelativePath}
? `---
type: voice memo
recorded: "${new Date().toISOString()}"
path: ${currentRelativePath}
---
# Voice Memo
## Transcript
${transcript}
`
: `# Voice Memo
**Type:** voice memo
**Recorded:** ${new Date().toLocaleString()}
**Path:** ${currentRelativePath}
: `---
type: voice memo
recorded: "${new Date().toISOString()}"
path: ${currentRelativePath}
---
# Voice Memo
## Transcript
@ -755,7 +762,7 @@ ${transcript}
})
// Re-select to trigger refresh
onNoteCreated?.(currentNotePath)
onNoteCreatedRef.current?.(currentNotePath)
if (transcript) {
toast('Voice note transcribed', 'success')

View file

@ -165,8 +165,8 @@ workspace-readFile({ path: "{source_file}" })
- Email signature
**Voice memo indicators:**
- Has \`**Type:** voice memo\` field
- Has \`**Path:**\` field with path like \`Voice Memos/YYYY-MM-DD/...\`
- Has YAML frontmatter with \`type: voice memo\`
- Has frontmatter \`path:\` field like \`Voice Memos/YYYY-MM-DD/...\`
- Has \`## Transcript\` section
**Set processing mode:**