mirror of
https://github.com/samvallad33/vestige.git
synced 2026-05-11 00:32:36 +02:00
feat(v2.0.5): Intentional Amnesia — active forgetting via top-down inhibitory control
First AI memory system to model forgetting as a neuroscience-grounded PROCESS rather than passive decay. Adds the `suppress` MCP tool (#24), Rac1 cascade worker, migration V10, and dashboard forgetting indicators. Based on: - Anderson, Hanslmayr & Quaegebeur (2025), Nat Rev Neurosci — right lateral PFC as the domain-general inhibitory controller; SIF compounds with each stopping attempt. - Cervantes-Sandoval et al. (2020), Front Cell Neurosci PMC7477079 — Rac1 GTPase as the active synaptic destabilization mechanism. What's new: * `suppress` MCP tool — each call compounds `suppression_count` and subtracts a `0.15 × count` penalty (saturating at 80%) from retrieval scores during hybrid search. Distinct from delete (removes) and demote (one-shot). * Rac1 cascade worker — background sweep piggybacks the 6h consolidation loop, walks `memory_connections` edges from recently-suppressed seeds, applies attenuated FSRS decay to co-activated neighbors. You don't just forget Jake — you fade the café, the roommate, the birthday. * 24h labile window — reversible via `suppress({id, reverse: true})` within 24 hours. Matches Nader reconsolidation semantics. * Migration V10 — additive-only (`suppression_count`, `suppressed_at` + partial indices). All v2.0.x DBs upgrade seamlessly on first launch. * Dashboard: `ForgettingIndicator.svelte` pulses when suppressions are active. 3D graph nodes dim to 20% opacity when suppressed. New WebSocket events: `MemorySuppressed`, `MemoryUnsuppressed`, `Rac1CascadeSwept`. Heartbeat carries `suppressed_count`. * Search pipeline: SIF penalty inserted into the accessibility stage so it stacks on top of passive FSRS decay. * Tool count bumped 23 → 24. Cognitive modules 29 → 30. Memories persist — they are INHIBITED, not erased. `memory.get(id)` returns full content through any number of suppressions. The 24h labile window is a grace period for regret. 
Also fixes issue #31 (dashboard graph view buggy) as a companion UI bug discovered during the v2.0.5 audit cycle: * Root cause: node glow `SpriteMaterial` had no `map`, so `THREE.Sprite` rendered as a solid-coloured 1×1 plane. Additive blending + `UnrealBloomPass(0.8, 0.4, 0.85)` amplified the square edges into hard-edged glowing cubes. * Fix: shared 128×128 radial-gradient `CanvasTexture` singleton used as the sprite map. Retuned bloom to `(0.55, 0.6, 0.2)`. Halved fog density (0.008 → 0.0035). Edges bumped from dark navy `0x4a4a7a` to brand violet `0x8b5cf6` with higher opacity. Added explicit `scene.background` and a 2000-point starfield for depth. * 21 regression tests added in `ui-fixes.test.ts` locking in every invariant (shared texture singleton, depthWrite:false, scale ×6, bloom magic numbers via source regex, starfield presence). Tests: 1,284 Rust (+47) + 171 Vitest (+21) = 1,455 total, 0 failed Clippy: clean across all targets, zero warnings Release binary: 22.6MB, `cargo build --release -p vestige-mcp` green Versions: workspace aligned at 2.0.5 across all 6 crates/packages Closes #31
This commit is contained in:
parent
95bde93b49
commit
8178beb961
359 changed files with 8277 additions and 3416 deletions
|
|
@ -18,7 +18,7 @@ use std::time::Instant;
|
|||
use chrono::{DateTime, Utc};
|
||||
|
||||
use crate::memory::KnowledgeNode;
|
||||
use crate::neuroscience::emotional_memory::{EmotionalMemory, EmotionCategory};
|
||||
use crate::neuroscience::emotional_memory::{EmotionCategory, EmotionalMemory};
|
||||
use crate::neuroscience::importance_signals::ImportanceSignals;
|
||||
use crate::neuroscience::synaptic_tagging::SynapticTaggingSystem;
|
||||
|
||||
|
|
@ -197,13 +197,11 @@ impl DreamEngine {
|
|||
phases.push(phase2);
|
||||
|
||||
// ==================== PHASE 3: REM (Creative) ====================
|
||||
let (connections, emotional_processed, phase3) =
|
||||
self.phase_rem(&triaged, emotional_memory);
|
||||
let (connections, emotional_processed, phase3) = self.phase_rem(&triaged, emotional_memory);
|
||||
phases.push(phase3);
|
||||
|
||||
// ==================== PHASE 4: Integration ====================
|
||||
let (insights, phase4) =
|
||||
self.phase_integration(&connections, &triaged);
|
||||
let (insights, phase4) = self.phase_integration(&connections, &triaged);
|
||||
phases.push(phase4);
|
||||
|
||||
FourPhaseDreamResult {
|
||||
|
|
@ -262,26 +260,31 @@ impl DreamEngine {
|
|||
}
|
||||
|
||||
// Sort by importance (highest first)
|
||||
triaged.sort_by(|a, b| b.importance.partial_cmp(&a.importance).unwrap_or(std::cmp::Ordering::Equal));
|
||||
triaged.sort_by(|a, b| {
|
||||
b.importance
|
||||
.partial_cmp(&a.importance)
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
});
|
||||
|
||||
// Build replay queue: 70% high-value, 30% random noise floor
|
||||
let high_value_count = (triaged.len() as f64 * self.high_value_ratio).ceil() as usize;
|
||||
let random_count = triaged.len().saturating_sub(high_value_count);
|
||||
|
||||
let mut replay_queue: Vec<String> = triaged.iter()
|
||||
let mut replay_queue: Vec<String> = triaged
|
||||
.iter()
|
||||
.take(high_value_count)
|
||||
.map(|m| m.id.clone())
|
||||
.collect();
|
||||
|
||||
// Add random noise floor from the remaining memories
|
||||
if random_count > 0 {
|
||||
let remaining: Vec<&TriagedMemory> = triaged.iter()
|
||||
.skip(high_value_count)
|
||||
.collect();
|
||||
let remaining: Vec<&TriagedMemory> = triaged.iter().skip(high_value_count).collect();
|
||||
// Simple deterministic shuffle using content hash
|
||||
let mut noise: Vec<&TriagedMemory> = remaining;
|
||||
noise.sort_by_key(|m| {
|
||||
let hash: u64 = m.id.bytes().fold(0u64, |acc, b| acc.wrapping_mul(31).wrapping_add(b as u64));
|
||||
let hash: u64 =
|
||||
m.id.bytes()
|
||||
.fold(0u64, |acc, b| acc.wrapping_mul(31).wrapping_add(b as u64));
|
||||
hash
|
||||
});
|
||||
for m in noise.iter().take(random_count) {
|
||||
|
|
@ -307,7 +310,9 @@ impl DreamEngine {
|
|||
actions.push(format!(
|
||||
"Replay queue: {} high-value + {} noise = {} total",
|
||||
high_value_count.min(triaged.len()),
|
||||
replay_queue.len().saturating_sub(high_value_count.min(triaged.len())),
|
||||
replay_queue
|
||||
.len()
|
||||
.saturating_sub(high_value_count.min(triaged.len())),
|
||||
replay_queue.len()
|
||||
));
|
||||
|
||||
|
|
@ -333,16 +338,25 @@ impl DreamEngine {
|
|||
emotion: &EmotionCategory,
|
||||
) -> TriageCategory {
|
||||
// High emotional content
|
||||
if matches!(emotion, EmotionCategory::Frustration | EmotionCategory::Urgency | EmotionCategory::Joy | EmotionCategory::Surprise)
|
||||
&& node.sentiment_magnitude > 0.4 {
|
||||
return TriageCategory::Emotional;
|
||||
}
|
||||
if matches!(
|
||||
emotion,
|
||||
EmotionCategory::Frustration
|
||||
| EmotionCategory::Urgency
|
||||
| EmotionCategory::Joy
|
||||
| EmotionCategory::Surprise
|
||||
) && node.sentiment_magnitude > 0.4
|
||||
{
|
||||
return TriageCategory::Emotional;
|
||||
}
|
||||
|
||||
// Future-relevant (intentions, TODOs)
|
||||
let content_lower = node.content.to_lowercase();
|
||||
if content_lower.contains("todo") || content_lower.contains("remind")
|
||||
|| content_lower.contains("intention") || content_lower.contains("next time")
|
||||
|| content_lower.contains("plan to") {
|
||||
if content_lower.contains("todo")
|
||||
|| content_lower.contains("remind")
|
||||
|| content_lower.contains("intention")
|
||||
|| content_lower.contains("next time")
|
||||
|| content_lower.contains("plan to")
|
||||
{
|
||||
return TriageCategory::FutureRelevant;
|
||||
}
|
||||
|
||||
|
|
@ -403,7 +417,8 @@ impl DreamEngine {
|
|||
|
||||
actions.push(format!(
|
||||
"Processed {} waves of {} memories",
|
||||
wave_count, replay_queue.len()
|
||||
wave_count,
|
||||
replay_queue.len()
|
||||
));
|
||||
actions.push(format!(
|
||||
"Strengthened {} memories via synaptic tagging",
|
||||
|
|
@ -459,7 +474,11 @@ impl DreamEngine {
|
|||
// Group memories by primary tag for cross-domain pairing
|
||||
let mut tag_groups: HashMap<String, Vec<&TriagedMemory>> = HashMap::new();
|
||||
for tm in triaged {
|
||||
let primary_tag = tm.tags.first().cloned().unwrap_or_else(|| "untagged".to_string());
|
||||
let primary_tag = tm
|
||||
.tags
|
||||
.first()
|
||||
.cloned()
|
||||
.unwrap_or_else(|| "untagged".to_string());
|
||||
tag_groups.entry(primary_tag).or_default().push(tm);
|
||||
}
|
||||
|
||||
|
|
@ -487,7 +506,11 @@ impl DreamEngine {
|
|||
if similarity > self.min_insight_confidence {
|
||||
let conn_type = self.classify_connection(mem_a, mem_b, similarity);
|
||||
let insight = self.generate_connection_insight(
|
||||
mem_a, mem_b, &tag_keys[i], &tag_keys[j], conn_type,
|
||||
mem_a,
|
||||
mem_b,
|
||||
&tag_keys[i],
|
||||
&tag_keys[j],
|
||||
conn_type,
|
||||
);
|
||||
|
||||
connections.push(CreativeConnection {
|
||||
|
|
@ -531,7 +554,10 @@ impl DreamEngine {
|
|||
// Pattern extraction: find repeated patterns across memories
|
||||
let pattern_count = self.extract_patterns(triaged, &mut connections);
|
||||
if pattern_count > 0 {
|
||||
actions.push(format!("Pattern extraction: {} shared patterns found", pattern_count));
|
||||
actions.push(format!(
|
||||
"Pattern extraction: {} shared patterns found",
|
||||
pattern_count
|
||||
));
|
||||
}
|
||||
|
||||
let phase = PhaseResult {
|
||||
|
|
@ -545,11 +571,13 @@ impl DreamEngine {
|
|||
}
|
||||
|
||||
fn content_similarity(&self, a: &str, b: &str) -> f64 {
|
||||
let words_a: HashSet<&str> = a.split_whitespace()
|
||||
let words_a: HashSet<&str> = a
|
||||
.split_whitespace()
|
||||
.map(|w| w.trim_matches(|c: char| !c.is_alphanumeric()))
|
||||
.filter(|w| w.len() > 3)
|
||||
.collect();
|
||||
let words_b: HashSet<&str> = b.split_whitespace()
|
||||
let words_b: HashSet<&str> = b
|
||||
.split_whitespace()
|
||||
.map(|w| w.trim_matches(|c: char| !c.is_alphanumeric()))
|
||||
.filter(|w| w.len() > 3)
|
||||
.collect();
|
||||
|
|
@ -598,8 +626,16 @@ impl DreamEngine {
|
|||
tag_b: &str,
|
||||
conn_type: CreativeConnectionType,
|
||||
) -> String {
|
||||
let a_summary = if a.content.len() > 60 { &a.content[..60] } else { &a.content };
|
||||
let b_summary = if b.content.len() > 60 { &b.content[..60] } else { &b.content };
|
||||
let a_summary = if a.content.len() > 60 {
|
||||
&a.content[..60]
|
||||
} else {
|
||||
&a.content
|
||||
};
|
||||
let b_summary = if b.content.len() > 60 {
|
||||
&b.content[..60]
|
||||
} else {
|
||||
&b.content
|
||||
};
|
||||
|
||||
match conn_type {
|
||||
CreativeConnectionType::CrossDomain => {
|
||||
|
|
@ -638,7 +674,9 @@ impl DreamEngine {
|
|||
let mut bigram_index: HashMap<(String, String), Vec<usize>> = HashMap::new();
|
||||
|
||||
for (idx, tm) in triaged.iter().enumerate() {
|
||||
let words: Vec<String> = tm.content.split_whitespace()
|
||||
let words: Vec<String> = tm
|
||||
.content
|
||||
.split_whitespace()
|
||||
.map(|w| w.to_lowercase())
|
||||
.filter(|w| w.len() > 3)
|
||||
.collect();
|
||||
|
|
@ -656,18 +694,21 @@ impl DreamEngine {
|
|||
pattern_count += 1;
|
||||
// Create a connection between the first and last memory sharing this pattern
|
||||
if let (Some(&first), Some(&last)) = (indices.first(), indices.last())
|
||||
&& first != last {
|
||||
connections.push(CreativeConnection {
|
||||
memory_a_id: triaged[first].id.clone(),
|
||||
memory_b_id: triaged[last].id.clone(),
|
||||
insight: format!(
|
||||
"Shared pattern '{} {}' found across {} memories",
|
||||
bigram.0, bigram.1, indices.len()
|
||||
),
|
||||
confidence: (indices.len() as f64 / triaged.len() as f64).min(1.0),
|
||||
connection_type: CreativeConnectionType::CrossDomain,
|
||||
});
|
||||
}
|
||||
&& first != last
|
||||
{
|
||||
connections.push(CreativeConnection {
|
||||
memory_a_id: triaged[first].id.clone(),
|
||||
memory_b_id: triaged[last].id.clone(),
|
||||
insight: format!(
|
||||
"Shared pattern '{} {}' found across {} memories",
|
||||
bigram.0,
|
||||
bigram.1,
|
||||
indices.len()
|
||||
),
|
||||
confidence: (indices.len() as f64 / triaged.len() as f64).min(1.0),
|
||||
connection_type: CreativeConnectionType::CrossDomain,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -692,7 +733,8 @@ impl DreamEngine {
|
|||
let mut actions = Vec::new();
|
||||
|
||||
// Validate connections: keep only those above threshold
|
||||
let valid_connections: Vec<&CreativeConnection> = connections.iter()
|
||||
let valid_connections: Vec<&CreativeConnection> = connections
|
||||
.iter()
|
||||
.filter(|c| c.confidence >= self.validation_threshold)
|
||||
.collect();
|
||||
|
||||
|
|
@ -739,7 +781,9 @@ impl DreamEngine {
|
|||
insights.sort_by(|a, b| {
|
||||
let score_a = a.confidence * a.novelty;
|
||||
let score_b = b.confidence * b.novelty;
|
||||
score_b.partial_cmp(&score_a).unwrap_or(std::cmp::Ordering::Equal)
|
||||
score_b
|
||||
.partial_cmp(&score_a)
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
});
|
||||
|
||||
// Cap at 20 insights
|
||||
|
|
@ -753,7 +797,10 @@ impl DreamEngine {
|
|||
} else {
|
||||
triaged.iter().map(|m| m.retention_strength).sum::<f64>() / triaged.len() as f64
|
||||
};
|
||||
actions.push(format!("Average retention across dreamed memories: {:.2}", avg_retention));
|
||||
actions.push(format!(
|
||||
"Average retention across dreamed memories: {:.2}",
|
||||
avg_retention
|
||||
));
|
||||
|
||||
let phase = PhaseResult {
|
||||
phase: DreamPhase::Integration,
|
||||
|
|
@ -840,6 +887,8 @@ mod tests {
|
|||
temporal_level: None,
|
||||
has_embedding: None,
|
||||
embedding_model: None,
|
||||
suppression_count: 0,
|
||||
suppressed_at: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -863,13 +912,15 @@ mod tests {
|
|||
let importance = ImportanceSignals::new();
|
||||
let mut synaptic = SynapticTaggingSystem::new();
|
||||
|
||||
let memories: Vec<KnowledgeNode> = (0..10).map(|i| {
|
||||
make_test_node(
|
||||
&format!("mem-{}", i),
|
||||
&format!("Test memory content for dream cycle number {}", i),
|
||||
&["test"],
|
||||
)
|
||||
}).collect();
|
||||
let memories: Vec<KnowledgeNode> = (0..10)
|
||||
.map(|i| {
|
||||
make_test_node(
|
||||
&format!("mem-{}", i),
|
||||
&format!("Test memory content for dream cycle number {}", i),
|
||||
&["test"],
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let result = engine.run(&memories, &mut emotional, &importance, &mut synaptic);
|
||||
|
||||
|
|
@ -890,7 +941,11 @@ mod tests {
|
|||
|
||||
let memories = vec![
|
||||
make_emotional_node("emo-1", "Critical production crash error panic!", 0.9),
|
||||
make_test_node("future-1", "TODO: remind me to add caching next time", &["planning"]),
|
||||
make_test_node(
|
||||
"future-1",
|
||||
"TODO: remind me to add caching next time",
|
||||
&["planning"],
|
||||
),
|
||||
make_test_node("standard-1", "The function returns a string", &["docs"]),
|
||||
];
|
||||
|
||||
|
|
@ -915,13 +970,15 @@ mod tests {
|
|||
let mut emotional = EmotionalMemory::new();
|
||||
let importance = ImportanceSignals::new();
|
||||
|
||||
let memories: Vec<KnowledgeNode> = (0..20).map(|i| {
|
||||
make_test_node(
|
||||
&format!("mem-{}", i),
|
||||
&format!("Memory with varying importance content {}", i),
|
||||
&["test"],
|
||||
)
|
||||
}).collect();
|
||||
let memories: Vec<KnowledgeNode> = (0..20)
|
||||
.map(|i| {
|
||||
make_test_node(
|
||||
&format!("mem-{}", i),
|
||||
&format!("Memory with varying importance content {}", i),
|
||||
&["test"],
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let (_triaged, queue, _phase) = engine.phase_nrem1(&memories, &mut emotional, &importance);
|
||||
|
||||
|
|
@ -934,8 +991,8 @@ mod tests {
|
|||
let engine = DreamEngine::new();
|
||||
let mut synaptic = SynapticTaggingSystem::new();
|
||||
|
||||
let triaged: Vec<TriagedMemory> = (0..10).map(|i| {
|
||||
TriagedMemory {
|
||||
let triaged: Vec<TriagedMemory> = (0..10)
|
||||
.map(|i| TriagedMemory {
|
||||
id: format!("mem-{}", i),
|
||||
content: format!("Test memory {}", i),
|
||||
importance: 0.5,
|
||||
|
|
@ -945,8 +1002,8 @@ mod tests {
|
|||
retention_strength: 0.7,
|
||||
emotional_valence: 0.0,
|
||||
is_flashbulb: false,
|
||||
}
|
||||
}).collect();
|
||||
})
|
||||
.collect();
|
||||
|
||||
let replay_queue: Vec<String> = triaged.iter().map(|m| m.id.clone()).collect();
|
||||
|
||||
|
|
@ -1031,7 +1088,10 @@ mod tests {
|
|||
|
||||
assert_eq!(phase.phase, DreamPhase::Rem);
|
||||
// Should find connection via shared "error handling" and "pattern" words
|
||||
assert!(!connections.is_empty(), "Should find cross-domain error handling pattern");
|
||||
assert!(
|
||||
!connections.is_empty(),
|
||||
"Should find cross-domain error handling pattern"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -1039,23 +1099,25 @@ mod tests {
|
|||
let engine = DreamEngine::new();
|
||||
let mut emotional = EmotionalMemory::new();
|
||||
|
||||
let triaged = vec![
|
||||
TriagedMemory {
|
||||
id: "angry-1".to_string(),
|
||||
content: "Critical production error crashed the entire system".to_string(),
|
||||
importance: 0.8,
|
||||
category: TriageCategory::Emotional,
|
||||
tags: vec!["incident".to_string()],
|
||||
created_at: Utc::now(),
|
||||
retention_strength: 0.9,
|
||||
emotional_valence: -0.8,
|
||||
is_flashbulb: false,
|
||||
},
|
||||
];
|
||||
let triaged = vec![TriagedMemory {
|
||||
id: "angry-1".to_string(),
|
||||
content: "Critical production error crashed the entire system".to_string(),
|
||||
importance: 0.8,
|
||||
category: TriageCategory::Emotional,
|
||||
tags: vec!["incident".to_string()],
|
||||
created_at: Utc::now(),
|
||||
retention_strength: 0.9,
|
||||
emotional_valence: -0.8,
|
||||
is_flashbulb: false,
|
||||
}];
|
||||
|
||||
let (_connections, emotional_processed, _phase) = engine.phase_rem(&triaged, &mut emotional);
|
||||
let (_connections, emotional_processed, _phase) =
|
||||
engine.phase_rem(&triaged, &mut emotional);
|
||||
|
||||
assert_eq!(emotional_processed, 1, "Negative emotional memory should be processed");
|
||||
assert_eq!(
|
||||
emotional_processed, 1,
|
||||
"Negative emotional memory should be processed"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -1120,7 +1182,11 @@ mod tests {
|
|||
"error handling with Result type pattern",
|
||||
"error handling with try-catch pattern",
|
||||
);
|
||||
assert!(sim > 0.2, "Similar content should have >0.2 Jaccard: {}", sim);
|
||||
assert!(
|
||||
sim > 0.2,
|
||||
"Similar content should have >0.2 Jaccard: {}",
|
||||
sim
|
||||
);
|
||||
|
||||
let dissim = engine.content_similarity(
|
||||
"Rust memory management with ownership",
|
||||
|
|
@ -1151,16 +1217,19 @@ mod tests {
|
|||
let importance = ImportanceSignals::new();
|
||||
let mut synaptic = SynapticTaggingSystem::new();
|
||||
|
||||
let memories: Vec<KnowledgeNode> = (0..5).map(|i| {
|
||||
make_test_node(&format!("m{}", i), &format!("Content {}", i), &["test"])
|
||||
}).collect();
|
||||
let memories: Vec<KnowledgeNode> = (0..5)
|
||||
.map(|i| make_test_node(&format!("m{}", i), &format!("Content {}", i), &["test"]))
|
||||
.collect();
|
||||
|
||||
let result = engine.run(&memories, &mut emotional, &importance, &mut synaptic);
|
||||
|
||||
for phase in &result.phases {
|
||||
// Duration should be non-negative (might be 0ms for fast operations)
|
||||
assert!(phase.duration_ms < 10000);
|
||||
assert!(!phase.actions.is_empty(), "Each phase should report actions");
|
||||
assert!(
|
||||
!phase.actions.is_empty(),
|
||||
"Each phase should report actions"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1170,7 +1239,11 @@ mod tests {
|
|||
let mut emotional = EmotionalMemory::new();
|
||||
let importance = ImportanceSignals::new();
|
||||
|
||||
let mut node = make_test_node("flash-1", "CRITICAL: Production server crash! Emergency rollback needed immediately!", &["incident"]);
|
||||
let mut node = make_test_node(
|
||||
"flash-1",
|
||||
"CRITICAL: Production server crash! Emergency rollback needed immediately!",
|
||||
&["incident"],
|
||||
);
|
||||
node.sentiment_magnitude = 0.9;
|
||||
|
||||
let (triaged, _queue, phase) = engine.phase_nrem1(&[node], &mut emotional, &importance);
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue