mirror of
https://github.com/samvallad33/vestige.git
synced 2026-05-09 15:52:37 +02:00
feat(v2.0.5): Intentional Amnesia — active forgetting via top-down inhibitory control
First AI memory system to model forgetting as a neuroscience-grounded PROCESS rather than passive decay. Adds the `suppress` MCP tool (#24), Rac1 cascade worker, migration V10, and dashboard forgetting indicators. Based on: - Anderson, Hanslmayr & Quaegebeur (2025), Nat Rev Neurosci — right lateral PFC as the domain-general inhibitory controller; SIF compounds with each stopping attempt. - Cervantes-Sandoval et al. (2020), Front Cell Neurosci PMC7477079 — Rac1 GTPase as the active synaptic destabilization mechanism. What's new: * `suppress` MCP tool — each call compounds `suppression_count` and subtracts a `0.15 × count` penalty (saturating at 80%) from retrieval scores during hybrid search. Distinct from delete (removes) and demote (one-shot). * Rac1 cascade worker — background sweep piggybacks the 6h consolidation loop, walks `memory_connections` edges from recently-suppressed seeds, applies attenuated FSRS decay to co-activated neighbors. You don't just forget Jake — you fade the café, the roommate, the birthday. * 24h labile window — reversible via `suppress({id, reverse: true})` within 24 hours. Matches Nader reconsolidation semantics. * Migration V10 — additive-only (`suppression_count`, `suppressed_at` + partial indices). All v2.0.x DBs upgrade seamlessly on first launch. * Dashboard: `ForgettingIndicator.svelte` pulses when suppressions are active. 3D graph nodes dim to 20% opacity when suppressed. New WebSocket events: `MemorySuppressed`, `MemoryUnsuppressed`, `Rac1CascadeSwept`. Heartbeat carries `suppressed_count`. * Search pipeline: SIF penalty inserted into the accessibility stage so it stacks on top of passive FSRS decay. * Tool count bumped 23 → 24. Cognitive modules 29 → 30. Memories persist — they are INHIBITED, not erased. `memory.get(id)` returns full content through any number of suppressions. The 24h labile window is a grace period for regret. 
Also fixes issue #31 (dashboard graph view buggy) as a companion UI bug discovered during the v2.0.5 audit cycle: * Root cause: node glow `SpriteMaterial` had no `map`, so `THREE.Sprite` rendered as a solid-coloured 1×1 plane. Additive blending + `UnrealBloomPass(0.8, 0.4, 0.85)` amplified the square edges into hard-edged glowing cubes. * Fix: shared 128×128 radial-gradient `CanvasTexture` singleton used as the sprite map. Retuned bloom to `(0.55, 0.6, 0.2)`. Halved fog density (0.008 → 0.0035). Edges bumped from dark navy `0x4a4a7a` to brand violet `0x8b5cf6` with higher opacity. Added explicit `scene.background` and a 2000-point starfield for depth. * 21 regression tests added in `ui-fixes.test.ts` locking in every invariant (shared texture singleton, depthWrite:false, scale ×6, bloom magic numbers via source regex, starfield presence). Tests: 1,284 Rust (+47) + 171 Vitest (+21) = 1,455 total, 0 failed Clippy: clean across all targets, zero warnings Release binary: 22.6MB, `cargo build --release -p vestige-mcp` green Versions: workspace aligned at 2.0.5 across all 6 crates/packages Closes #31
This commit is contained in:
parent
95bde93b49
commit
8178beb961
359 changed files with 8277 additions and 3416 deletions
|
|
@ -587,9 +587,10 @@ impl ContextCapture {
|
|||
|
||||
// Java Spring
|
||||
if let Ok(content) = fs::read_to_string(self.project_root.join("pom.xml"))
|
||||
&& content.contains("spring") {
|
||||
frameworks.push(Framework::Spring);
|
||||
}
|
||||
&& content.contains("spring")
|
||||
{
|
||||
frameworks.push(Framework::Spring);
|
||||
}
|
||||
|
||||
// Ruby Rails
|
||||
if self.file_exists("config/routes.rb") {
|
||||
|
|
@ -613,36 +614,40 @@ impl ContextCapture {
|
|||
fn detect_project_name(&self) -> Result<Option<String>> {
|
||||
// Try Cargo.toml
|
||||
if let Ok(content) = fs::read_to_string(self.project_root.join("Cargo.toml"))
|
||||
&& let Some(name) = self.extract_toml_value(&content, "name") {
|
||||
return Ok(Some(name));
|
||||
}
|
||||
&& let Some(name) = self.extract_toml_value(&content, "name")
|
||||
{
|
||||
return Ok(Some(name));
|
||||
}
|
||||
|
||||
// Try package.json
|
||||
if let Ok(content) = fs::read_to_string(self.project_root.join("package.json"))
|
||||
&& let Some(name) = self.extract_json_value(&content, "name") {
|
||||
return Ok(Some(name));
|
||||
}
|
||||
&& let Some(name) = self.extract_json_value(&content, "name")
|
||||
{
|
||||
return Ok(Some(name));
|
||||
}
|
||||
|
||||
// Try pyproject.toml
|
||||
if let Ok(content) = fs::read_to_string(self.project_root.join("pyproject.toml"))
|
||||
&& let Some(name) = self.extract_toml_value(&content, "name") {
|
||||
return Ok(Some(name));
|
||||
}
|
||||
&& let Some(name) = self.extract_toml_value(&content, "name")
|
||||
{
|
||||
return Ok(Some(name));
|
||||
}
|
||||
|
||||
// Try go.mod
|
||||
if let Ok(content) = fs::read_to_string(self.project_root.join("go.mod"))
|
||||
&& let Some(line) = content.lines().next()
|
||||
&& line.starts_with("module ") {
|
||||
let name = line
|
||||
.trim_start_matches("module ")
|
||||
.split('/')
|
||||
.next_back()
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
if !name.is_empty() {
|
||||
return Ok(Some(name));
|
||||
}
|
||||
}
|
||||
&& line.starts_with("module ")
|
||||
{
|
||||
let name = line
|
||||
.trim_start_matches("module ")
|
||||
.split('/')
|
||||
.next_back()
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
if !name.is_empty() {
|
||||
return Ok(Some(name));
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to directory name
|
||||
Ok(self
|
||||
|
|
@ -729,17 +734,18 @@ impl ContextCapture {
|
|||
for test_dir in test_dirs {
|
||||
let test_path = self.project_root.join(test_dir);
|
||||
if test_path.exists()
|
||||
&& let Ok(entries) = fs::read_dir(&test_path) {
|
||||
for entry in entries.filter_map(|e| e.ok()) {
|
||||
let entry_path = entry.path();
|
||||
if let Some(entry_stem) = entry_path.file_stem() {
|
||||
let entry_stem = entry_stem.to_string_lossy();
|
||||
if entry_stem.contains(&stem) {
|
||||
related.push(entry_path);
|
||||
}
|
||||
&& let Ok(entries) = fs::read_dir(&test_path)
|
||||
{
|
||||
for entry in entries.filter_map(|e| e.ok()) {
|
||||
let entry_path = entry.path();
|
||||
if let Some(entry_stem) = entry_path.file_stem() {
|
||||
let entry_stem = entry_stem.to_string_lossy();
|
||||
if entry_stem.contains(&stem) {
|
||||
related.push(entry_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For Rust, look for mod.rs in same directory
|
||||
|
|
@ -794,38 +800,40 @@ impl ContextCapture {
|
|||
// For Rust, use the parent directory name relative to src/
|
||||
if path.extension().map(|e| e == "rs").unwrap_or(false)
|
||||
&& let Ok(relative) = path.strip_prefix(&self.project_root)
|
||||
&& let Ok(src_relative) = relative.strip_prefix("src") {
|
||||
// Get the module path
|
||||
let components: Vec<_> = src_relative
|
||||
.parent()?
|
||||
.components()
|
||||
.map(|c| c.as_os_str().to_string_lossy().to_string())
|
||||
.collect();
|
||||
&& let Ok(src_relative) = relative.strip_prefix("src")
|
||||
{
|
||||
// Get the module path
|
||||
let components: Vec<_> = src_relative
|
||||
.parent()?
|
||||
.components()
|
||||
.map(|c| c.as_os_str().to_string_lossy().to_string())
|
||||
.collect();
|
||||
|
||||
if !components.is_empty() {
|
||||
return Some(components.join("::"));
|
||||
}
|
||||
}
|
||||
if !components.is_empty() {
|
||||
return Some(components.join("::"));
|
||||
}
|
||||
}
|
||||
|
||||
// For TypeScript/JavaScript, use the parent directory
|
||||
if path
|
||||
.extension()
|
||||
.map(|e| e == "ts" || e == "tsx" || e == "js" || e == "jsx")
|
||||
.unwrap_or(false)
|
||||
&& let Ok(relative) = path.strip_prefix(&self.project_root) {
|
||||
// Skip src/ or lib/ prefix
|
||||
let relative = relative
|
||||
.strip_prefix("src")
|
||||
.or_else(|_| relative.strip_prefix("lib"))
|
||||
.unwrap_or(relative);
|
||||
&& let Ok(relative) = path.strip_prefix(&self.project_root)
|
||||
{
|
||||
// Skip src/ or lib/ prefix
|
||||
let relative = relative
|
||||
.strip_prefix("src")
|
||||
.or_else(|_| relative.strip_prefix("lib"))
|
||||
.unwrap_or(relative);
|
||||
|
||||
if let Some(parent) = relative.parent() {
|
||||
let module = parent.to_string_lossy().replace('/', ".");
|
||||
if !module.is_empty() {
|
||||
return Some(module);
|
||||
}
|
||||
if let Some(parent) = relative.parent() {
|
||||
let module = parent.to_string_lossy().replace('/', ".");
|
||||
if !module.is_empty() {
|
||||
return Some(module);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
|
@ -865,10 +873,11 @@ impl ContextCapture {
|
|||
let trimmed = line.trim();
|
||||
if (trimmed.starts_with(&format!("{} ", key))
|
||||
|| trimmed.starts_with(&format!("{}=", key)))
|
||||
&& let Some(value) = trimmed.split('=').nth(1) {
|
||||
let value = value.trim().trim_matches('"').trim_matches('\'');
|
||||
return Some(value.to_string());
|
||||
}
|
||||
&& let Some(value) = trimmed.split('=').nth(1)
|
||||
{
|
||||
let value = value.trim().trim_matches('"').trim_matches('\'');
|
||||
return Some(value.to_string());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
|
|
|||
|
|
@ -275,9 +275,10 @@ impl GitAnalyzer {
|
|||
files.push(path.to_path_buf());
|
||||
}
|
||||
if let Some(path) = delta.old_file().path()
|
||||
&& !files.contains(&path.to_path_buf()) {
|
||||
files.push(path.to_path_buf());
|
||||
}
|
||||
&& !files.contains(&path.to_path_buf())
|
||||
{
|
||||
files.push(path.to_path_buf());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -408,7 +409,11 @@ impl GitAnalyzer {
|
|||
}
|
||||
|
||||
// Sort by strength
|
||||
relationships.sort_by(|a, b| b.strength.partial_cmp(&a.strength).unwrap_or(std::cmp::Ordering::Equal));
|
||||
relationships.sort_by(|a, b| {
|
||||
b.strength
|
||||
.partial_cmp(&a.strength)
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
});
|
||||
|
||||
Ok(relationships)
|
||||
}
|
||||
|
|
@ -492,9 +497,10 @@ impl GitAnalyzer {
|
|||
.unwrap_or_else(Utc::now);
|
||||
|
||||
if let Some(since_time) = since
|
||||
&& commit_time < since_time {
|
||||
continue;
|
||||
}
|
||||
&& commit_time < since_time
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
let message = commit.message().map(|m| m.to_string()).unwrap_or_default();
|
||||
|
||||
|
|
@ -541,7 +547,12 @@ impl GitAnalyzer {
|
|||
let symptom = if let Some(colon_byte_pos) = first_line.find(':') {
|
||||
// Convert byte position to char position for safe slicing
|
||||
let colon_char_pos = first_line[..colon_byte_pos].chars().count();
|
||||
first_line.chars().skip(colon_char_pos + 1).collect::<String>().trim().to_string()
|
||||
first_line
|
||||
.chars()
|
||||
.skip(colon_char_pos + 1)
|
||||
.collect::<String>()
|
||||
.trim()
|
||||
.to_string()
|
||||
} else {
|
||||
first_line.to_string()
|
||||
};
|
||||
|
|
|
|||
|
|
@ -210,18 +210,23 @@ impl PatternDetector {
|
|||
|
||||
for pattern in relevant_patterns {
|
||||
if let Some(confidence) = self.calculate_match_confidence(code, &code_lower, pattern)
|
||||
&& confidence >= 0.3 {
|
||||
matches.push(PatternMatch {
|
||||
pattern: pattern.clone(),
|
||||
confidence,
|
||||
location: None, // Would need line-level analysis
|
||||
suggestions: self.generate_suggestions(pattern, code),
|
||||
});
|
||||
}
|
||||
&& confidence >= 0.3
|
||||
{
|
||||
matches.push(PatternMatch {
|
||||
pattern: pattern.clone(),
|
||||
confidence,
|
||||
location: None, // Would need line-level analysis
|
||||
suggestions: self.generate_suggestions(pattern, code),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by confidence
|
||||
matches.sort_by(|a, b| b.confidence.partial_cmp(&a.confidence).unwrap_or(std::cmp::Ordering::Equal));
|
||||
matches.sort_by(|a, b| {
|
||||
b.confidence
|
||||
.partial_cmp(&a.confidence)
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
});
|
||||
|
||||
Ok(matches)
|
||||
}
|
||||
|
|
@ -325,7 +330,11 @@ impl PatternDetector {
|
|||
}
|
||||
|
||||
// Sort by relevance
|
||||
suggestions.sort_by(|a, b| b.relevance.partial_cmp(&a.relevance).unwrap_or(std::cmp::Ordering::Equal));
|
||||
suggestions.sort_by(|a, b| {
|
||||
b.relevance
|
||||
.partial_cmp(&a.relevance)
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
});
|
||||
|
||||
Ok(suggestions)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -630,9 +630,7 @@ mod tests {
|
|||
let related = tracker.get_related_files(Path::new("src/main.rs")).unwrap();
|
||||
|
||||
assert!(!related.is_empty());
|
||||
assert!(related
|
||||
.iter()
|
||||
.any(|r| r.path == PathBuf::from("src/lib.rs")));
|
||||
assert!(related.iter().any(|r| r.path == Path::new("src/lib.rs")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
|||
|
|
@ -221,7 +221,6 @@ pub enum DecisionStatus {
|
|||
Deprecated,
|
||||
}
|
||||
|
||||
|
||||
// ============================================================================
|
||||
// BUG FIX
|
||||
// ============================================================================
|
||||
|
|
@ -273,7 +272,6 @@ pub enum BugSeverity {
|
|||
Trivial,
|
||||
}
|
||||
|
||||
|
||||
// ============================================================================
|
||||
// CODE PATTERN
|
||||
// ============================================================================
|
||||
|
|
|
|||
|
|
@ -10,13 +10,13 @@
|
|||
|
||||
use std::collections::HashSet;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::time::Duration;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use notify::{Config, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
|
||||
use tokio::sync::{broadcast, mpsc, RwLock};
|
||||
use tokio::sync::{RwLock, broadcast, mpsc};
|
||||
|
||||
use super::patterns::PatternDetector;
|
||||
use super::relationships::RelationshipTracker;
|
||||
|
|
@ -576,11 +576,12 @@ impl ManualEventHandler {
|
|||
|
||||
// Detect patterns
|
||||
if self.config.detect_patterns
|
||||
&& let Ok(content) = std::fs::read_to_string(path) {
|
||||
let language = CodebaseWatcher::detect_language(path);
|
||||
let detector = self.detector.read().await;
|
||||
let _ = detector.detect_patterns(&content, &language);
|
||||
}
|
||||
&& let Ok(content) = std::fs::read_to_string(path)
|
||||
{
|
||||
let language = CodebaseWatcher::detect_language(path);
|
||||
let detector = self.detector.read().await;
|
||||
let _ = detector.detect_patterns(&content, &language);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue