fix: v2.0.1 release — fix broken installs, CI, security, and docs

Critical fixes:
- npm postinstall.js: BINARY_VERSION '1.1.3' → '2.0.1' (every install was 404ing)
- npm package name: corrected the package name in error messages to 'vestige-mcp-server'
- README: npm install command pointed to wrong package
- MSRV: bumped from 1.85 to 1.91 (uses floor_char_boundary from 1.91)
- CI: removed stale 'develop' branch from test.yml triggers

Security hardening:
- CSP: restricted connect-src from wildcard 'ws: wss:' to localhost-only
- Added X-Frame-Options, X-Content-Type-Options, Referrer-Policy, Permissions-Policy headers
- Added frame-ancestors 'none', base-uri 'self', form-action 'self' to CSP
- Capped retention_distribution endpoint from 10k to 1k nodes
- Added debug logging for WebSocket connections without Origin header

Maintenance:
- All clippy warnings fixed (58 total: redundant closures, collapsible ifs, no-op casts)
- All versions harmonized to 2.0.1 across Cargo.toml and package.json
- CLAUDE.md updated to match v2.0.1 (21 tools, 29 modules, 1238 tests)
- docs/CLAUDE-SETUP.md: replaced deprecated function names with their current equivalents
- License corrected to AGPL-3.0-only in root package.json

1,238 tests passing, 0 clippy warnings.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Sam Valladares 2026-03-01 20:20:14 -06:00
parent b03df324da
commit c6090dc2ba
51 changed files with 343 additions and 490 deletions

View file

@ -56,22 +56,20 @@ jobs:
- name: Install pnpm - name: Install pnpm
uses: pnpm/action-setup@v4 uses: pnpm/action-setup@v4
with: with:
version: 9 version: 10
- name: Install Node.js - name: Install Node.js
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version: 22 node-version: 22
cache: pnpm cache: pnpm
cache-dependency-path: apps/dashboard/pnpm-lock.yaml cache-dependency-path: pnpm-lock.yaml
- name: Install dependencies - name: Install dependencies
run: pnpm install --frozen-lockfile run: pnpm install --frozen-lockfile
working-directory: apps/dashboard
- name: Build dashboard - name: Build dashboard
run: pnpm build run: pnpm --filter dashboard build
working-directory: apps/dashboard
release-build: release-build:
name: Release Build (${{ matrix.target }}) name: Release Build (${{ matrix.target }})

View file

@ -43,7 +43,7 @@ jobs:
- name: Install pnpm - name: Install pnpm
uses: pnpm/action-setup@v4 uses: pnpm/action-setup@v4
with: with:
version: 9 version: 10
- name: Install Node.js - name: Install Node.js
uses: actions/setup-node@v4 uses: actions/setup-node@v4
@ -52,9 +52,8 @@ jobs:
- name: Build dashboard - name: Build dashboard
run: | run: |
cd apps/dashboard
pnpm install --frozen-lockfile pnpm install --frozen-lockfile
pnpm build pnpm --filter dashboard build
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@stable

View file

@ -2,7 +2,7 @@ name: Test Suite
on: on:
push: push:
branches: [main, develop] branches: [main]
pull_request: pull_request:
branches: [main] branches: [main]
@ -52,11 +52,11 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: pnpm/action-setup@v4 - uses: pnpm/action-setup@v4
with: with:
version: 9 version: 10
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version: 22 node-version: 22
- run: cd apps/dashboard && pnpm install --frozen-lockfile && pnpm build - run: pnpm install --frozen-lockfile && pnpm --filter dashboard build
coverage: coverage:
name: Code Coverage name: Code Coverage

View file

@ -1,6 +1,6 @@
# Vestige v1.8.0 — Cognitive Memory System # Vestige v2.0.0 — Cognitive Memory System
Vestige is your long-term memory. It implements real neuroscience: FSRS-6 spaced repetition, synaptic tagging, prediction error gating, hippocampal indexing, spreading activation, and 28 stateful cognitive modules. **Use it automatically.** Vestige is your long-term memory. It implements real neuroscience: FSRS-6 spaced repetition, synaptic tagging, prediction error gating, hippocampal indexing, spreading activation, and 29 stateful cognitive modules. **Use it automatically.**
--- ---
@ -27,9 +27,9 @@ Say "Remembering..." then retrieve context before answering.
--- ---
## The 19 Tools ## The 21 Tools
### Context Packets (1 tool) — v1.8.0 ### Context Packets (1 tool)
| Tool | When to Use | | Tool | When to Use |
|------|-------------| |------|-------------|
| `session_context` | **One-call session initialization.** Replaces 5 separate calls (search × 2, intention check, system_status, predict) with a single token-budgeted response. Returns markdown context + `automationTriggers` (needsDream/needsBackup/needsGc) + `expandable` IDs for on-demand full retrieval. Params: `queries` (string[]), `token_budget` (100-10000, default 1000), `context` ({codebase, topics, file}), `include_status/include_intentions/include_predictions` (bool). | | `session_context` | **One-call session initialization.** Replaces 5 separate calls (search × 2, intention check, system_status, predict) with a single token-budgeted response. Returns markdown context + `automationTriggers` (needsDream/needsBackup/needsGc) + `expandable` IDs for on-demand full retrieval. Params: `queries` (string[]), `token_budget` (100-10000, default 1000), `context` ({codebase, topics, file}), `include_status/include_intentions/include_predictions` (bool). |
@ -53,7 +53,7 @@ Say "Remembering..." then retrieve context before answering.
| `memory_timeline` | Browse memories chronologically. Grouped by day. Filter by type, tags, date range. When user references a time period ("last week", "yesterday"). | | `memory_timeline` | Browse memories chronologically. Grouped by day. Filter by type, tags, date range. When user references a time period ("last week", "yesterday"). |
| `memory_changelog` | Audit trail. Per-memory: state transitions. System-wide: consolidations + recent changes. When debugging memory issues. | | `memory_changelog` | Audit trail. Per-memory: state transitions. System-wide: consolidations + recent changes. When debugging memory issues. |
### Cognitive (3 tools) — v1.5.0 ### Cognitive (3 tools)
| Tool | When to Use | | Tool | When to Use |
|------|-------------| |------|-------------|
| `dream` | Trigger memory consolidation — replays recent memories to discover hidden connections and synthesize insights. At session start if >24h since last dream, after every 50 saves. | | `dream` | Trigger memory consolidation — replays recent memories to discover hidden connections and synthesize insights. At session start if >24h since last dream, after every 50 saves. |
@ -66,6 +66,12 @@ Say "Remembering..." then retrieve context before answering.
| `importance_score` | Score content importance before deciding whether to save. 4-channel model: novelty, arousal, reward, attention. Composite > 0.6 = worth saving. | | `importance_score` | Score content importance before deciding whether to save. 4-channel model: novelty, arousal, reward, attention. Composite > 0.6 = worth saving. |
| `find_duplicates` | Find near-duplicate memory clusters via cosine similarity. Returns merge/review suggestions. Run when memory count > 700 or on user request. | | `find_duplicates` | Find near-duplicate memory clusters via cosine similarity. Returns merge/review suggestions. Run when memory count > 700 or on user request. |
### Autonomic (2 tools)
| Tool | When to Use |
|------|-------------|
| `memory_health` | Retention dashboard — avg retention, distribution buckets, trend (improving/declining/stable), recommendation. Lightweight alternative to system_status focused on memory quality. |
| `memory_graph` | Subgraph export for visualization. Input: center_id or query, depth (1-3), max_nodes. Returns nodes with force-directed layout positions and edges with weights. |
### Maintenance (5 tools) ### Maintenance (5 tools)
| Tool | When to Use | | Tool | When to Use |
|------|-------------| |------|-------------|
@ -168,11 +174,11 @@ smart_ingest({
--- ---
## CognitiveEngine — 28 Modules ## CognitiveEngine — 29 Modules
All modules persist across tool calls as stateful instances: All modules persist across tool calls as stateful instances:
**Neuroscience (15):** ActivationNetwork, SynapticTaggingSystem, HippocampalIndex, ContextMatcher, AccessibilityCalculator, CompetitionManager, StateUpdateService, ImportanceSignals, NoveltySignal, ArousalSignal, RewardSignal, AttentionSignal, PredictiveMemory, ProspectiveMemory, IntentionParser **Neuroscience (16):** ActivationNetwork, SynapticTaggingSystem, HippocampalIndex, ContextMatcher, AccessibilityCalculator, CompetitionManager, StateUpdateService, ImportanceSignals, NoveltySignal, ArousalSignal, RewardSignal, AttentionSignal, EmotionalMemory, PredictiveMemory, ProspectiveMemory, IntentionParser
**Advanced (11):** ImportanceTracker, ReconsolidationManager, IntentDetector, ActivityTracker, MemoryDreamer, MemoryChainBuilder, MemoryCompressor, CrossProjectLearner, AdaptiveEmbedder, SpeculativeRetriever, ConsolidationScheduler **Advanced (11):** ImportanceTracker, ReconsolidationManager, IntentDetector, ActivityTracker, MemoryDreamer, MemoryChainBuilder, MemoryCompressor, CrossProjectLearner, AdaptiveEmbedder, SpeculativeRetriever, ConsolidationScheduler
@ -209,12 +215,12 @@ Memory is retrieval. Searching strengthens memory. Search liberally, save aggres
## Development ## Development
- **Crate:** `vestige-mcp` v1.8.0, Rust 2024 edition, Rust 1.93.1 - **Crate:** `vestige-mcp` v2.0.1, Rust 2024 edition, MSRV 1.91
- **Tests:** 651 tests (313 core + 338 mcp), zero warnings - **Tests:** 1,238 tests, zero warnings
- **Build:** `cargo build --release -p vestige-mcp` - **Build:** `cargo build --release -p vestige-mcp`
- **Features:** `embeddings` + `vector-search` (default on) - **Features:** `embeddings` + `vector-search` (default on)
- **Architecture:** `McpServer` holds `Arc<Storage>` + `Arc<Mutex<CognitiveEngine>>` - **Architecture:** `McpServer` holds `Arc<Storage>` + `Arc<Mutex<CognitiveEngine>>`
- **Storage:** Interior mutability — `Storage` uses `Mutex<Connection>` for reader/writer split, all methods take `&self`. WAL mode for concurrent reads + writes. - **Storage:** Interior mutability — `Storage` uses `Mutex<Connection>` for reader/writer split, all methods take `&self`. WAL mode for concurrent reads + writes.
- **Entry:** `src/main.rs` → stdio JSON-RPC server - **Entry:** `src/main.rs` → stdio JSON-RPC server
- **Tools:** `src/tools/` — one file per tool, each exports `schema()` + `execute()` - **Tools:** `src/tools/` — one file per tool, each exports `schema()` + `execute()`
- **Cognitive:** `src/cognitive.rs` — 28-field struct, initialized once at startup - **Cognitive:** `src/cognitive.rs` — 29-field struct, initialized once at startup

192
Cargo.lock generated
View file

@ -158,17 +158,6 @@ dependencies = [
"stable_deref_trait", "stable_deref_trait",
] ]
[[package]]
name = "async-trait"
version = "0.1.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "atomic-waker" name = "atomic-waker"
version = "1.1.2" version = "1.1.2"
@ -194,7 +183,7 @@ dependencies = [
"log", "log",
"num-rational", "num-rational",
"num-traits", "num-traits",
"pastey 0.1.1", "pastey",
"rayon", "rayon",
"thiserror 2.0.18", "thiserror 2.0.18",
"v_frame", "v_frame",
@ -839,18 +828,8 @@ version = "0.20.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee"
dependencies = [ dependencies = [
"darling_core 0.20.11", "darling_core",
"darling_macro 0.20.11", "darling_macro",
]
[[package]]
name = "darling"
version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d"
dependencies = [
"darling_core 0.23.0",
"darling_macro 0.23.0",
] ]
[[package]] [[package]]
@ -867,37 +846,13 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "darling_core"
version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0"
dependencies = [
"ident_case",
"proc-macro2",
"quote",
"strsim",
"syn",
]
[[package]] [[package]]
name = "darling_macro" name = "darling_macro"
version = "0.20.11" version = "0.20.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead"
dependencies = [ dependencies = [
"darling_core 0.20.11", "darling_core",
"quote",
"syn",
]
[[package]]
name = "darling_macro"
version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d"
dependencies = [
"darling_core 0.23.0",
"quote", "quote",
"syn", "syn",
] ]
@ -942,7 +897,7 @@ version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8"
dependencies = [ dependencies = [
"darling 0.20.11", "darling",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn", "syn",
@ -1019,12 +974,6 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "dyn-clone"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555"
[[package]] [[package]]
name = "dyn-stack" name = "dyn-stack"
version = "0.13.2" version = "0.13.2"
@ -1303,21 +1252,6 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "futures"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
dependencies = [
"futures-channel",
"futures-core",
"futures-executor",
"futures-io",
"futures-sink",
"futures-task",
"futures-util",
]
[[package]] [[package]]
name = "futures-channel" name = "futures-channel"
version = "0.3.31" version = "0.3.31"
@ -1325,7 +1259,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
dependencies = [ dependencies = [
"futures-core", "futures-core",
"futures-sink",
] ]
[[package]] [[package]]
@ -1334,17 +1267,6 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
[[package]]
name = "futures-executor"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
dependencies = [
"futures-core",
"futures-task",
"futures-util",
]
[[package]] [[package]]
name = "futures-io" name = "futures-io"
version = "0.3.31" version = "0.3.31"
@ -1380,7 +1302,6 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
dependencies = [ dependencies = [
"futures-channel",
"futures-core", "futures-core",
"futures-io", "futures-io",
"futures-macro", "futures-macro",
@ -3070,12 +2991,6 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec" checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec"
[[package]]
name = "pastey"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b867cad97c0791bbd3aaa6472142568c6c9e8f71937e98379f584cfb0cf35bec"
[[package]] [[package]]
name = "pathdiff" name = "pathdiff"
version = "0.2.3" version = "0.2.3"
@ -3464,26 +3379,6 @@ dependencies = [
"thiserror 2.0.18", "thiserror 2.0.18",
] ]
[[package]]
name = "ref-cast"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d"
dependencies = [
"ref-cast-impl",
]
[[package]]
name = "ref-cast-impl"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.12.3" version = "1.12.3"
@ -3576,41 +3471,6 @@ dependencies = [
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
[[package]]
name = "rmcp"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a621b37a548ff6ab6292d57841eb25785a7f146d89391a19c9f199414bd13da"
dependencies = [
"async-trait",
"base64 0.22.1",
"chrono",
"futures",
"pastey 0.2.1",
"pin-project-lite",
"rmcp-macros",
"schemars",
"serde",
"serde_json",
"thiserror 2.0.18",
"tokio",
"tokio-util",
"tracing",
]
[[package]]
name = "rmcp-macros"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b79ed92303f9262db79575aa8c3652581668e9d136be6fd0b9ededa78954c95"
dependencies = [
"darling 0.23.0",
"proc-macro2",
"quote",
"serde_json",
"syn",
]
[[package]] [[package]]
name = "rsqlite-vfs" name = "rsqlite-vfs"
version = "0.1.0" version = "0.1.0"
@ -3737,32 +3597,6 @@ dependencies = [
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "schemars"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc"
dependencies = [
"chrono",
"dyn-clone",
"ref-cast",
"schemars_derive",
"serde",
"serde_json",
]
[[package]]
name = "schemars_derive"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d115b50f4aaeea07e79c1912f645c7513d81715d0420f8bc77a18c6260b307f"
dependencies = [
"proc-macro2",
"quote",
"serde_derive_internals",
"syn",
]
[[package]] [[package]]
name = "scopeguard" name = "scopeguard"
version = "1.2.0" version = "1.2.0"
@ -3840,17 +3674,6 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "serde_derive_internals"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.149" version = "1.0.149"
@ -4671,7 +4494,7 @@ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]] [[package]]
name = "vestige-core" name = "vestige-core"
version = "2.0.0" version = "2.0.1"
dependencies = [ dependencies = [
"chrono", "chrono",
"criterion", "criterion",
@ -4706,7 +4529,7 @@ dependencies = [
[[package]] [[package]]
name = "vestige-mcp" name = "vestige-mcp"
version = "2.0.0" version = "2.0.1"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"axum", "axum",
@ -4718,7 +4541,6 @@ dependencies = [
"include_dir", "include_dir",
"mime_guess", "mime_guess",
"open", "open",
"rmcp",
"rusqlite", "rusqlite",
"serde", "serde",
"serde_json", "serde_json",

View file

@ -10,7 +10,7 @@ exclude = [
] ]
[workspace.package] [workspace.package]
version = "1.9.2" version = "2.0.1"
edition = "2024" edition = "2024"
license = "AGPL-3.0-only" license = "AGPL-3.0-only"
repository = "https://github.com/samvallad33/vestige" repository = "https://github.com/samvallad33/vestige"

View file

@ -6,7 +6,7 @@
[![GitHub stars](https://img.shields.io/github/stars/samvallad33/vestige?style=social)](https://github.com/samvallad33/vestige) [![GitHub stars](https://img.shields.io/github/stars/samvallad33/vestige?style=social)](https://github.com/samvallad33/vestige)
[![Release](https://img.shields.io/github/v/release/samvallad33/vestige)](https://github.com/samvallad33/vestige/releases/latest) [![Release](https://img.shields.io/github/v/release/samvallad33/vestige)](https://github.com/samvallad33/vestige/releases/latest)
[![Tests](https://img.shields.io/badge/tests-734%20passing-brightgreen)](https://github.com/samvallad33/vestige/actions) [![Tests](https://img.shields.io/badge/tests-1238%20passing-brightgreen)](https://github.com/samvallad33/vestige/actions)
[![License](https://img.shields.io/badge/license-AGPL--3.0-blue)](LICENSE) [![License](https://img.shields.io/badge/license-AGPL--3.0-blue)](LICENSE)
[![MCP Compatible](https://img.shields.io/badge/MCP-compatible-green)](https://modelcontextprotocol.io) [![MCP Compatible](https://img.shields.io/badge/MCP-compatible-green)](https://modelcontextprotocol.io)
@ -25,10 +25,10 @@ Built on 130 years of memory research — FSRS-6 spaced repetition, prediction e
- **3D Memory Dashboard** — SvelteKit + Three.js neural visualization with real-time WebSocket events, bloom post-processing, force-directed graph layout. Watch your AI's mind in real-time. - **3D Memory Dashboard** — SvelteKit + Three.js neural visualization with real-time WebSocket events, bloom post-processing, force-directed graph layout. Watch your AI's mind in real-time.
- **WebSocket Event Bus** — Every cognitive operation broadcasts events: memory creation, search, dreaming, consolidation, retention decay - **WebSocket Event Bus** — Every cognitive operation broadcasts events: memory creation, search, dreaming, consolidation, retention decay
- **HyDE Query Expansion** — Template-based Hypothetical Document Embeddings for dramatically improved search quality on conceptual queries - **HyDE Query Expansion** — Template-based Hypothetical Document Embeddings for dramatically improved search quality on conceptual queries
- **Nomic v2 MoE Ready** — fastembed 5.11 with optional Nomic Embed Text v2 MoE (475M params, 8 experts) + Metal GPU acceleration - **Nomic v2 MoE (experimental)** — fastembed 5.11 with optional Nomic Embed Text v2 MoE (475M params, 8 experts) + Metal GPU acceleration. Default: v1.5 (8192 token context)
- **Command Palette**`Cmd+K` navigation, keyboard shortcuts, responsive mobile layout, PWA installable - **Command Palette**`Cmd+K` navigation, keyboard shortcuts, responsive mobile layout, PWA installable
- **FSRS Decay Visualization** — SVG retention curves with predicted decay at 1d/7d/30d, endangered memory alerts - **FSRS Decay Visualization** — SVG retention curves with predicted decay at 1d/7d/30d, endangered memory alerts
- **29 cognitive modules**734 tests, 77,840+ LOC - **29 cognitive modules**1,238 tests, 79,600+ LOC
--- ---
@ -68,10 +68,10 @@ sudo mv vestige-mcp vestige vestige-restore /usr/local/bin/
**npm:** **npm:**
```bash ```bash
npm install -g vestige-mcp npm install -g vestige-mcp-server
``` ```
**Build from source:** **Build from source (requires Rust 1.91+):**
```bash ```bash
git clone https://github.com/samvallad33/vestige && cd vestige git clone https://github.com/samvallad33/vestige && cd vestige
cargo build --release -p vestige-mcp cargo build --release -p vestige-mcp
@ -267,8 +267,8 @@ At the start of every session:
| Metric | Value | | Metric | Value |
|--------|-------| |--------|-------|
| **Language** | Rust 2024 edition | | **Language** | Rust 2024 edition (MSRV 1.91) |
| **Codebase** | 77,840+ lines, 734 tests | | **Codebase** | 79,600+ lines, 1,238 tests |
| **Binary size** | ~20MB | | **Binary size** | ~20MB |
| **Embeddings** | Nomic Embed Text v1.5 (768d → 256d Matryoshka, 8192 context) | | **Embeddings** | Nomic Embed Text v1.5 (768d → 256d Matryoshka, 8192 context) |
| **Vector search** | USearch HNSW (20x faster than FAISS) | | **Vector search** | USearch HNSW (20x faster than FAISS) |
@ -276,7 +276,7 @@ At the start of every session:
| **Storage** | SQLite + FTS5 (optional SQLCipher encryption) | | **Storage** | SQLite + FTS5 (optional SQLCipher encryption) |
| **Dashboard** | SvelteKit 2 + Svelte 5 + Three.js + Tailwind CSS 4 | | **Dashboard** | SvelteKit 2 + Svelte 5 + Three.js + Tailwind CSS 4 |
| **Transport** | MCP stdio (JSON-RPC 2.0) + WebSocket | | **Transport** | MCP stdio (JSON-RPC 2.0) + WebSocket |
| **Cognitive modules** | 29 stateful (15 neuroscience, 12 advanced, 2 search) | | **Cognitive modules** | 29 stateful (16 neuroscience, 11 advanced, 2 search) |
| **First run** | Downloads embedding model (~130MB), then fully offline | | **First run** | Downloads embedding model (~130MB), then fully offline |
| **Platforms** | macOS (ARM/Intel), Linux (x86_64), Windows | | **Platforms** | macOS (ARM/Intel), Linux (x86_64), Windows |
@ -376,5 +376,5 @@ AGPL-3.0 — free to use, modify, and self-host. If you offer Vestige as a netwo
<p align="center"> <p align="center">
<i>Built by <a href="https://github.com/samvallad33">@samvallad33</a></i><br> <i>Built by <a href="https://github.com/samvallad33">@samvallad33</a></i><br>
<sub>77,840+ lines of Rust · 29 cognitive modules · 130 years of memory research · one 22MB binary</sub> <sub>79,600+ lines of Rust · 29 cognitive modules · 130 years of memory research · one 22MB binary</sub>
</p> </p>

View file

@ -1,8 +1,8 @@
[package] [package]
name = "vestige-core" name = "vestige-core"
version = "2.0.0" version = "2.0.1"
edition = "2024" edition = "2024"
rust-version = "1.85" rust-version = "1.91"
authors = ["Vestige Team"] authors = ["Vestige Team"]
description = "Cognitive memory engine - FSRS-6 spaced repetition, semantic embeddings, and temporal memory" description = "Cognitive memory engine - FSRS-6 spaced repetition, semantic embeddings, and temporal memory"
license = "AGPL-3.0-only" license = "AGPL-3.0-only"
@ -37,11 +37,6 @@ qwen3-reranker = ["embeddings", "fastembed/qwen3"]
# Metal GPU acceleration on Apple Silicon (significantly faster inference) # Metal GPU acceleration on Apple Silicon (significantly faster inference)
metal = ["fastembed/metal"] metal = ["fastembed/metal"]
# Full feature set including MCP protocol support
full = ["embeddings", "vector-search"]
# MCP (Model Context Protocol) support for Claude integration
mcp = []
[dependencies] [dependencies]
# Serialization # Serialization

View file

@ -431,12 +431,11 @@ impl CrossProjectLearner {
// Check each trigger // Check each trigger
for trigger in &pattern.pattern.triggers { for trigger in &pattern.pattern.triggers {
if let Some((matches, reason)) = self.check_trigger(trigger, context) { if let Some((matches, reason)) = self.check_trigger(trigger, context)
if matches { && matches {
match_scores.push(trigger.confidence); match_scores.push(trigger.confidence);
match_reasons.push(reason); match_reasons.push(reason);
} }
}
} }
if match_scores.is_empty() { if match_scores.is_empty() {
@ -547,12 +546,11 @@ impl CrossProjectLearner {
let success_rate = success_count as f64 / total_count as f64; let success_rate = success_count as f64 / total_count as f64;
if let Ok(mut patterns) = self.patterns.write() { if let Ok(mut patterns) = self.patterns.write()
if let Some(pattern) = patterns.get_mut(pattern_id) { && let Some(pattern) = patterns.get_mut(pattern_id) {
pattern.success_rate = success_rate; pattern.success_rate = success_rate;
pattern.application_count = total_count as u32; pattern.application_count = total_count as u32;
} }
}
} }
fn extract_patterns_from_category( fn extract_patterns_from_category(
@ -596,8 +594,8 @@ impl CrossProjectLearner {
// Create a potential pattern (simplified) // Create a potential pattern (simplified)
let pattern_id = format!("auto-{}-{}", category_to_string(&category), keyword); let pattern_id = format!("auto-{}-{}", category_to_string(&category), keyword);
if let Ok(mut patterns) = self.patterns.write() { if let Ok(mut patterns) = self.patterns.write()
if !patterns.contains_key(&pattern_id) { && !patterns.contains_key(&pattern_id) {
patterns.insert( patterns.insert(
pattern_id.clone(), pattern_id.clone(),
UniversalPattern { UniversalPattern {
@ -629,7 +627,6 @@ impl CrossProjectLearner {
}, },
); );
} }
}
} }
} }
} }

View file

@ -454,11 +454,10 @@ impl ConsolidationScheduler {
if let Ok(mut graph) = self.connections.write() { if let Ok(mut graph) = self.connections.write() {
// Strengthen connections between sequentially replayed memories // Strengthen connections between sequentially replayed memories
for window in replay.sequence.windows(2) { for window in replay.sequence.windows(2) {
if let [id_a, id_b] = window { if let [id_a, id_b] = window
if graph.strengthen_connection(id_a, id_b, 0.1) { && graph.strengthen_connection(id_a, id_b, 0.1) {
strengthened += 1; strengthened += 1;
} }
}
} }
// Also strengthen based on discovered patterns // Also strengthen based on discovered patterns
@ -704,13 +703,12 @@ impl ConnectionGraph {
let mut strengthened = false; let mut strengthened = false;
for (a, b) in [(from_id, to_id), (to_id, from_id)] { for (a, b) in [(from_id, to_id), (to_id, from_id)] {
if let Some(connections) = self.connections.get_mut(a) { if let Some(connections) = self.connections.get_mut(a)
if let Some(conn) = connections.iter_mut().find(|c| c.target_id == b) { && let Some(conn) = connections.iter_mut().find(|c| c.target_id == b) {
conn.strength = (conn.strength + boost).min(2.0); conn.strength = (conn.strength + boost).min(2.0);
conn.last_strengthened = now; conn.last_strengthened = now;
strengthened = true; strengthened = true;
} }
}
} }
strengthened strengthened
@ -1478,11 +1476,10 @@ impl MemoryDreamer {
} }
// Try to generate insight from this cluster // Try to generate insight from this cluster
if let Some(insight) = self.generate_insight_from_cluster(&cluster_memories) { if let Some(insight) = self.generate_insight_from_cluster(&cluster_memories)
if insight.novelty_score >= self.config.min_novelty { && insight.novelty_score >= self.config.min_novelty {
insights.push(insight); insights.push(insight);
} }
}
if insights.len() >= self.config.max_insights { if insights.len() >= self.config.max_insights {
break; break;

View file

@ -230,13 +230,11 @@ impl ImportanceTracker {
self.on_retrieved(memory_id, was_helpful); self.on_retrieved(memory_id, was_helpful);
// Store context with event // Store context with event
if let Ok(mut events) = self.recent_events.write() { if let Ok(mut events) = self.recent_events.write()
if let Some(event) = events.last_mut() { && let Some(event) = events.last_mut()
if event.memory_id == memory_id { && event.memory_id == memory_id {
event.context = Some(context.to_string()); event.context = Some(context.to_string());
} }
}
}
} }
/// Apply importance decay to all memories /// Apply importance decay to all memories

View file

@ -561,11 +561,10 @@ impl IntentDetector {
score += 0.2; score += 0.2;
} }
ActionType::FileOpened | ActionType::FileEdited => { ActionType::FileOpened | ActionType::FileEdited => {
if let Some(file) = &action.file { if let Some(file) = &action.file
if let Some(name) = file.file_name() { && let Some(name) = file.file_name() {
suspected_area = name.to_string_lossy().to_string(); suspected_area = name.to_string_lossy().to_string();
} }
}
} }
_ => {} _ => {}
} }

View file

@ -516,15 +516,14 @@ impl ReconsolidationManager {
return false; return false;
} }
if let Some(state) = self.labile_memories.get_mut(memory_id) { if let Some(state) = self.labile_memories.get_mut(memory_id)
if state.is_within_window(self.labile_window) { && state.is_within_window(self.labile_window) {
let success = state.add_modification(modification); let success = state.add_modification(modification);
if success { if success {
self.stats.total_modifications += 1; self.stats.total_modifications += 1;
} }
return success; return success;
} }
}
false false
} }
@ -690,15 +689,14 @@ impl ReconsolidationManager {
if let Ok(history) = self.retrieval_history.read() { if let Ok(history) = self.retrieval_history.read() {
for record in history.iter() { for record in history.iter() {
if record.memory_id == memory_id { if record.memory_id == memory_id
if let Some(context) = &record.context { && let Some(context) = &record.context {
for co_id in &context.co_retrieved { for co_id in &context.co_retrieved {
if co_id != memory_id { if co_id != memory_id {
*co_retrieved.entry(co_id.clone()).or_insert(0) += 1; *co_retrieved.entry(co_id.clone()).or_insert(0) += 1;
} }
} }
} }
}
} }
} }
@ -772,7 +770,7 @@ fn truncate(s: &str, max_len: usize) -> &str {
if s.len() <= max_len { if s.len() <= max_len {
s s
} else { } else {
&s[..max_len] &s[..s.floor_char_boundary(max_len)]
} }
} }

View file

@ -265,13 +265,12 @@ impl SpeculativeRetriever {
} }
// Update file-memory associations // Update file-memory associations
if let Some(file) = file_context { if let Some(file) = file_context
if let Ok(mut map) = self.file_memory_map.write() { && let Ok(mut map) = self.file_memory_map.write() {
map.entry(file.to_string()) map.entry(file.to_string())
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.push(memory_id.to_string()); .push(memory_id.to_string());
} }
}
} }
/// Get cached predictions /// Get cached predictions

View file

@ -586,11 +586,10 @@ impl ContextCapture {
} }
// Java Spring // Java Spring
if let Ok(content) = fs::read_to_string(self.project_root.join("pom.xml")) { if let Ok(content) = fs::read_to_string(self.project_root.join("pom.xml"))
if content.contains("spring") { && content.contains("spring") {
frameworks.push(Framework::Spring); frameworks.push(Framework::Spring);
} }
}
// Ruby Rails // Ruby Rails
if self.file_exists("config/routes.rb") { if self.file_exists("config/routes.rb") {
@ -613,30 +612,27 @@ impl ContextCapture {
/// Detect the project name from config files /// Detect the project name from config files
fn detect_project_name(&self) -> Result<Option<String>> { fn detect_project_name(&self) -> Result<Option<String>> {
// Try Cargo.toml // Try Cargo.toml
if let Ok(content) = fs::read_to_string(self.project_root.join("Cargo.toml")) { if let Ok(content) = fs::read_to_string(self.project_root.join("Cargo.toml"))
if let Some(name) = self.extract_toml_value(&content, "name") { && let Some(name) = self.extract_toml_value(&content, "name") {
return Ok(Some(name)); return Ok(Some(name));
} }
}
// Try package.json // Try package.json
if let Ok(content) = fs::read_to_string(self.project_root.join("package.json")) { if let Ok(content) = fs::read_to_string(self.project_root.join("package.json"))
if let Some(name) = self.extract_json_value(&content, "name") { && let Some(name) = self.extract_json_value(&content, "name") {
return Ok(Some(name)); return Ok(Some(name));
} }
}
// Try pyproject.toml // Try pyproject.toml
if let Ok(content) = fs::read_to_string(self.project_root.join("pyproject.toml")) { if let Ok(content) = fs::read_to_string(self.project_root.join("pyproject.toml"))
if let Some(name) = self.extract_toml_value(&content, "name") { && let Some(name) = self.extract_toml_value(&content, "name") {
return Ok(Some(name)); return Ok(Some(name));
} }
}
// Try go.mod // Try go.mod
if let Ok(content) = fs::read_to_string(self.project_root.join("go.mod")) { if let Ok(content) = fs::read_to_string(self.project_root.join("go.mod"))
if let Some(line) = content.lines().next() { && let Some(line) = content.lines().next()
if line.starts_with("module ") { && line.starts_with("module ") {
let name = line let name = line
.trim_start_matches("module ") .trim_start_matches("module ")
.split('/') .split('/')
@ -647,8 +643,6 @@ impl ContextCapture {
return Ok(Some(name)); return Ok(Some(name));
} }
} }
}
}
// Fall back to directory name // Fall back to directory name
Ok(self Ok(self
@ -734,8 +728,8 @@ impl ContextCapture {
// Check test directories // Check test directories
for test_dir in test_dirs { for test_dir in test_dirs {
let test_path = self.project_root.join(test_dir); let test_path = self.project_root.join(test_dir);
if test_path.exists() { if test_path.exists()
if let Ok(entries) = fs::read_dir(&test_path) { && let Ok(entries) = fs::read_dir(&test_path) {
for entry in entries.filter_map(|e| e.ok()) { for entry in entries.filter_map(|e| e.ok()) {
let entry_path = entry.path(); let entry_path = entry.path();
if let Some(entry_stem) = entry_path.file_stem() { if let Some(entry_stem) = entry_path.file_stem() {
@ -746,7 +740,6 @@ impl ContextCapture {
} }
} }
} }
}
} }
// For Rust, look for mod.rs in same directory // For Rust, look for mod.rs in same directory
@ -799,9 +792,9 @@ impl ContextCapture {
/// Detect the module a file belongs to /// Detect the module a file belongs to
fn detect_module(&self, path: &Path) -> Option<String> { fn detect_module(&self, path: &Path) -> Option<String> {
// For Rust, use the parent directory name relative to src/ // For Rust, use the parent directory name relative to src/
if path.extension().map(|e| e == "rs").unwrap_or(false) { if path.extension().map(|e| e == "rs").unwrap_or(false)
if let Ok(relative) = path.strip_prefix(&self.project_root) { && let Ok(relative) = path.strip_prefix(&self.project_root)
if let Ok(src_relative) = relative.strip_prefix("src") { && let Ok(src_relative) = relative.strip_prefix("src") {
// Get the module path // Get the module path
let components: Vec<_> = src_relative let components: Vec<_> = src_relative
.parent()? .parent()?
@ -813,16 +806,13 @@ impl ContextCapture {
return Some(components.join("::")); return Some(components.join("::"));
} }
} }
}
}
// For TypeScript/JavaScript, use the parent directory // For TypeScript/JavaScript, use the parent directory
if path if path
.extension() .extension()
.map(|e| e == "ts" || e == "tsx" || e == "js" || e == "jsx") .map(|e| e == "ts" || e == "tsx" || e == "js" || e == "jsx")
.unwrap_or(false) .unwrap_or(false)
{ && let Ok(relative) = path.strip_prefix(&self.project_root) {
if let Ok(relative) = path.strip_prefix(&self.project_root) {
// Skip src/ or lib/ prefix // Skip src/ or lib/ prefix
let relative = relative let relative = relative
.strip_prefix("src") .strip_prefix("src")
@ -836,7 +826,6 @@ impl ContextCapture {
} }
} }
} }
}
None None
} }
@ -874,14 +863,12 @@ impl ContextCapture {
fn extract_toml_value(&self, content: &str, key: &str) -> Option<String> { fn extract_toml_value(&self, content: &str, key: &str) -> Option<String> {
for line in content.lines() { for line in content.lines() {
let trimmed = line.trim(); let trimmed = line.trim();
if trimmed.starts_with(&format!("{} ", key)) if (trimmed.starts_with(&format!("{} ", key))
|| trimmed.starts_with(&format!("{}=", key)) || trimmed.starts_with(&format!("{}=", key)))
{ && let Some(value) = trimmed.split('=').nth(1) {
if let Some(value) = trimmed.split('=').nth(1) {
let value = value.trim().trim_matches('"').trim_matches('\''); let value = value.trim().trim_matches('"').trim_matches('\'');
return Some(value.to_string()); return Some(value.to_string());
} }
}
} }
None None
} }

View file

@ -274,11 +274,10 @@ impl GitAnalyzer {
if let Some(path) = delta.new_file().path() { if let Some(path) = delta.new_file().path() {
files.push(path.to_path_buf()); files.push(path.to_path_buf());
} }
if let Some(path) = delta.old_file().path() { if let Some(path) = delta.old_file().path()
if !files.contains(&path.to_path_buf()) { && !files.contains(&path.to_path_buf()) {
files.push(path.to_path_buf()); files.push(path.to_path_buf());
} }
}
} }
} }
@ -492,11 +491,10 @@ impl GitAnalyzer {
.single() .single()
.unwrap_or_else(Utc::now); .unwrap_or_else(Utc::now);
if let Some(since_time) = since { if let Some(since_time) = since
if commit_time < since_time { && commit_time < since_time {
continue; continue;
} }
}
let message = commit.message().map(|m| m.to_string()).unwrap_or_default(); let message = commit.message().map(|m| m.to_string()).unwrap_or_default();

View file

@ -209,8 +209,8 @@ impl PatternDetector {
.collect(); .collect();
for pattern in relevant_patterns { for pattern in relevant_patterns {
if let Some(confidence) = self.calculate_match_confidence(code, &code_lower, pattern) { if let Some(confidence) = self.calculate_match_confidence(code, &code_lower, pattern)
if confidence >= 0.3 { && confidence >= 0.3 {
matches.push(PatternMatch { matches.push(PatternMatch {
pattern: pattern.clone(), pattern: pattern.clone(),
confidence, confidence,
@ -218,7 +218,6 @@ impl PatternDetector {
suggestions: self.generate_suggestions(pattern, code), suggestions: self.generate_suggestions(pattern, code),
}); });
} }
}
} }
// Sort by confidence // Sort by confidence

View file

@ -337,14 +337,13 @@ impl CodebaseWatcher {
} }
// Detect patterns if enabled // Detect patterns if enabled
if config.detect_patterns { if config.detect_patterns
if let Ok(content) = std::fs::read_to_string(path) { && let Ok(content) = std::fs::read_to_string(path) {
let language = Self::detect_language(path); let language = Self::detect_language(path);
if let Ok(detector) = detector.try_read() { if let Ok(detector) = detector.try_read() {
let _ = detector.detect_patterns(&content, &language); let _ = detector.detect_patterns(&content, &language);
} }
} }
}
} }
FileEventKind::Deleted => { FileEventKind::Deleted => {
// File was deleted, remove from session // File was deleted, remove from session
@ -576,13 +575,12 @@ impl ManualEventHandler {
} }
// Detect patterns // Detect patterns
if self.config.detect_patterns { if self.config.detect_patterns
if let Ok(content) = std::fs::read_to_string(path) { && let Ok(content) = std::fs::read_to_string(path) {
let language = CodebaseWatcher::detect_language(path); let language = CodebaseWatcher::detect_language(path);
let detector = self.detector.read().await; let detector = self.detector.read().await;
let _ = detector.detect_patterns(&content, &language); let _ = detector.detect_patterns(&content, &language);
} }
}
Ok(()) Ok(())
} }

View file

@ -333,11 +333,10 @@ impl DreamEngine {
emotion: &EmotionCategory, emotion: &EmotionCategory,
) -> TriageCategory { ) -> TriageCategory {
// High emotional content // High emotional content
if matches!(emotion, EmotionCategory::Frustration | EmotionCategory::Urgency | EmotionCategory::Joy | EmotionCategory::Surprise) { if matches!(emotion, EmotionCategory::Frustration | EmotionCategory::Urgency | EmotionCategory::Joy | EmotionCategory::Surprise)
if node.sentiment_magnitude > 0.4 { && node.sentiment_magnitude > 0.4 {
return TriageCategory::Emotional; return TriageCategory::Emotional;
} }
}
// Future-relevant (intentions, TODOs) // Future-relevant (intentions, TODOs)
let content_lower = node.content.to_lowercase(); let content_lower = node.content.to_lowercase();
@ -386,7 +385,7 @@ impl DreamEngine {
.collect(); .collect();
// Process replay queue in oscillation waves // Process replay queue in oscillation waves
let wave_count = (replay_queue.len() + self.wave_batch_size - 1) / self.wave_batch_size; let wave_count = replay_queue.len().div_ceil(self.wave_batch_size);
for wave_idx in 0..wave_count { for wave_idx in 0..wave_count {
let wave_start = wave_idx * self.wave_batch_size; let wave_start = wave_idx * self.wave_batch_size;
@ -659,8 +658,8 @@ impl DreamEngine {
if indices.len() >= 3 && indices.len() <= 10 { if indices.len() >= 3 && indices.len() <= 10 {
pattern_count += 1; pattern_count += 1;
// Create a connection between the first and last memory sharing this pattern // Create a connection between the first and last memory sharing this pattern
if let (Some(&first), Some(&last)) = (indices.first(), indices.last()) { if let (Some(&first), Some(&last)) = (indices.first(), indices.last())
if first != last { && first != last {
connections.push(CreativeConnection { connections.push(CreativeConnection {
memory_a_id: triaged[first].id.clone(), memory_a_id: triaged[first].id.clone(),
memory_b_id: triaged[last].id.clone(), memory_b_id: triaged[last].id.clone(),
@ -672,7 +671,6 @@ impl DreamEngine {
connection_type: CreativeConnectionType::CrossDomain, connection_type: CreativeConnectionType::CrossDomain,
}); });
} }
}
} }
} }

View file

@ -181,7 +181,7 @@ impl Embedding {
/// Create from bytes /// Create from bytes
pub fn from_bytes(bytes: &[u8]) -> Option<Self> { pub fn from_bytes(bytes: &[u8]) -> Option<Self> {
if bytes.len() % 4 != 0 { if !bytes.len().is_multiple_of(4) {
return None; return None;
} }
let vector: Vec<f32> = bytes let vector: Vec<f32> = bytes
@ -260,9 +260,13 @@ impl EmbeddingService {
let mut model = get_model()?; let mut model = get_model()?;
// Truncate if too long // Truncate if too long (char-boundary safe)
let text = if text.len() > MAX_TEXT_LENGTH { let text = if text.len() > MAX_TEXT_LENGTH {
&text[..MAX_TEXT_LENGTH] let mut end = MAX_TEXT_LENGTH;
while !text.is_char_boundary(end) && end > 0 {
end -= 1;
}
&text[..end]
} else { } else {
text text
}; };
@ -295,7 +299,11 @@ impl EmbeddingService {
.iter() .iter()
.map(|t| { .map(|t| {
if t.len() > MAX_TEXT_LENGTH { if t.len() > MAX_TEXT_LENGTH {
&t[..MAX_TEXT_LENGTH] let mut end = MAX_TEXT_LENGTH;
while !t.is_char_boundary(end) && end > 0 {
end -= 1;
}
&t[..end]
} else { } else {
*t *t
} }

View file

@ -241,17 +241,15 @@ impl FSRSScheduler {
}; };
// Apply sentiment boost // Apply sentiment boost
if self.enable_sentiment_boost { if self.enable_sentiment_boost
if let Some(sentiment) = sentiment_boost { && let Some(sentiment) = sentiment_boost
if sentiment > 0.0 { && sentiment > 0.0 {
new_state.stability = apply_sentiment_boost( new_state.stability = apply_sentiment_boost(
new_state.stability, new_state.stability,
sentiment, sentiment,
self.max_sentiment_boost, self.max_sentiment_boost,
); );
} }
}
}
let mut interval = let mut interval =
next_interval_with_decay(new_state.stability, self.params.desired_retention, w20) next_interval_with_decay(new_state.stability, self.params.desired_retention, w20)

View file

@ -910,39 +910,34 @@ impl ContextMatcher {
let mut score = 0.0; let mut score = 0.0;
// Same session is a very strong match // Same session is a very strong match
if let (Some(e_id), Some(r_id)) = (&encoding.session_id, &retrieval.session_id) { if let (Some(e_id), Some(r_id)) = (&encoding.session_id, &retrieval.session_id)
if e_id == r_id { && e_id == r_id {
return 1.0; return 1.0;
} }
}
// Project match (0.4 weight) // Project match (0.4 weight)
if let (Some(e_proj), Some(r_proj)) = (&encoding.project, &retrieval.project) { if let (Some(e_proj), Some(r_proj)) = (&encoding.project, &retrieval.project)
if e_proj == r_proj { && e_proj == r_proj {
score += 0.4; score += 0.4;
} }
}
// Activity type match (0.3 weight) // Activity type match (0.3 weight)
if let (Some(e_act), Some(r_act)) = (&encoding.activity_type, &retrieval.activity_type) { if let (Some(e_act), Some(r_act)) = (&encoding.activity_type, &retrieval.activity_type)
if e_act == r_act { && e_act == r_act {
score += 0.3; score += 0.3;
} }
}
// Git branch match (0.2 weight) // Git branch match (0.2 weight)
if let (Some(e_br), Some(r_br)) = (&encoding.git_branch, &retrieval.git_branch) { if let (Some(e_br), Some(r_br)) = (&encoding.git_branch, &retrieval.git_branch)
if e_br == r_br { && e_br == r_br {
score += 0.2; score += 0.2;
} }
}
// Active file match (0.1 weight) // Active file match (0.1 weight)
if let (Some(e_file), Some(r_file)) = (&encoding.active_file, &retrieval.active_file) { if let (Some(e_file), Some(r_file)) = (&encoding.active_file, &retrieval.active_file)
if e_file == r_file { && e_file == r_file {
score += 0.1; score += 0.1;
} }
}
score score
} }

View file

@ -1075,11 +1075,10 @@ impl ContentStore {
pub fn retrieve(&self, pointer: &ContentPointer) -> Result<Vec<u8>> { pub fn retrieve(&self, pointer: &ContentPointer) -> Result<Vec<u8>> {
// Check cache first // Check cache first
let cache_key = self.cache_key(pointer); let cache_key = self.cache_key(pointer);
if let Ok(cache) = self.cache.read() { if let Ok(cache) = self.cache.read()
if let Some(data) = cache.get(&cache_key) { && let Some(data) = cache.get(&cache_key) {
return Ok(data.clone()); return Ok(data.clone());
} }
}
// Retrieve from storage // Retrieve from storage
let data = match &pointer.storage_location { let data = match &pointer.storage_location {
@ -1131,8 +1130,8 @@ impl ContentStore {
return; return;
} }
if let Ok(mut cache) = self.cache.write() { if let Ok(mut cache) = self.cache.write()
if let Ok(mut size) = self.current_cache_size.write() { && let Ok(mut size) = self.current_cache_size.write() {
// Evict if necessary // Evict if necessary
while *size + data_size > self.max_cache_size && !cache.is_empty() { while *size + data_size > self.max_cache_size && !cache.is_empty() {
// Simple eviction: remove first entry // Simple eviction: remove first entry
@ -1148,7 +1147,6 @@ impl ContentStore {
cache.insert(key.to_string(), data.to_vec()); cache.insert(key.to_string(), data.to_vec());
*size += data_size; *size += data_size;
} }
}
} }
/// Retrieve from SQLite (placeholder - to be integrated with Storage) /// Retrieve from SQLite (placeholder - to be integrated with Storage)
@ -1394,8 +1392,8 @@ impl HippocampalIndex {
let mut match_result = IndexMatch::new(index.clone()); let mut match_result = IndexMatch::new(index.clone());
// Calculate semantic score // Calculate semantic score
if let Some(ref query_embedding) = query.semantic_embedding { if let Some(ref query_embedding) = query.semantic_embedding
if !index.semantic_summary.is_empty() { && !index.semantic_summary.is_empty() {
let query_compressed = self.compress_embedding(query_embedding); let query_compressed = self.compress_embedding(query_embedding);
match_result.semantic_score = match_result.semantic_score =
self.cosine_similarity(&query_compressed, &index.semantic_summary); self.cosine_similarity(&query_compressed, &index.semantic_summary);
@ -1404,7 +1402,6 @@ impl HippocampalIndex {
continue; continue;
} }
} }
}
// Calculate text score // Calculate text score
if let Some(ref text_query) = query.text_query { if let Some(ref text_query) = query.text_query {
@ -1444,25 +1441,22 @@ impl HippocampalIndex {
/// Check if an index passes query filters /// Check if an index passes query filters
fn passes_filters(&self, index: &MemoryIndex, query: &IndexQuery) -> bool { fn passes_filters(&self, index: &MemoryIndex, query: &IndexQuery) -> bool {
// Time range filter // Time range filter
if let Some((start, end)) = query.time_range { if let Some((start, end)) = query.time_range
if index.temporal_marker.created_at < start || index.temporal_marker.created_at > end { && (index.temporal_marker.created_at < start || index.temporal_marker.created_at > end) {
return false; return false;
} }
}
// Importance flags filter // Importance flags filter
if let Some(ref required) = query.required_flags { if let Some(ref required) = query.required_flags
if !index.matches_importance(required.to_bits()) { && !index.matches_importance(required.to_bits()) {
return false; return false;
} }
}
// Node type filter // Node type filter
if let Some(ref types) = query.node_types { if let Some(ref types) = query.node_types
if !types.contains(&index.node_type) { && !types.contains(&index.node_type) {
return false; return false;
} }
}
true true
} }
@ -1579,11 +1573,10 @@ impl HippocampalIndex {
let mut memories = Vec::with_capacity(matches.len()); let mut memories = Vec::with_capacity(matches.len());
for m in matches { for m in matches {
// Record access // Record access
if let Ok(mut indices) = self.indices.write() { if let Ok(mut indices) = self.indices.write()
if let Some(index) = indices.get_mut(&m.index.memory_id) { && let Some(index) = indices.get_mut(&m.index.memory_id) {
index.record_access(); index.record_access();
} }
}
match self.retrieve_content(&m.index) { match self.retrieve_content(&m.index) {
Ok(memory) => memories.push(memory), Ok(memory) => memories.push(memory),
@ -1887,20 +1880,19 @@ impl HippocampalIndex {
sentiment_magnitude: f64, sentiment_magnitude: f64,
) -> Result<MemoryBarcode> { ) -> Result<MemoryBarcode> {
// Check if already indexed // Check if already indexed
if let Ok(indices) = self.indices.read() { if let Ok(indices) = self.indices.read()
if indices.contains_key(node_id) { && indices.contains_key(node_id) {
return Err(HippocampalIndexError::MigrationError( return Err(HippocampalIndexError::MigrationError(
"Node already indexed".to_string(), "Node already indexed".to_string(),
)); ));
} }
}
// Create the index // Create the index
let barcode = self.index_memory(node_id, content, node_type, created_at, embedding)?; let barcode = self.index_memory(node_id, content, node_type, created_at, embedding)?;
// Update importance flags based on existing data // Update importance flags based on existing data
if let Ok(mut indices) = self.indices.write() { if let Ok(mut indices) = self.indices.write()
if let Some(index) = indices.get_mut(node_id) { && let Some(index) = indices.get_mut(node_id) {
// Set high retention flag if applicable // Set high retention flag if applicable
if retention_strength > 0.7 { if retention_strength > 0.7 {
index.importance_flags.set_high_retention(true); index.importance_flags.set_high_retention(true);
@ -1919,7 +1911,6 @@ impl HippocampalIndex {
ContentType::Text, ContentType::Text,
)); ));
} }
}
Ok(barcode) Ok(barcode)
} }

View file

@ -358,8 +358,8 @@ impl PredictionModel {
fn learn(&self, content: &str) { fn learn(&self, content: &str) {
let ngrams = self.extract_ngrams(content); let ngrams = self.extract_ngrams(content);
if let Ok(mut patterns) = self.patterns.write() { if let Ok(mut patterns) = self.patterns.write()
if let Ok(mut total) = self.total_count.write() { && let Ok(mut total) = self.total_count.write() {
for ngram in ngrams { for ngram in ngrams {
*patterns.entry(ngram).or_insert(0) += 1; *patterns.entry(ngram).or_insert(0) += 1;
*total += 1; *total += 1;
@ -370,7 +370,6 @@ impl PredictionModel {
self.apply_decay(&mut patterns); self.apply_decay(&mut patterns);
} }
} }
}
} }
fn compute_prediction_error(&self, content: &str) -> f64 { fn compute_prediction_error(&self, content: &str) -> f64 {

View file

@ -1266,15 +1266,14 @@ impl MemoryStateInfo {
); );
} }
MemoryState::Unavailable => { MemoryState::Unavailable => {
if let Some(until) = lifecycle.suppression_until { if let Some(until) = lifecycle.suppression_until
if until > now { && until > now {
recommendations.push(format!( recommendations.push(format!(
"This memory is temporarily suppressed. \ "This memory is temporarily suppressed. \
It will become accessible again after {}.", It will become accessible again after {}.",
until.format("%Y-%m-%d %H:%M UTC") until.format("%Y-%m-%d %H:%M UTC")
)); ));
} }
}
} }
MemoryState::Dormant => { MemoryState::Dormant => {
if duration_since_access.num_days() > 20 { if duration_since_access.num_days() > 20 {

View file

@ -694,18 +694,16 @@ impl Intention {
} }
// Check snoozed // Check snoozed
if let Some(snoozed_until) = self.snoozed_until { if let Some(snoozed_until) = self.snoozed_until
if Utc::now() < snoozed_until { && Utc::now() < snoozed_until {
return false; return false;
} }
}
// Check minimum interval // Check minimum interval
if let Some(last) = self.last_reminded_at { if let Some(last) = self.last_reminded_at
if (Utc::now() - last) < Duration::minutes(MIN_REMINDER_INTERVAL_MINUTES) { && (Utc::now() - last) < Duration::minutes(MIN_REMINDER_INTERVAL_MINUTES) {
return false; return false;
} }
}
true true
} }
@ -1267,13 +1265,11 @@ impl ProspectiveMemory {
// Skip non-active intentions // Skip non-active intentions
if intention.status != IntentionStatus::Active { if intention.status != IntentionStatus::Active {
// Check if snoozed intention should wake // Check if snoozed intention should wake
if intention.status == IntentionStatus::Snoozed { if intention.status == IntentionStatus::Snoozed
if let Some(until) = intention.snoozed_until { && let Some(until) = intention.snoozed_until
if Utc::now() >= until { && Utc::now() >= until {
intention.wake(); intention.wake();
} }
}
}
continue; continue;
} }

View file

@ -287,11 +287,10 @@ impl ActivationNetwork {
self.edges.insert((source.clone(), target.clone()), edge); self.edges.insert((source.clone(), target.clone()), edge);
// Update node's edge list // Update node's edge list
if let Some(node) = self.nodes.get_mut(&source) { if let Some(node) = self.nodes.get_mut(&source)
if !node.edges.contains(&target) { && !node.edges.contains(&target) {
node.edges.push(target); node.edges.push(target);
} }
}
} }
/// Activate a node and spread activation through the network /// Activate a node and spread activation through the network
@ -314,11 +313,10 @@ impl ActivationNetwork {
while let Some((current_id, current_activation, hops, path)) = queue.pop() { while let Some((current_id, current_activation, hops, path)) = queue.pop() {
// Skip if we've visited this node with higher activation // Skip if we've visited this node with higher activation
if let Some(&prev_activation) = visited.get(&current_id) { if let Some(&prev_activation) = visited.get(&current_id)
if prev_activation >= current_activation { && prev_activation >= current_activation {
continue; continue;
} }
}
visited.insert(current_id.clone(), current_activation); visited.insert(current_id.clone(), current_activation);
// Check hop limit // Check hop limit

View file

@ -609,13 +609,13 @@ impl Storage {
node_id, node_id,
embedding.to_bytes(), embedding.to_bytes(),
EMBEDDING_DIMENSIONS as i32, EMBEDDING_DIMENSIONS as i32,
"all-MiniLM-L6-v2", "nomic-embed-text-v1.5",
now.to_rfc3339(), now.to_rfc3339(),
], ],
)?; )?;
writer.execute( writer.execute(
"UPDATE knowledge_nodes SET has_embedding = 1, embedding_model = 'all-MiniLM-L6-v2' WHERE id = ?1", "UPDATE knowledge_nodes SET has_embedding = 1, embedding_model = 'nomic-embed-text-v1.5' WHERE id = ?1",
params![node_id], params![node_id],
)?; )?;
} }
@ -639,7 +639,7 @@ impl Storage {
.prepare("SELECT * FROM knowledge_nodes WHERE id = ?1")?; .prepare("SELECT * FROM knowledge_nodes WHERE id = ?1")?;
let node = stmt let node = stmt
.query_row(params![id], |row| Self::row_to_node(row)) .query_row(params![id], Self::row_to_node)
.optional()?; .optional()?;
Ok(node) Ok(node)
} }
@ -1058,7 +1058,7 @@ impl Storage {
LIMIT ?2", LIMIT ?2",
)?; )?;
let nodes = stmt.query_map(params![now, limit], |row| Self::row_to_node(row))?; let nodes = stmt.query_map(params![now, limit], Self::row_to_node)?;
let mut result = Vec::new(); let mut result = Vec::new();
for node in nodes { for node in nodes {
@ -1150,7 +1150,7 @@ impl Storage {
)?; )?;
let embedding_model: Option<String> = if nodes_with_embeddings > 0 { let embedding_model: Option<String> = if nodes_with_embeddings > 0 {
Some("all-MiniLM-L6-v2".to_string()) Some("nomic-embed-text-v1.5".to_string())
} else { } else {
None None
}; };
@ -1182,6 +1182,14 @@ impl Storage {
.map_err(|_| StorageError::Init("Writer lock poisoned".into()))?; .map_err(|_| StorageError::Init("Writer lock poisoned".into()))?;
let rows = writer let rows = writer
.execute("DELETE FROM knowledge_nodes WHERE id = ?1", params![id])?; .execute("DELETE FROM knowledge_nodes WHERE id = ?1", params![id])?;
// Clean up vector index to prevent stale search results
#[cfg(all(feature = "embeddings", feature = "vector-search"))]
if rows > 0
&& let Ok(mut index) = self.vector_index.lock() {
let _ = index.remove(id);
}
Ok(rows > 0) Ok(rows > 0)
} }
@ -1199,7 +1207,7 @@ impl Storage {
LIMIT ?2", LIMIT ?2",
)?; )?;
let nodes = stmt.query_map(params![sanitized_query, limit], |row| Self::row_to_node(row))?; let nodes = stmt.query_map(params![sanitized_query, limit], Self::row_to_node)?;
let mut result = Vec::new(); let mut result = Vec::new();
for node in nodes { for node in nodes {
@ -1218,7 +1226,7 @@ impl Storage {
LIMIT ?1 OFFSET ?2", LIMIT ?1 OFFSET ?2",
)?; )?;
let nodes = stmt.query_map(params![limit, offset], |row| Self::row_to_node(row))?; let nodes = stmt.query_map(params![limit, offset], Self::row_to_node)?;
let mut result = Vec::new(); let mut result = Vec::new();
for node in nodes { for node in nodes {
@ -1268,7 +1276,7 @@ impl Storage {
ORDER BY retention_strength DESC, created_at DESC ORDER BY retention_strength DESC, created_at DESC
LIMIT ?2", LIMIT ?2",
)?; )?;
let rows = stmt.query_map(params![node_type, limit], |row| Self::row_to_node(row))?; let rows = stmt.query_map(params![node_type, limit], Self::row_to_node)?;
let mut nodes = Vec::new(); let mut nodes = Vec::new();
for node in rows.flatten() { for node in rows.flatten() {
nodes.push(node); nodes.push(node);
@ -1641,7 +1649,7 @@ impl Storage {
LIMIT ?2", LIMIT ?2",
)?; )?;
let nodes = stmt.query_map(params![timestamp, limit], |row| Self::row_to_node(row))?; let nodes = stmt.query_map(params![timestamp, limit], Self::row_to_node)?;
let mut result = Vec::new(); let mut result = Vec::new();
for node in nodes { for node in nodes {
@ -1704,7 +1712,7 @@ impl Storage {
.map_err(|_| StorageError::Init("Reader lock poisoned".into()))?; .map_err(|_| StorageError::Init("Reader lock poisoned".into()))?;
let mut stmt = reader.prepare(query)?; let mut stmt = reader.prepare(query)?;
let params_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect(); let params_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
let nodes = stmt.query_map(params_refs.as_slice(), |row| Self::row_to_node(row))?; let nodes = stmt.query_map(params_refs.as_slice(), Self::row_to_node)?;
let mut result = Vec::new(); let mut result = Vec::new();
for node in nodes { for node in nodes {
@ -2404,12 +2412,11 @@ impl Storage {
/// Generate missing embeddings /// Generate missing embeddings
#[cfg(all(feature = "embeddings", feature = "vector-search"))] #[cfg(all(feature = "embeddings", feature = "vector-search"))]
fn generate_missing_embeddings(&self) -> Result<i64> { fn generate_missing_embeddings(&self) -> Result<i64> {
if !self.embedding_service.is_ready() { if !self.embedding_service.is_ready()
if let Err(e) = self.embedding_service.init() { && let Err(e) = self.embedding_service.init() {
tracing::warn!("Could not initialize embedding model: {}", e); tracing::warn!("Could not initialize embedding model: {}", e);
return Ok(0); return Ok(0);
} }
}
let nodes: Vec<(String, String)> = { let nodes: Vec<(String, String)> = {
let reader = self.reader.lock() let reader = self.reader.lock()
@ -2615,7 +2622,7 @@ impl Storage {
"SELECT * FROM intentions WHERE id = ?1" "SELECT * FROM intentions WHERE id = ?1"
)?; )?;
stmt.query_row(params![id], |row| Self::row_to_intention(row)) stmt.query_row(params![id], Self::row_to_intention)
.optional() .optional()
.map_err(StorageError::from) .map_err(StorageError::from)
} }
@ -2628,7 +2635,7 @@ impl Storage {
"SELECT * FROM intentions WHERE status = 'active' ORDER BY priority DESC, created_at ASC" "SELECT * FROM intentions WHERE status = 'active' ORDER BY priority DESC, created_at ASC"
)?; )?;
let rows = stmt.query_map([], |row| Self::row_to_intention(row))?; let rows = stmt.query_map([], Self::row_to_intention)?;
let mut result = Vec::new(); let mut result = Vec::new();
for row in rows { for row in rows {
result.push(row?); result.push(row?);
@ -2644,7 +2651,7 @@ impl Storage {
"SELECT * FROM intentions WHERE status = ?1 ORDER BY priority DESC, created_at ASC" "SELECT * FROM intentions WHERE status = ?1 ORDER BY priority DESC, created_at ASC"
)?; )?;
let rows = stmt.query_map(params![status], |row| Self::row_to_intention(row))?; let rows = stmt.query_map(params![status], Self::row_to_intention)?;
let mut result = Vec::new(); let mut result = Vec::new();
for row in rows { for row in rows {
result.push(row?); result.push(row?);
@ -2683,7 +2690,7 @@ impl Storage {
"SELECT * FROM intentions WHERE status = 'active' AND deadline IS NOT NULL AND deadline < ?1 ORDER BY deadline ASC" "SELECT * FROM intentions WHERE status = 'active' AND deadline IS NOT NULL AND deadline < ?1 ORDER BY deadline ASC"
)?; )?;
let rows = stmt.query_map(params![now], |row| Self::row_to_intention(row))?; let rows = stmt.query_map(params![now], Self::row_to_intention)?;
let mut result = Vec::new(); let mut result = Vec::new();
for row in rows { for row in rows {
result.push(row?); result.push(row?);
@ -2775,7 +2782,7 @@ impl Storage {
"SELECT * FROM insights ORDER BY generated_at DESC LIMIT ?1" "SELECT * FROM insights ORDER BY generated_at DESC LIMIT ?1"
)?; )?;
let rows = stmt.query_map(params![limit], |row| Self::row_to_insight(row))?; let rows = stmt.query_map(params![limit], Self::row_to_insight)?;
let mut result = Vec::new(); let mut result = Vec::new();
for row in rows { for row in rows {
result.push(row?); result.push(row?);
@ -2791,7 +2798,7 @@ impl Storage {
"SELECT * FROM insights WHERE feedback IS NULL ORDER BY novelty_score DESC" "SELECT * FROM insights WHERE feedback IS NULL ORDER BY novelty_score DESC"
)?; )?;
let rows = stmt.query_map([], |row| Self::row_to_insight(row))?; let rows = stmt.query_map([], Self::row_to_insight)?;
let mut result = Vec::new(); let mut result = Vec::new();
for row in rows { for row in rows {
result.push(row?); result.push(row?);
@ -2874,7 +2881,7 @@ impl Storage {
"SELECT * FROM memory_connections WHERE source_id = ?1 OR target_id = ?1 ORDER BY strength DESC" "SELECT * FROM memory_connections WHERE source_id = ?1 OR target_id = ?1 ORDER BY strength DESC"
)?; )?;
let rows = stmt.query_map(params![memory_id], |row| Self::row_to_connection(row))?; let rows = stmt.query_map(params![memory_id], Self::row_to_connection)?;
let mut result = Vec::new(); let mut result = Vec::new();
for row in rows { for row in rows {
result.push(row?); result.push(row?);
@ -2890,7 +2897,7 @@ impl Storage {
"SELECT * FROM memory_connections ORDER BY strength DESC" "SELECT * FROM memory_connections ORDER BY strength DESC"
)?; )?;
let rows = stmt.query_map([], |row| Self::row_to_connection(row))?; let rows = stmt.query_map([], Self::row_to_connection)?;
let mut result = Vec::new(); let mut result = Vec::new();
for row in rows { for row in rows {
result.push(row?); result.push(row?);
@ -2988,7 +2995,7 @@ impl Storage {
"SELECT * FROM memory_states WHERE memory_id = ?1" "SELECT * FROM memory_states WHERE memory_id = ?1"
)?; )?;
stmt.query_row(params![memory_id], |row| Self::row_to_memory_state(row)) stmt.query_row(params![memory_id], Self::row_to_memory_state)
.optional() .optional()
.map_err(StorageError::from) .map_err(StorageError::from)
} }
@ -3241,14 +3248,13 @@ impl Storage {
let name = entry.file_name(); let name = entry.file_name();
let name_str = name.to_string_lossy(); let name_str = name.to_string_lossy();
// Parse vestige-YYYYMMDD-HHMMSS.db // Parse vestige-YYYYMMDD-HHMMSS.db
if let Some(ts_part) = name_str.strip_prefix("vestige-").and_then(|s| s.strip_suffix(".db")) { if let Some(ts_part) = name_str.strip_prefix("vestige-").and_then(|s| s.strip_suffix(".db"))
if let Ok(naive) = chrono::NaiveDateTime::parse_from_str(ts_part, "%Y%m%d-%H%M%S") { && let Ok(naive) = chrono::NaiveDateTime::parse_from_str(ts_part, "%Y%m%d-%H%M%S") {
let dt = naive.and_utc(); let dt = naive.and_utc();
if latest.as_ref().is_none_or(|l| dt > *l) { if latest.as_ref().is_none_or(|l| dt > *l) {
latest = Some(dt); latest = Some(dt);
} }
} }
}
} }
} }
@ -3406,12 +3412,37 @@ impl Storage {
/// Auto-GC memories below threshold (used by retention target system) /// Auto-GC memories below threshold (used by retention target system)
pub fn gc_below_retention(&self, threshold: f64, min_age_days: i64) -> Result<i64> { pub fn gc_below_retention(&self, threshold: f64, min_age_days: i64) -> Result<i64> {
let cutoff = (Utc::now() - Duration::days(min_age_days)).to_rfc3339(); let cutoff = (Utc::now() - Duration::days(min_age_days)).to_rfc3339();
// Collect IDs first for vector index cleanup
#[cfg(all(feature = "embeddings", feature = "vector-search"))]
let doomed_ids: Vec<String> = {
let reader = self.reader.lock()
.map_err(|_| StorageError::Init("Reader lock poisoned".into()))?;
let mut stmt = reader.prepare(
"SELECT id FROM knowledge_nodes WHERE retention_strength < ?1 AND created_at < ?2",
)?;
stmt.query_map(params![threshold, cutoff], |row| row.get(0))?
.filter_map(|r| r.ok())
.collect()
};
let writer = self.writer.lock() let writer = self.writer.lock()
.map_err(|_| StorageError::Init("Writer lock poisoned".into()))?; .map_err(|_| StorageError::Init("Writer lock poisoned".into()))?;
let deleted = writer.execute( let deleted = writer.execute(
"DELETE FROM knowledge_nodes WHERE retention_strength < ?1 AND created_at < ?2", "DELETE FROM knowledge_nodes WHERE retention_strength < ?1 AND created_at < ?2",
params![threshold, cutoff], params![threshold, cutoff],
)? as i64; )? as i64;
drop(writer);
// Clean up vector index
#[cfg(all(feature = "embeddings", feature = "vector-search"))]
if deleted > 0
&& let Ok(mut index) = self.vector_index.lock() {
for id in &doomed_ids {
let _ = index.remove(id);
}
}
Ok(deleted) Ok(deleted)
} }
@ -3489,7 +3520,7 @@ impl Storage {
let mut stmt = reader.prepare( let mut stmt = reader.prepare(
"SELECT * FROM knowledge_nodes WHERE waking_tag = TRUE ORDER BY waking_tag_at DESC LIMIT ?1" "SELECT * FROM knowledge_nodes WHERE waking_tag = TRUE ORDER BY waking_tag_at DESC LIMIT ?1"
)?; )?;
let nodes = stmt.query_map(params![limit], |row| Self::row_to_node(row))?; let nodes = stmt.query_map(params![limit], Self::row_to_node)?;
let mut result = Vec::new(); let mut result = Vec::new();
for node in nodes { for node in nodes {
result.push(node?); result.push(node?);

View file

@ -1,6 +1,6 @@
[package] [package]
name = "vestige-mcp" name = "vestige-mcp"
version = "2.0.0" version = "2.0.1"
edition = "2024" edition = "2024"
description = "Cognitive memory MCP server for Claude - FSRS-6, spreading activation, synaptic tagging, 3D dashboard, and 130 years of memory research" description = "Cognitive memory MCP server for Claude - FSRS-6, spreading activation, synaptic tagging, 3D dashboard, and 130 years of memory research"
authors = ["samvallad33"] authors = ["samvallad33"]
@ -32,7 +32,7 @@ path = "src/bin/cli.rs"
# ============================================================================ # ============================================================================
# Includes: FSRS-6, spreading activation, synaptic tagging, hippocampal indexing, # Includes: FSRS-6, spreading activation, synaptic tagging, hippocampal indexing,
# memory states, context memory, importance signals, dreams, and more # memory states, context memory, importance signals, dreams, and more
vestige-core = { version = "2.0.0", path = "../vestige-core" } vestige-core = { version = "2.0.1", path = "../vestige-core" }
# ============================================================================ # ============================================================================
# MCP Server Dependencies # MCP Server Dependencies
@ -61,9 +61,6 @@ tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
# Platform directories # Platform directories
directories = "6" directories = "6"
# Official Anthropic MCP Rust SDK
rmcp = "0.14"
# CLI # CLI
clap = { version = "4", features = ["derive"] } clap = { version = "4", features = ["derive"] }
colored = "3" colored = "3"
@ -71,7 +68,7 @@ colored = "3"
# SQLite (for backup WAL checkpoint) # SQLite (for backup WAL checkpoint)
rusqlite = { version = "0.38", features = ["bundled"] } rusqlite = { version = "0.38", features = ["bundled"] }
# Dashboard (v1.2) - hyper/tower already in Cargo.lock via rmcp/reqwest # Dashboard (v2.0) - HTTP server + WebSocket + embedded SvelteKit
axum = { version = "0.8", default-features = false, features = ["json", "query", "tokio", "http1", "ws"] } axum = { version = "0.8", default-features = false, features = ["json", "query", "tokio", "http1", "ws"] }
tower = { version = "0.5", features = ["limit"] } tower = { version = "0.5", features = ["limit"] }
tower-http = { version = "0.6", features = ["cors", "set-header"] } tower-http = { version = "0.6", features = ["cors", "set-header"] }

View file

@ -829,9 +829,10 @@ pub async fn trigger_consolidation(
pub async fn retention_distribution( pub async fn retention_distribution(
State(state): State<AppState>, State(state): State<AppState>,
) -> Result<Json<Value>, StatusCode> { ) -> Result<Json<Value>, StatusCode> {
// Cap at 1000 to prevent excessive memory usage on large databases
let nodes = state let nodes = state
.storage .storage
.get_all_nodes(10000, 0) .get_all_nodes(1000, 0)
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
// Build distribution buckets // Build distribution buckets

View file

@ -48,22 +48,23 @@ pub fn build_router_with_event_tx(
fn build_router_inner(state: AppState, port: u16) -> (Router, AppState) { fn build_router_inner(state: AppState, port: u16) -> (Router, AppState) {
let origins = vec![ #[allow(unused_mut)]
let mut origins = vec![
format!("http://127.0.0.1:{}", port) format!("http://127.0.0.1:{}", port)
.parse::<axum::http::HeaderValue>() .parse::<axum::http::HeaderValue>()
.expect("valid origin"), .expect("valid origin"),
format!("http://localhost:{}", port) format!("http://localhost:{}", port)
.parse::<axum::http::HeaderValue>() .parse::<axum::http::HeaderValue>()
.expect("valid origin"), .expect("valid origin"),
// SvelteKit dev server
"http://localhost:5173"
.parse::<axum::http::HeaderValue>()
.expect("valid origin"),
"http://127.0.0.1:5173"
.parse::<axum::http::HeaderValue>()
.expect("valid origin"),
]; ];
// SvelteKit dev server — only in debug builds
#[cfg(debug_assertions)]
{
origins.push("http://localhost:5173".parse::<axum::http::HeaderValue>().expect("valid origin"));
origins.push("http://127.0.0.1:5173".parse::<axum::http::HeaderValue>().expect("valid origin"));
}
let cors = CorsLayer::new() let cors = CorsLayer::new()
.allow_origin(AllowOrigin::list(origins)) .allow_origin(AllowOrigin::list(origins))
.allow_methods([ .allow_methods([
@ -77,11 +78,39 @@ fn build_router_inner(state: AppState, port: u16) -> (Router, AppState) {
axum::http::header::AUTHORIZATION, axum::http::header::AUTHORIZATION,
]); ]);
// Security: restrict WebSocket connections to localhost only (prevents cross-site WS hijacking)
let csp_value = format!(
"default-src 'self'; \
script-src 'self' 'unsafe-inline'; \
style-src 'self' 'unsafe-inline'; \
img-src 'self' blob: data:; \
connect-src 'self' ws://127.0.0.1:{port} ws://localhost:{port}; \
font-src 'self' data:; \
frame-ancestors 'none'; \
base-uri 'self'; \
form-action 'self';"
);
let csp = SetResponseHeaderLayer::overriding( let csp = SetResponseHeaderLayer::overriding(
axum::http::header::CONTENT_SECURITY_POLICY, axum::http::header::CONTENT_SECURITY_POLICY,
axum::http::HeaderValue::from_static( axum::http::HeaderValue::from_str(&csp_value).expect("valid CSP header"),
"default-src 'self' 'unsafe-inline' 'unsafe-eval' blob: data: ws: wss:", );
),
// Additional security headers
let x_frame_options = SetResponseHeaderLayer::overriding(
axum::http::header::X_FRAME_OPTIONS,
axum::http::HeaderValue::from_static("DENY"),
);
let x_content_type_options = SetResponseHeaderLayer::overriding(
axum::http::header::X_CONTENT_TYPE_OPTIONS,
axum::http::HeaderValue::from_static("nosniff"),
);
let referrer_policy = SetResponseHeaderLayer::overriding(
axum::http::HeaderName::from_static("referrer-policy"),
axum::http::HeaderValue::from_static("strict-origin-when-cross-origin"),
);
let permissions_policy = SetResponseHeaderLayer::overriding(
axum::http::HeaderName::from_static("permissions-policy"),
axum::http::HeaderValue::from_static("camera=(), microphone=(), geolocation=()"),
); );
let router = Router::new() let router = Router::new()
@ -121,7 +150,11 @@ fn build_router_inner(state: AppState, port: u16) -> (Router, AppState) {
ServiceBuilder::new() ServiceBuilder::new()
.concurrency_limit(50) .concurrency_limit(50)
.layer(cors) .layer(cors)
.layer(csp), .layer(csp)
.layer(x_frame_options)
.layer(x_content_type_options)
.layer(referrer_policy)
.layer(permissions_policy),
) )
.with_state(state.clone()); .with_state(state.clone());

View file

@ -5,6 +5,7 @@
use axum::extract::ws::{Message, WebSocket, WebSocketUpgrade}; use axum::extract::ws::{Message, WebSocket, WebSocketUpgrade};
use axum::extract::State; use axum::extract::State;
use axum::http::{HeaderMap, StatusCode};
use axum::response::IntoResponse; use axum::response::IntoResponse;
use chrono::Utc; use chrono::Utc;
use futures_util::{SinkExt, StreamExt}; use futures_util::{SinkExt, StreamExt};
@ -15,11 +16,33 @@ use super::events::VestigeEvent;
use super::state::AppState; use super::state::AppState;
/// WebSocket upgrade handler — GET /ws /// WebSocket upgrade handler — GET /ws
/// Validates Origin header to prevent cross-site WebSocket hijacking.
pub async fn ws_handler( pub async fn ws_handler(
headers: HeaderMap,
ws: WebSocketUpgrade, ws: WebSocketUpgrade,
State(state): State<AppState>, State(state): State<AppState>,
) -> impl IntoResponse { ) -> impl IntoResponse {
ws.on_upgrade(move |socket| handle_socket(socket, state)) // Validate Origin header (browsers always send it for WebSocket upgrades).
// Non-browser clients (curl, wscat) won't have Origin — allowed since localhost-only.
match headers.get("origin").and_then(|v| v.to_str().ok()) {
Some(origin) => {
let allowed = origin.starts_with("http://127.0.0.1:")
|| origin.starts_with("http://localhost:");
#[cfg(debug_assertions)]
let allowed = allowed || origin == "http://localhost:5173" || origin == "http://127.0.0.1:5173";
if !allowed {
warn!("Rejected WebSocket connection from origin: {}", origin);
return StatusCode::FORBIDDEN.into_response();
}
}
None => {
debug!("WebSocket connection without Origin header (non-browser client)");
}
}
ws.max_frame_size(64 * 1024)
.max_message_size(256 * 1024)
.on_upgrade(move |socket| handle_socket(socket, state))
.into_response()
} }
async fn handle_socket(socket: WebSocket, state: AppState) { async fn handle_socket(socket: WebSocket, state: AppState) {

View file

@ -850,14 +850,14 @@ impl McpServer {
match tool_name { match tool_name {
// -- smart_ingest: memory created/updated -- // -- smart_ingest: memory created/updated --
"smart_ingest" | "ingest" | "session_checkpoint" => { "smart_ingest" | "ingest" | "session_checkpoint" => {
// Single mode: result has "action" (created/updated/superseded/reinforced) // Single mode: result has "decision" (create/update/supersede/reinforce/merge/replace/add_context)
if let Some(action) = result.get("action").and_then(|a| a.as_str()) { if let Some(decision) = result.get("decision").and_then(|a| a.as_str()) {
let id = result.get("nodeId").or(result.get("id")) let id = result.get("nodeId").or(result.get("id"))
.and_then(|v| v.as_str()).unwrap_or("").to_string(); .and_then(|v| v.as_str()).unwrap_or("").to_string();
let preview = result.get("contentPreview").or(result.get("content")) let preview = result.get("contentPreview").or(result.get("content"))
.and_then(|v| v.as_str()).unwrap_or("").to_string(); .and_then(|v| v.as_str()).unwrap_or("").to_string();
match action { match decision {
"created" => { "create" => {
let node_type = result.get("nodeType") let node_type = result.get("nodeType")
.and_then(|v| v.as_str()).unwrap_or("fact").to_string(); .and_then(|v| v.as_str()).unwrap_or("fact").to_string();
let tags = result.get("tags") let tags = result.get("tags")
@ -868,9 +868,9 @@ impl McpServer {
id, content_preview: preview, node_type, tags, timestamp: now, id, content_preview: preview, node_type, tags, timestamp: now,
}); });
} }
"updated" | "superseded" | "reinforced" => { "update" | "supersede" | "reinforce" | "merge" | "replace" | "add_context" => {
self.emit(VestigeEvent::MemoryUpdated { self.emit(VestigeEvent::MemoryUpdated {
id, content_preview: preview, field: action.to_string(), timestamp: now, id, content_preview: preview, field: decision.to_string(), timestamp: now,
}); });
} }
_ => {} _ => {}
@ -879,20 +879,20 @@ impl McpServer {
// Batch mode: result has "results" array // Batch mode: result has "results" array
if let Some(results) = result.get("results").and_then(|r| r.as_array()) { if let Some(results) = result.get("results").and_then(|r| r.as_array()) {
for item in results { for item in results {
let action = item.get("action").and_then(|a| a.as_str()).unwrap_or(""); let decision = item.get("decision").and_then(|a| a.as_str()).unwrap_or("");
let id = item.get("nodeId").or(item.get("id")) let id = item.get("nodeId").or(item.get("id"))
.and_then(|v| v.as_str()).unwrap_or("").to_string(); .and_then(|v| v.as_str()).unwrap_or("").to_string();
let preview = item.get("contentPreview") let preview = item.get("contentPreview")
.and_then(|v| v.as_str()).unwrap_or("").to_string(); .and_then(|v| v.as_str()).unwrap_or("").to_string();
if action == "created" { if decision == "create" {
self.emit(VestigeEvent::MemoryCreated { self.emit(VestigeEvent::MemoryCreated {
id, content_preview: preview, id, content_preview: preview,
node_type: "fact".to_string(), tags: vec![], timestamp: now, node_type: "fact".to_string(), tags: vec![], timestamp: now,
}); });
} else if !action.is_empty() { } else if !decision.is_empty() {
self.emit(VestigeEvent::MemoryUpdated { self.emit(VestigeEvent::MemoryUpdated {
id, content_preview: preview, id, content_preview: preview,
field: action.to_string(), timestamp: now, field: decision.to_string(), timestamp: now,
}); });
} }
} }
@ -1000,7 +1000,7 @@ impl McpServer {
let preview = args.as_ref() let preview = args.as_ref()
.and_then(|a| a.get("content")) .and_then(|a| a.get("content"))
.and_then(|v| v.as_str()) .and_then(|v| v.as_str())
.map(|s| if s.len() > 100 { format!("{}...", &s[..100]) } else { s.to_string() }) .map(|s| if s.len() > 100 { format!("{}...", &s[..s.floor_char_boundary(100)]) } else { s.to_string() })
.unwrap_or_default(); .unwrap_or_default();
let composite = result.get("compositeScore") let composite = result.get("compositeScore")
.or(result.get("composite_score")) .or(result.get("composite_score"))

View file

@ -72,10 +72,10 @@ pub async fn execute(
if let Some(ref memory_id) = args.memory_id { if let Some(ref memory_id) = args.memory_id {
// Per-memory mode: state transitions for a specific memory // Per-memory mode: state transitions for a specific memory
execute_per_memory(&storage, memory_id, limit) execute_per_memory(storage, memory_id, limit)
} else { } else {
// System-wide mode: consolidations + recent transitions // System-wide mode: consolidations + recent transitions
execute_system_wide(&storage, limit) execute_system_wide(storage, limit)
} }
} }

View file

@ -204,7 +204,7 @@ async fn execute_remember_decision(
// Build content with structured format (ADR-like) // Build content with structured format (ADR-like)
let mut content = format!( let mut content = format!(
"# Decision: {}\n\n## Context\n\n{}\n\n## Decision\n\n{}", "# Decision: {}\n\n## Context\n\n{}\n\n## Decision\n\n{}",
&decision[..decision.len().min(50)], &decision[..decision.floor_char_boundary(50)],
rationale, rationale,
decision decision
); );

View file

@ -219,7 +219,7 @@ pub async fn execute(
.map(|n| { .map(|n| {
let c = n.content.replace('\n', " "); let c = n.content.replace('\n', " ");
if c.len() > 120 { if c.len() > 120 {
format!("{}...", &c[..120]) format!("{}...", &c[..c.floor_char_boundary(120)])
} else { } else {
c c
} }

View file

@ -96,7 +96,7 @@ pub async fn execute(
if intent_result.confidence > 0.5 { if intent_result.confidence > 0.5 {
let intent_tag = format!("intent:{:?}", intent_result.primary_intent); let intent_tag = format!("intent:{:?}", intent_result.primary_intent);
let intent_tag = if intent_tag.len() > 50 { let intent_tag = if intent_tag.len() > 50 {
format!("{}...", &intent_tag[..47]) format!("{}...", &intent_tag[..intent_tag.floor_char_boundary(47)])
} else { } else {
intent_tag intent_tag
}; };

View file

@ -297,7 +297,7 @@ async fn execute_set(
if intent_result.confidence > 0.5 { if intent_result.confidence > 0.5 {
let intent_tag = format!("intent:{:?}", intent_result.primary_intent); let intent_tag = format!("intent:{:?}", intent_result.primary_intent);
let intent_tag = if intent_tag.len() > 50 { let intent_tag = if intent_tag.len() > 50 {
format!("{}...", &intent_tag[..47]) format!("{}...", &intent_tag[..intent_tag.floor_char_boundary(47)])
} else { } else {
intent_tag intent_tag
}; };

View file

@ -249,7 +249,7 @@ pub async fn execute_system_status(
let last_dream = storage.get_last_dream().ok().flatten(); let last_dream = storage.get_last_dream().ok().flatten();
let saves_since_last_dream = match &last_dream { let saves_since_last_dream = match &last_dream {
Some(dt) => storage.count_memories_since(*dt).unwrap_or(0), Some(dt) => storage.count_memories_since(*dt).unwrap_or(0),
None => stats.total_nodes as i64, None => stats.total_nodes,
}; };
let last_backup = Storage::get_last_backup_timestamp(); let last_backup = Storage::get_last_backup_timestamp();

View file

@ -37,8 +37,10 @@ pub mod session_context;
pub mod health; pub mod health;
pub mod graph; pub mod graph;
// Deprecated tools - kept for internal backwards compatibility // Deprecated/internal tools — not advertised in the public MCP tools/list,
// These modules are intentionally unused in the public API // but some functions are actively dispatched for backwards compatibility
// and internal cognitive operations. #[allow(dead_code)] suppresses warnings
// for the unused schema/struct items within these modules.
#[allow(dead_code)] #[allow(dead_code)]
pub mod checkpoint; pub mod checkpoint;
#[allow(dead_code)] #[allow(dead_code)]

View file

@ -218,7 +218,7 @@ pub async fn execute(
let last_dream = storage.get_last_dream().ok().flatten(); let last_dream = storage.get_last_dream().ok().flatten();
let saves_since_last_dream = match &last_dream { let saves_since_last_dream = match &last_dream {
Some(dt) => storage.count_memories_since(*dt).unwrap_or(0), Some(dt) => storage.count_memories_since(*dt).unwrap_or(0),
None => stats.total_nodes as i64, None => stats.total_nodes,
}; };
let last_backup = Storage::get_last_backup_timestamp(); let last_backup = Storage::get_last_backup_timestamp();
let now = Utc::now(); let now = Utc::now();
@ -333,8 +333,8 @@ pub async fn execute(
// ==================================================================== // ====================================================================
// 5. Codebase patterns/decisions (if codebase specified) // 5. Codebase patterns/decisions (if codebase specified)
// ==================================================================== // ====================================================================
if let Some(ref ctx) = args.context { if let Some(ref ctx) = args.context
if let Some(ref codebase) = ctx.codebase { && let Some(ref codebase) = ctx.codebase {
let codebase_tag = format!("codebase:{}", codebase); let codebase_tag = format!("codebase:{}", codebase);
let mut cb_lines: Vec<String> = Vec::new(); let mut cb_lines: Vec<String> = Vec::new();
@ -368,7 +368,6 @@ pub async fn execute(
context_parts.push(format!("**Codebase ({}):**\n{}", codebase, cb_lines.join("\n"))); context_parts.push(format!("**Codebase ({}):**\n{}", codebase, cb_lines.join("\n")));
} }
} }
}
// ==================================================================== // ====================================================================
// 6. Assemble final response // 6. Assemble final response
@ -404,11 +403,10 @@ fn check_intention_triggered(
match trigger.trigger_type.as_deref() { match trigger.trigger_type.as_deref() {
Some("time") => { Some("time") => {
if let Some(ref at) = trigger.at { if let Some(ref at) = trigger.at
if let Ok(trigger_time) = DateTime::parse_from_rfc3339(at) { && let Ok(trigger_time) = DateTime::parse_from_rfc3339(at) {
return trigger_time.with_timezone(&Utc) <= now; return trigger_time.with_timezone(&Utc) <= now;
} }
}
if let Some(mins) = trigger.in_minutes { if let Some(mins) = trigger.in_minutes {
let trigger_time = intention.created_at + Duration::minutes(mins); let trigger_time = intention.created_at + Duration::minutes(mins);
return trigger_time <= now; return trigger_time <= now;
@ -418,29 +416,25 @@ fn check_intention_triggered(
Some("context") => { Some("context") => {
// Check codebase match // Check codebase match
if let (Some(trigger_cb), Some(current_cb)) = (&trigger.codebase, &ctx.codebase) if let (Some(trigger_cb), Some(current_cb)) = (&trigger.codebase, &ctx.codebase)
{ && current_cb
if current_cb
.to_lowercase() .to_lowercase()
.contains(&trigger_cb.to_lowercase()) .contains(&trigger_cb.to_lowercase())
{ {
return true; return true;
} }
}
// Check file pattern match // Check file pattern match
if let (Some(pattern), Some(file)) = (&trigger.file_pattern, &ctx.file) { if let (Some(pattern), Some(file)) = (&trigger.file_pattern, &ctx.file)
if file.contains(pattern.as_str()) { && file.contains(pattern.as_str()) {
return true; return true;
} }
}
// Check topic match // Check topic match
if let (Some(topic), Some(topics)) = (&trigger.topic, &ctx.topics) { if let (Some(topic), Some(topics)) = (&trigger.topic, &ctx.topics)
if topics && topics
.iter() .iter()
.any(|t| t.to_lowercase().contains(&topic.to_lowercase())) .any(|t| t.to_lowercase().contains(&topic.to_lowercase()))
{ {
return true; return true;
} }
}
false false
} }
_ => false, _ => false,

View file

@ -165,7 +165,7 @@ pub async fn execute(
let intent_tag = format!("intent:{:?}", intent_result.primary_intent); let intent_tag = format!("intent:{:?}", intent_result.primary_intent);
// Truncate long intent tags // Truncate long intent tags
let intent_tag = if intent_tag.len() > 50 { let intent_tag = if intent_tag.len() > 50 {
format!("{}...", &intent_tag[..47]) format!("{}...", &intent_tag[..intent_tag.floor_char_boundary(47)])
} else { } else {
intent_tag intent_tag
}; };
@ -338,7 +338,7 @@ async fn execute_batch(
if intent_result.confidence > 0.5 { if intent_result.confidence > 0.5 {
let intent_tag = format!("intent:{:?}", intent_result.primary_intent); let intent_tag = format!("intent:{:?}", intent_result.primary_intent);
let intent_tag = if intent_tag.len() > 50 { let intent_tag = if intent_tag.len() > 50 {
format!("{}...", &intent_tag[..47]) format!("{}...", &intent_tag[..intent_tag.floor_char_boundary(47)])
} else { } else {
intent_tag intent_tag
}; };

View file

@ -71,7 +71,7 @@ Use `codebase` → `remember_pattern`:
| "Don't forget" | `smart_ingest` with high priority | | "Don't forget" | `smart_ingest` with high priority |
| "I always..." / "I never..." | Save as preference | | "I always..." / "I never..." | Save as preference |
| "I prefer..." / "I like..." | Save as preference | | "I prefer..." / "I like..." | Save as preference |
| "This is important" | `smart_ingest` + `promote_memory` | | "This is important" | `smart_ingest` + `memory(action="promote")` |
| "Remind me..." | Create `intention` | | "Remind me..." | Create `intention` |
| "Next time..." | Create `intention` with context trigger | | "Next time..." | Create `intention` with context trigger |
@ -148,11 +148,11 @@ smart_ingest(
At the end of significant conversations: At the end of significant conversations:
1. Reflect: "Did anything change about how I understand myself?" 1. Reflect: "Did anything change about how I understand myself?"
2. If yes, update identity memories with `smart_ingest` 2. If yes, update identity memories with `smart_ingest`
3. Prune outdated self-concepts with `demote_memory` 3. Prune outdated self-concepts with `memory(action="demote")`
### Memory Hygiene ### Memory Hygiene
- Use `promote_memory` when a memory proves valuable - Use `memory(action="promote")` when a memory proves valuable
- Use `demote_memory` when a memory led you astray - Use `memory(action="demote")` when a memory led you astray
``` ```
--- ---
@ -199,7 +199,7 @@ You have persistent memory via Vestige. Use it intelligently:
- Notice a pattern? `codebase(action="remember_pattern")` - Notice a pattern? `codebase(action="remember_pattern")`
- Made a decision? `codebase(action="remember_decision")` with rationale - Made a decision? `codebase(action="remember_decision")` with rationale
- I mention a preference? `smart_ingest` it - I mention a preference? `smart_ingest` it
- Something important? `importance()` to strengthen recent memories - Something important? `importance_score` to check if worth saving
- Need to follow up? `intention(action="set")` - Need to follow up? `intention(action="set")`
### Session End ### Session End
@ -208,8 +208,8 @@ You have persistent memory via Vestige. Use it intelligently:
- Anything change about our working relationship? Update identity memories - Anything change about our working relationship? Update identity memories
### Memory Hygiene ### Memory Hygiene
- When a memory helps: `promote_memory` - When a memory helps: `memory(action="promote")`
- When a memory misleads: `demote_memory` - When a memory misleads: `memory(action="demote")`
- Weekly: `vestige health` to check system status - Weekly: `vestige health` to check system status
``` ```

View file

@ -1,10 +1,10 @@
{ {
"name": "vestige", "name": "vestige",
"version": "1.6.0", "version": "2.0.1",
"private": true, "private": true,
"description": "Cognitive memory for AI - MCP server with FSRS-6 spaced repetition", "description": "Cognitive memory for AI - MCP server with FSRS-6 spaced repetition",
"author": "Sam Valladares", "author": "Sam Valladares",
"license": "MIT OR Apache-2.0", "license": "AGPL-3.0-only",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://github.com/samvallad33/vestige" "url": "https://github.com/samvallad33/vestige"

View file

@ -1,6 +1,6 @@
{ {
"name": "@vestige/init", "name": "@vestige/init",
"version": "2.0.0", "version": "2.0.1",
"description": "Give your AI a brain in 10 seconds — zero-config Vestige v2.0 installer with 3D dashboard", "description": "Give your AI a brain in 10 seconds — zero-config Vestige v2.0 installer with 3D dashboard",
"bin": { "bin": {
"vestige-init": "bin/init.js" "vestige-init": "bin/init.js"

View file

@ -1,4 +1,4 @@
# @vestige/mcp # vestige-mcp-server
Vestige MCP Server - A synthetic hippocampus for AI assistants. Vestige MCP Server - A synthetic hippocampus for AI assistants.
@ -120,10 +120,10 @@ Fix the MCP connection first, then the model will download automatically.
| Platform | Architecture | | Platform | Architecture |
|----------|--------------| |----------|--------------|
| macOS | ARM64 (Apple Silicon) | | macOS | ARM64 (Apple Silicon), x86_64 (Intel) |
| Linux | x86_64 | | Linux | x86_64 |
| Windows | x86_64 | | Windows | x86_64 |
## License ## License
MIT AGPL-3.0-only

View file

@ -13,7 +13,7 @@ if (!fs.existsSync(binaryPath)) {
console.error('Error: vestige-mcp binary not found.'); console.error('Error: vestige-mcp binary not found.');
console.error(`Expected at: ${binaryPath}`); console.error(`Expected at: ${binaryPath}`);
console.error(''); console.error('');
console.error('Try reinstalling: npm install -g @vestige/mcp'); console.error('Try reinstalling: npm install -g vestige-mcp-server');
process.exit(1); process.exit(1);
} }

View file

@ -13,7 +13,7 @@ if (!fs.existsSync(binaryPath)) {
console.error('Error: vestige CLI binary not found.'); console.error('Error: vestige CLI binary not found.');
console.error(`Expected at: ${binaryPath}`); console.error(`Expected at: ${binaryPath}`);
console.error(''); console.error('');
console.error('Try reinstalling: npm install -g @vestige/mcp'); console.error('Try reinstalling: npm install -g vestige-mcp-server');
process.exit(1); process.exit(1);
} }

View file

@ -1,6 +1,6 @@
{ {
"name": "vestige-mcp-server", "name": "vestige-mcp-server",
"version": "2.0.0", "version": "2.0.1",
"description": "Vestige MCP Server — Cognitive memory for AI with FSRS-6, 3D dashboard, and 29 brain modules", "description": "Vestige MCP Server — Cognitive memory for AI with FSRS-6, 3D dashboard, and 29 brain modules",
"bin": { "bin": {
"vestige-mcp": "bin/vestige-mcp.js", "vestige-mcp": "bin/vestige-mcp.js",

View file

@ -7,7 +7,7 @@ const os = require('os');
const { execSync } = require('child_process'); const { execSync } = require('child_process');
const VERSION = require('../package.json').version; const VERSION = require('../package.json').version;
const BINARY_VERSION = '1.1.3'; // GitHub release version for binaries const BINARY_VERSION = '2.0.1'; // GitHub release version for binaries
const PLATFORM = os.platform(); const PLATFORM = os.platform();
const ARCH = os.arch(); const ARCH = os.arch();

View file

@ -5,7 +5,7 @@ edition = "2024"
publish = false publish = false
[dependencies] [dependencies]
vestige-core = { path = "../../crates/vestige-core", features = ["full"] } vestige-core = { path = "../../crates/vestige-core", features = ["embeddings", "vector-search"] }
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
uuid = { version = "1", features = ["v4", "serde"] } uuid = { version = "1", features = ["v4", "serde"] }
tempfile = "3" tempfile = "3"