Mirror of https://github.com/samvallad33/vestige.git (synced 2026-05-08 23:32:37 +02:00)
fix: v2.0.1 release — fix broken installs, CI, security, and docs
Critical fixes:
- npm postinstall.js: BINARY_VERSION '1.1.3' → '2.0.1' (every install was 404ing)
- npm package name: corrected error messages to 'vestige-mcp-server'
- README: npm install command pointed to wrong package
- MSRV: bumped from 1.85 to 1.91 (uses floor_char_boundary from 1.91)
- CI: removed stale 'develop' branch from test.yml triggers

Security hardening:
- CSP: restricted connect-src from wildcard 'ws: wss:' to localhost-only
- Added X-Frame-Options, X-Content-Type-Options, Referrer-Policy, Permissions-Policy headers
- Added frame-ancestors 'none', base-uri 'self', form-action 'self' to CSP
- Capped retention_distribution endpoint from 10k to 1k nodes
- Added debug logging for WebSocket connections without Origin header

Maintenance:
- All clippy warnings fixed (58 total: redundant closures, collapsible ifs, no-op casts)
- All versions harmonized to 2.0.1 across Cargo.toml and package.json
- CLAUDE.md updated to match v2.0.1 (21 tools, 29 modules, 1238 tests)
- docs/CLAUDE-SETUP.md: updated deprecated function names
- License corrected to AGPL-3.0-only in root package.json

1,238 tests passing, 0 clippy warnings.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
parent b03df324da
commit c6090dc2ba
51 changed files with 343 additions and 490 deletions
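Most of the 58 clippy fixes listed above are the "collapsible if" cleanup that dominates the diff below: nested if-let / if blocks are folded into a single let-chain. A minimal, self-contained sketch of the before/after shape (illustrative only, not code from the repository; let-chains require the Rust 2024 edition, which vestige-core already declares):

// Before: two levels of nesting around one condition.
fn first_even_before(values: &[Option<i32>]) -> Option<i32> {
    for v in values {
        if let Some(n) = *v {
            if n % 2 == 0 {
                return Some(n);
            }
        }
    }
    None
}

// After: one let-chain, one level of nesting fewer.
fn first_even_after(values: &[Option<i32>]) -> Option<i32> {
    for v in values {
        if let Some(n) = *v
            && n % 2 == 0
        {
            return Some(n);
        }
    }
    None
}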
@@ -1,8 +1,8 @@
[package]
name = "vestige-core"
version = "2.0.0"
version = "2.0.1"
edition = "2024"
rust-version = "1.85"
rust-version = "1.91"
authors = ["Vestige Team"]
description = "Cognitive memory engine - FSRS-6 spaced repetition, semantic embeddings, and temporal memory"
license = "AGPL-3.0-only"

@@ -37,11 +37,6 @@ qwen3-reranker = ["embeddings", "fastembed/qwen3"]
# Metal GPU acceleration on Apple Silicon (significantly faster inference)
metal = ["fastembed/metal"]

# Full feature set including MCP protocol support
full = ["embeddings", "vector-search"]

# MCP (Model Context Protocol) support for Claude integration
mcp = []

[dependencies]
# Serialization
@@ -431,12 +431,11 @@ impl CrossProjectLearner {

// Check each trigger
for trigger in &pattern.pattern.triggers {
if let Some((matches, reason)) = self.check_trigger(trigger, context) {
if matches {
if let Some((matches, reason)) = self.check_trigger(trigger, context)
&& matches {
match_scores.push(trigger.confidence);
match_reasons.push(reason);
}
}
}

if match_scores.is_empty() {

@@ -547,12 +546,11 @@ impl CrossProjectLearner {

let success_rate = success_count as f64 / total_count as f64;

if let Ok(mut patterns) = self.patterns.write() {
if let Some(pattern) = patterns.get_mut(pattern_id) {
if let Ok(mut patterns) = self.patterns.write()
&& let Some(pattern) = patterns.get_mut(pattern_id) {
pattern.success_rate = success_rate;
pattern.application_count = total_count as u32;
}
}
}

fn extract_patterns_from_category(

@@ -596,8 +594,8 @@ impl CrossProjectLearner {
// Create a potential pattern (simplified)
let pattern_id = format!("auto-{}-{}", category_to_string(&category), keyword);

if let Ok(mut patterns) = self.patterns.write() {
if !patterns.contains_key(&pattern_id) {
if let Ok(mut patterns) = self.patterns.write()
&& !patterns.contains_key(&pattern_id) {
patterns.insert(
pattern_id.clone(),
UniversalPattern {

@@ -629,7 +627,6 @@ impl CrossProjectLearner {
},
);
}
}
}
}
}
@@ -454,11 +454,10 @@ impl ConsolidationScheduler {
if let Ok(mut graph) = self.connections.write() {
// Strengthen connections between sequentially replayed memories
for window in replay.sequence.windows(2) {
if let [id_a, id_b] = window {
if graph.strengthen_connection(id_a, id_b, 0.1) {
if let [id_a, id_b] = window
&& graph.strengthen_connection(id_a, id_b, 0.1) {
strengthened += 1;
}
}
}

// Also strengthen based on discovered patterns

@@ -704,13 +703,12 @@ impl ConnectionGraph {
let mut strengthened = false;

for (a, b) in [(from_id, to_id), (to_id, from_id)] {
if let Some(connections) = self.connections.get_mut(a) {
if let Some(conn) = connections.iter_mut().find(|c| c.target_id == b) {
if let Some(connections) = self.connections.get_mut(a)
&& let Some(conn) = connections.iter_mut().find(|c| c.target_id == b) {
conn.strength = (conn.strength + boost).min(2.0);
conn.last_strengthened = now;
strengthened = true;
}
}
}

strengthened

@@ -1478,11 +1476,10 @@ impl MemoryDreamer {
}

// Try to generate insight from this cluster
if let Some(insight) = self.generate_insight_from_cluster(&cluster_memories) {
if insight.novelty_score >= self.config.min_novelty {
if let Some(insight) = self.generate_insight_from_cluster(&cluster_memories)
&& insight.novelty_score >= self.config.min_novelty {
insights.push(insight);
}
}

if insights.len() >= self.config.max_insights {
break;
@@ -230,13 +230,11 @@ impl ImportanceTracker {
self.on_retrieved(memory_id, was_helpful);

// Store context with event
if let Ok(mut events) = self.recent_events.write() {
if let Some(event) = events.last_mut() {
if event.memory_id == memory_id {
if let Ok(mut events) = self.recent_events.write()
&& let Some(event) = events.last_mut()
&& event.memory_id == memory_id {
event.context = Some(context.to_string());
}
}
}
}

/// Apply importance decay to all memories
@@ -561,11 +561,10 @@ impl IntentDetector {
score += 0.2;
}
ActionType::FileOpened | ActionType::FileEdited => {
if let Some(file) = &action.file {
if let Some(name) = file.file_name() {
if let Some(file) = &action.file
&& let Some(name) = file.file_name() {
suspected_area = name.to_string_lossy().to_string();
}
}
}
_ => {}
}
@@ -516,15 +516,14 @@ impl ReconsolidationManager {
return false;
}

if let Some(state) = self.labile_memories.get_mut(memory_id) {
if state.is_within_window(self.labile_window) {
if let Some(state) = self.labile_memories.get_mut(memory_id)
&& state.is_within_window(self.labile_window) {
let success = state.add_modification(modification);
if success {
self.stats.total_modifications += 1;
}
return success;
}
}
false
}

@@ -690,15 +689,14 @@ impl ReconsolidationManager {

if let Ok(history) = self.retrieval_history.read() {
for record in history.iter() {
if record.memory_id == memory_id {
if let Some(context) = &record.context {
if record.memory_id == memory_id
&& let Some(context) = &record.context {
for co_id in &context.co_retrieved {
if co_id != memory_id {
*co_retrieved.entry(co_id.clone()).or_insert(0) += 1;
}
}
}
}
}
}

@@ -772,7 +770,7 @@ fn truncate(s: &str, max_len: usize) -> &str {
if s.len() <= max_len {
s
} else {
&s[..max_len]
&s[..s.floor_char_boundary(max_len)]
}
}
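The truncate fix just above (and the matching loops in the embeddings hunks further down) address the same class of bug: slicing a &str at an arbitrary byte index panics when that index lands inside a multi-byte UTF-8 character, which is why the MSRV moves to 1.91 for floor_char_boundary. An illustrative sketch using the stable is_char_boundary fallback rather than the repository's exact helpers:

// Illustrative only; mirrors the fallback loop used in the embeddings module of this commit.
fn truncate_safe(s: &str, max_len: usize) -> &str {
    if s.len() <= max_len {
        return s;
    }
    let mut end = max_len;
    // Back up until `end` lands on a UTF-8 character boundary.
    while end > 0 && !s.is_char_boundary(end) {
        end -= 1;
    }
    &s[..end]
}

fn main() {
    let s = "héllo"; // "é" spans bytes 1..3, so byte index 2 is mid-character
    // The old `&s[..2]` pattern would panic here; the safe version backs up to "h".
    assert_eq!(truncate_safe(s, 2), "h");
}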
@@ -265,13 +265,12 @@ impl SpeculativeRetriever {
}

// Update file-memory associations
if let Some(file) = file_context {
if let Ok(mut map) = self.file_memory_map.write() {
if let Some(file) = file_context
&& let Ok(mut map) = self.file_memory_map.write() {
map.entry(file.to_string())
.or_insert_with(Vec::new)
.push(memory_id.to_string());
}
}
}

/// Get cached predictions
@@ -586,11 +586,10 @@ impl ContextCapture {
}

// Java Spring
if let Ok(content) = fs::read_to_string(self.project_root.join("pom.xml")) {
if content.contains("spring") {
if let Ok(content) = fs::read_to_string(self.project_root.join("pom.xml"))
&& content.contains("spring") {
frameworks.push(Framework::Spring);
}
}

// Ruby Rails
if self.file_exists("config/routes.rb") {

@@ -613,30 +612,27 @@ impl ContextCapture {
/// Detect the project name from config files
fn detect_project_name(&self) -> Result<Option<String>> {
// Try Cargo.toml
if let Ok(content) = fs::read_to_string(self.project_root.join("Cargo.toml")) {
if let Some(name) = self.extract_toml_value(&content, "name") {
if let Ok(content) = fs::read_to_string(self.project_root.join("Cargo.toml"))
&& let Some(name) = self.extract_toml_value(&content, "name") {
return Ok(Some(name));
}
}

// Try package.json
if let Ok(content) = fs::read_to_string(self.project_root.join("package.json")) {
if let Some(name) = self.extract_json_value(&content, "name") {
if let Ok(content) = fs::read_to_string(self.project_root.join("package.json"))
&& let Some(name) = self.extract_json_value(&content, "name") {
return Ok(Some(name));
}
}

// Try pyproject.toml
if let Ok(content) = fs::read_to_string(self.project_root.join("pyproject.toml")) {
if let Some(name) = self.extract_toml_value(&content, "name") {
if let Ok(content) = fs::read_to_string(self.project_root.join("pyproject.toml"))
&& let Some(name) = self.extract_toml_value(&content, "name") {
return Ok(Some(name));
}
}

// Try go.mod
if let Ok(content) = fs::read_to_string(self.project_root.join("go.mod")) {
if let Some(line) = content.lines().next() {
if line.starts_with("module ") {
if let Ok(content) = fs::read_to_string(self.project_root.join("go.mod"))
&& let Some(line) = content.lines().next()
&& line.starts_with("module ") {
let name = line
.trim_start_matches("module ")
.split('/')

@@ -647,8 +643,6 @@ impl ContextCapture {
return Ok(Some(name));
}
}
}
}

// Fall back to directory name
Ok(self

@@ -734,8 +728,8 @@ impl ContextCapture {
// Check test directories
for test_dir in test_dirs {
let test_path = self.project_root.join(test_dir);
if test_path.exists() {
if let Ok(entries) = fs::read_dir(&test_path) {
if test_path.exists()
&& let Ok(entries) = fs::read_dir(&test_path) {
for entry in entries.filter_map(|e| e.ok()) {
let entry_path = entry.path();
if let Some(entry_stem) = entry_path.file_stem() {
@@ -746,7 +740,6 @@ impl ContextCapture {
}
}
}
}
}

// For Rust, look for mod.rs in same directory

@@ -799,9 +792,9 @@ impl ContextCapture {
/// Detect the module a file belongs to
fn detect_module(&self, path: &Path) -> Option<String> {
// For Rust, use the parent directory name relative to src/
if path.extension().map(|e| e == "rs").unwrap_or(false) {
if let Ok(relative) = path.strip_prefix(&self.project_root) {
if let Ok(src_relative) = relative.strip_prefix("src") {
if path.extension().map(|e| e == "rs").unwrap_or(false)
&& let Ok(relative) = path.strip_prefix(&self.project_root)
&& let Ok(src_relative) = relative.strip_prefix("src") {
// Get the module path
let components: Vec<_> = src_relative
.parent()?

@@ -813,16 +806,13 @@ impl ContextCapture {
return Some(components.join("::"));
}
}
}
}

// For TypeScript/JavaScript, use the parent directory
if path
.extension()
.map(|e| e == "ts" || e == "tsx" || e == "js" || e == "jsx")
.unwrap_or(false)
{
if let Ok(relative) = path.strip_prefix(&self.project_root) {
&& let Ok(relative) = path.strip_prefix(&self.project_root) {
// Skip src/ or lib/ prefix
let relative = relative
.strip_prefix("src")

@@ -836,7 +826,6 @@ impl ContextCapture {
}
}
}
}

None
}

@@ -874,14 +863,12 @@ impl ContextCapture {
fn extract_toml_value(&self, content: &str, key: &str) -> Option<String> {
for line in content.lines() {
let trimmed = line.trim();
if trimmed.starts_with(&format!("{} ", key))
|| trimmed.starts_with(&format!("{}=", key))
{
if let Some(value) = trimmed.split('=').nth(1) {
if (trimmed.starts_with(&format!("{} ", key))
|| trimmed.starts_with(&format!("{}=", key)))
&& let Some(value) = trimmed.split('=').nth(1) {
let value = value.trim().trim_matches('"').trim_matches('\'');
return Some(value.to_string());
}
}
}
None
}
@@ -274,11 +274,10 @@ impl GitAnalyzer {
if let Some(path) = delta.new_file().path() {
files.push(path.to_path_buf());
}
if let Some(path) = delta.old_file().path() {
if !files.contains(&path.to_path_buf()) {
if let Some(path) = delta.old_file().path()
&& !files.contains(&path.to_path_buf()) {
files.push(path.to_path_buf());
}
}
}
}

@@ -492,11 +491,10 @@ impl GitAnalyzer {
.single()
.unwrap_or_else(Utc::now);

if let Some(since_time) = since {
if commit_time < since_time {
if let Some(since_time) = since
&& commit_time < since_time {
continue;
}
}

let message = commit.message().map(|m| m.to_string()).unwrap_or_default();
@@ -209,8 +209,8 @@ impl PatternDetector {
.collect();

for pattern in relevant_patterns {
if let Some(confidence) = self.calculate_match_confidence(code, &code_lower, pattern) {
if confidence >= 0.3 {
if let Some(confidence) = self.calculate_match_confidence(code, &code_lower, pattern)
&& confidence >= 0.3 {
matches.push(PatternMatch {
pattern: pattern.clone(),
confidence,

@@ -218,7 +218,6 @@ impl PatternDetector {
suggestions: self.generate_suggestions(pattern, code),
});
}
}
}

// Sort by confidence
@@ -337,14 +337,13 @@ impl CodebaseWatcher {
}

// Detect patterns if enabled
if config.detect_patterns {
if let Ok(content) = std::fs::read_to_string(path) {
if config.detect_patterns
&& let Ok(content) = std::fs::read_to_string(path) {
let language = Self::detect_language(path);
if let Ok(detector) = detector.try_read() {
let _ = detector.detect_patterns(&content, &language);
}
}
}
}
FileEventKind::Deleted => {
// File was deleted, remove from session

@@ -576,13 +575,12 @@ impl ManualEventHandler {
}

// Detect patterns
if self.config.detect_patterns {
if let Ok(content) = std::fs::read_to_string(path) {
if self.config.detect_patterns
&& let Ok(content) = std::fs::read_to_string(path) {
let language = CodebaseWatcher::detect_language(path);
let detector = self.detector.read().await;
let _ = detector.detect_patterns(&content, &language);
}
}

Ok(())
}
@@ -333,11 +333,10 @@ impl DreamEngine {
emotion: &EmotionCategory,
) -> TriageCategory {
// High emotional content
if matches!(emotion, EmotionCategory::Frustration | EmotionCategory::Urgency | EmotionCategory::Joy | EmotionCategory::Surprise) {
if node.sentiment_magnitude > 0.4 {
if matches!(emotion, EmotionCategory::Frustration | EmotionCategory::Urgency | EmotionCategory::Joy | EmotionCategory::Surprise)
&& node.sentiment_magnitude > 0.4 {
return TriageCategory::Emotional;
}
}

// Future-relevant (intentions, TODOs)
let content_lower = node.content.to_lowercase();

@@ -386,7 +385,7 @@ impl DreamEngine {
.collect();

// Process replay queue in oscillation waves
let wave_count = (replay_queue.len() + self.wave_batch_size - 1) / self.wave_batch_size;
let wave_count = replay_queue.len().div_ceil(self.wave_batch_size);

for wave_idx in 0..wave_count {
let wave_start = wave_idx * self.wave_batch_size;

@@ -659,8 +658,8 @@ impl DreamEngine {
if indices.len() >= 3 && indices.len() <= 10 {
pattern_count += 1;
// Create a connection between the first and last memory sharing this pattern
if let (Some(&first), Some(&last)) = (indices.first(), indices.last()) {
if first != last {
if let (Some(&first), Some(&last)) = (indices.first(), indices.last())
&& first != last {
connections.push(CreativeConnection {
memory_a_id: triaged[first].id.clone(),
memory_b_id: triaged[last].id.clone(),

@@ -672,7 +671,6 @@ impl DreamEngine {
connection_type: CreativeConnectionType::CrossDomain,
});
}
}
}
}
@@ -181,7 +181,7 @@ impl Embedding {

/// Create from bytes
pub fn from_bytes(bytes: &[u8]) -> Option<Self> {
if bytes.len() % 4 != 0 {
if !bytes.len().is_multiple_of(4) {
return None;
}
let vector: Vec<f32> = bytes

@@ -260,9 +260,13 @@ impl EmbeddingService {

let mut model = get_model()?;

// Truncate if too long
// Truncate if too long (char-boundary safe)
let text = if text.len() > MAX_TEXT_LENGTH {
&text[..MAX_TEXT_LENGTH]
let mut end = MAX_TEXT_LENGTH;
while !text.is_char_boundary(end) && end > 0 {
end -= 1;
}
&text[..end]
} else {
text
};

@@ -295,7 +299,11 @@ impl EmbeddingService {
.iter()
.map(|t| {
if t.len() > MAX_TEXT_LENGTH {
&t[..MAX_TEXT_LENGTH]
let mut end = MAX_TEXT_LENGTH;
while !t.is_char_boundary(end) && end > 0 {
end -= 1;
}
&t[..end]
} else {
*t
}
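Besides the let-chain changes, the Embedding and DreamEngine hunks above swap hand-rolled integer arithmetic for std helpers: (len + batch - 1) / batch becomes len.div_ceil(batch), and len % 4 != 0 becomes !len.is_multiple_of(4). A quick equivalence check (illustrative, not repository code):

fn main() {
    let (len, batch) = (10_usize, 4_usize);
    // Manual ceiling division vs. the std helper used in DreamEngine.
    assert_eq!((len + batch - 1) / batch, len.div_ceil(batch)); // both are 3
    // Manual remainder test vs. the std helper used in Embedding::from_bytes.
    assert_eq!(len % 4 == 0, len.is_multiple_of(4));
}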
@@ -241,17 +241,15 @@ impl FSRSScheduler {
};

// Apply sentiment boost
if self.enable_sentiment_boost {
if let Some(sentiment) = sentiment_boost {
if sentiment > 0.0 {
if self.enable_sentiment_boost
&& let Some(sentiment) = sentiment_boost
&& sentiment > 0.0 {
new_state.stability = apply_sentiment_boost(
new_state.stability,
sentiment,
self.max_sentiment_boost,
);
}
}
}

let mut interval =
next_interval_with_decay(new_state.stability, self.params.desired_retention, w20)
@@ -910,39 +910,34 @@ impl ContextMatcher {
let mut score = 0.0;

// Same session is a very strong match
if let (Some(e_id), Some(r_id)) = (&encoding.session_id, &retrieval.session_id) {
if e_id == r_id {
if let (Some(e_id), Some(r_id)) = (&encoding.session_id, &retrieval.session_id)
&& e_id == r_id {
return 1.0;
}
}

// Project match (0.4 weight)
if let (Some(e_proj), Some(r_proj)) = (&encoding.project, &retrieval.project) {
if e_proj == r_proj {
if let (Some(e_proj), Some(r_proj)) = (&encoding.project, &retrieval.project)
&& e_proj == r_proj {
score += 0.4;
}
}

// Activity type match (0.3 weight)
if let (Some(e_act), Some(r_act)) = (&encoding.activity_type, &retrieval.activity_type) {
if e_act == r_act {
if let (Some(e_act), Some(r_act)) = (&encoding.activity_type, &retrieval.activity_type)
&& e_act == r_act {
score += 0.3;
}
}

// Git branch match (0.2 weight)
if let (Some(e_br), Some(r_br)) = (&encoding.git_branch, &retrieval.git_branch) {
if e_br == r_br {
if let (Some(e_br), Some(r_br)) = (&encoding.git_branch, &retrieval.git_branch)
&& e_br == r_br {
score += 0.2;
}
}

// Active file match (0.1 weight)
if let (Some(e_file), Some(r_file)) = (&encoding.active_file, &retrieval.active_file) {
if e_file == r_file {
if let (Some(e_file), Some(r_file)) = (&encoding.active_file, &retrieval.active_file)
&& e_file == r_file {
score += 0.1;
}
}

score
}
@@ -1075,11 +1075,10 @@ impl ContentStore {
pub fn retrieve(&self, pointer: &ContentPointer) -> Result<Vec<u8>> {
// Check cache first
let cache_key = self.cache_key(pointer);
if let Ok(cache) = self.cache.read() {
if let Some(data) = cache.get(&cache_key) {
if let Ok(cache) = self.cache.read()
&& let Some(data) = cache.get(&cache_key) {
return Ok(data.clone());
}
}

// Retrieve from storage
let data = match &pointer.storage_location {

@@ -1131,8 +1130,8 @@ impl ContentStore {
return;
}

if let Ok(mut cache) = self.cache.write() {
if let Ok(mut size) = self.current_cache_size.write() {
if let Ok(mut cache) = self.cache.write()
&& let Ok(mut size) = self.current_cache_size.write() {
// Evict if necessary
while *size + data_size > self.max_cache_size && !cache.is_empty() {
// Simple eviction: remove first entry

@@ -1148,7 +1147,6 @@ impl ContentStore {
cache.insert(key.to_string(), data.to_vec());
*size += data_size;
}
}
}

/// Retrieve from SQLite (placeholder - to be integrated with Storage)

@@ -1394,8 +1392,8 @@ impl HippocampalIndex {
let mut match_result = IndexMatch::new(index.clone());

// Calculate semantic score
if let Some(ref query_embedding) = query.semantic_embedding {
if !index.semantic_summary.is_empty() {
if let Some(ref query_embedding) = query.semantic_embedding
&& !index.semantic_summary.is_empty() {
let query_compressed = self.compress_embedding(query_embedding);
match_result.semantic_score =
self.cosine_similarity(&query_compressed, &index.semantic_summary);

@@ -1404,7 +1402,6 @@ impl HippocampalIndex {
continue;
}
}
}

// Calculate text score
if let Some(ref text_query) = query.text_query {
@@ -1444,25 +1441,22 @@ impl HippocampalIndex {
/// Check if an index passes query filters
fn passes_filters(&self, index: &MemoryIndex, query: &IndexQuery) -> bool {
// Time range filter
if let Some((start, end)) = query.time_range {
if index.temporal_marker.created_at < start || index.temporal_marker.created_at > end {
if let Some((start, end)) = query.time_range
&& (index.temporal_marker.created_at < start || index.temporal_marker.created_at > end) {
return false;
}
}

// Importance flags filter
if let Some(ref required) = query.required_flags {
if !index.matches_importance(required.to_bits()) {
if let Some(ref required) = query.required_flags
&& !index.matches_importance(required.to_bits()) {
return false;
}
}

// Node type filter
if let Some(ref types) = query.node_types {
if !types.contains(&index.node_type) {
if let Some(ref types) = query.node_types
&& !types.contains(&index.node_type) {
return false;
}
}

true
}

@@ -1579,11 +1573,10 @@ impl HippocampalIndex {
let mut memories = Vec::with_capacity(matches.len());
for m in matches {
// Record access
if let Ok(mut indices) = self.indices.write() {
if let Some(index) = indices.get_mut(&m.index.memory_id) {
if let Ok(mut indices) = self.indices.write()
&& let Some(index) = indices.get_mut(&m.index.memory_id) {
index.record_access();
}
}

match self.retrieve_content(&m.index) {
Ok(memory) => memories.push(memory),

@@ -1887,20 +1880,19 @@ impl HippocampalIndex {
sentiment_magnitude: f64,
) -> Result<MemoryBarcode> {
// Check if already indexed
if let Ok(indices) = self.indices.read() {
if indices.contains_key(node_id) {
if let Ok(indices) = self.indices.read()
&& indices.contains_key(node_id) {
return Err(HippocampalIndexError::MigrationError(
"Node already indexed".to_string(),
));
}
}

// Create the index
let barcode = self.index_memory(node_id, content, node_type, created_at, embedding)?;

// Update importance flags based on existing data
if let Ok(mut indices) = self.indices.write() {
if let Some(index) = indices.get_mut(node_id) {
if let Ok(mut indices) = self.indices.write()
&& let Some(index) = indices.get_mut(node_id) {
// Set high retention flag if applicable
if retention_strength > 0.7 {
index.importance_flags.set_high_retention(true);

@@ -1919,7 +1911,6 @@ impl HippocampalIndex {
ContentType::Text,
));
}
}

Ok(barcode)
}
@@ -358,8 +358,8 @@ impl PredictionModel {
fn learn(&self, content: &str) {
let ngrams = self.extract_ngrams(content);

if let Ok(mut patterns) = self.patterns.write() {
if let Ok(mut total) = self.total_count.write() {
if let Ok(mut patterns) = self.patterns.write()
&& let Ok(mut total) = self.total_count.write() {
for ngram in ngrams {
*patterns.entry(ngram).or_insert(0) += 1;
*total += 1;

@@ -370,7 +370,6 @@ impl PredictionModel {
self.apply_decay(&mut patterns);
}
}
}
}

fn compute_prediction_error(&self, content: &str) -> f64 {
@@ -1266,15 +1266,14 @@ impl MemoryStateInfo {
);
}
MemoryState::Unavailable => {
if let Some(until) = lifecycle.suppression_until {
if until > now {
if let Some(until) = lifecycle.suppression_until
&& until > now {
recommendations.push(format!(
"This memory is temporarily suppressed. \
It will become accessible again after {}.",
until.format("%Y-%m-%d %H:%M UTC")
));
}
}
}
MemoryState::Dormant => {
if duration_since_access.num_days() > 20 {
@@ -694,18 +694,16 @@ impl Intention {
}

// Check snoozed
if let Some(snoozed_until) = self.snoozed_until {
if Utc::now() < snoozed_until {
if let Some(snoozed_until) = self.snoozed_until
&& Utc::now() < snoozed_until {
return false;
}
}

// Check minimum interval
if let Some(last) = self.last_reminded_at {
if (Utc::now() - last) < Duration::minutes(MIN_REMINDER_INTERVAL_MINUTES) {
if let Some(last) = self.last_reminded_at
&& (Utc::now() - last) < Duration::minutes(MIN_REMINDER_INTERVAL_MINUTES) {
return false;
}
}

true
}

@@ -1267,13 +1265,11 @@ impl ProspectiveMemory {
// Skip non-active intentions
if intention.status != IntentionStatus::Active {
// Check if snoozed intention should wake
if intention.status == IntentionStatus::Snoozed {
if let Some(until) = intention.snoozed_until {
if Utc::now() >= until {
if intention.status == IntentionStatus::Snoozed
&& let Some(until) = intention.snoozed_until
&& Utc::now() >= until {
intention.wake();
}
}
}
continue;
}
@@ -287,11 +287,10 @@ impl ActivationNetwork {
self.edges.insert((source.clone(), target.clone()), edge);

// Update node's edge list
if let Some(node) = self.nodes.get_mut(&source) {
if !node.edges.contains(&target) {
if let Some(node) = self.nodes.get_mut(&source)
&& !node.edges.contains(&target) {
node.edges.push(target);
}
}
}

/// Activate a node and spread activation through the network

@@ -314,11 +313,10 @@ impl ActivationNetwork {

while let Some((current_id, current_activation, hops, path)) = queue.pop() {
// Skip if we've visited this node with higher activation
if let Some(&prev_activation) = visited.get(&current_id) {
if prev_activation >= current_activation {
if let Some(&prev_activation) = visited.get(&current_id)
&& prev_activation >= current_activation {
continue;
}
}
visited.insert(current_id.clone(), current_activation);

// Check hop limit
@@ -609,13 +609,13 @@ impl Storage {
node_id,
embedding.to_bytes(),
EMBEDDING_DIMENSIONS as i32,
"all-MiniLM-L6-v2",
"nomic-embed-text-v1.5",
now.to_rfc3339(),
],
)?;

writer.execute(
"UPDATE knowledge_nodes SET has_embedding = 1, embedding_model = 'all-MiniLM-L6-v2' WHERE id = ?1",
"UPDATE knowledge_nodes SET has_embedding = 1, embedding_model = 'nomic-embed-text-v1.5' WHERE id = ?1",
params![node_id],
)?;
}

@@ -639,7 +639,7 @@ impl Storage {
.prepare("SELECT * FROM knowledge_nodes WHERE id = ?1")?;

let node = stmt
.query_row(params![id], |row| Self::row_to_node(row))
.query_row(params![id], Self::row_to_node)
.optional()?;
Ok(node)
}
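The query_map / query_row edits in this and the following Storage hunks are all the same clippy::redundant_closure fix: a closure that only forwards its argument, such as |row| Self::row_to_node(row), can be replaced by the function path itself. A trivial standalone illustration (not repository code):

fn double(x: i32) -> i32 {
    x * 2
}

fn main() {
    let xs = vec![1, 2, 3];
    // Before: a closure that only forwards its argument.
    let a: Vec<i32> = xs.clone().into_iter().map(|x| double(x)).collect();
    // After: pass the function path directly.
    let b: Vec<i32> = xs.into_iter().map(double).collect();
    assert_eq!(a, b);
}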
@@ -1058,7 +1058,7 @@ impl Storage {
LIMIT ?2",
)?;

let nodes = stmt.query_map(params![now, limit], |row| Self::row_to_node(row))?;
let nodes = stmt.query_map(params![now, limit], Self::row_to_node)?;

let mut result = Vec::new();
for node in nodes {

@@ -1150,7 +1150,7 @@ impl Storage {
)?;

let embedding_model: Option<String> = if nodes_with_embeddings > 0 {
Some("all-MiniLM-L6-v2".to_string())
Some("nomic-embed-text-v1.5".to_string())
} else {
None
};

@@ -1182,6 +1182,14 @@ impl Storage {
.map_err(|_| StorageError::Init("Writer lock poisoned".into()))?;
let rows = writer
.execute("DELETE FROM knowledge_nodes WHERE id = ?1", params![id])?;

// Clean up vector index to prevent stale search results
#[cfg(all(feature = "embeddings", feature = "vector-search"))]
if rows > 0
&& let Ok(mut index) = self.vector_index.lock() {
let _ = index.remove(id);
}

Ok(rows > 0)
}

@@ -1199,7 +1207,7 @@ impl Storage {
LIMIT ?2",
)?;

let nodes = stmt.query_map(params![sanitized_query, limit], |row| Self::row_to_node(row))?;
let nodes = stmt.query_map(params![sanitized_query, limit], Self::row_to_node)?;

let mut result = Vec::new();
for node in nodes {

@@ -1218,7 +1226,7 @@ impl Storage {
LIMIT ?1 OFFSET ?2",
)?;

let nodes = stmt.query_map(params![limit, offset], |row| Self::row_to_node(row))?;
let nodes = stmt.query_map(params![limit, offset], Self::row_to_node)?;

let mut result = Vec::new();
for node in nodes {

@@ -1268,7 +1276,7 @@ impl Storage {
ORDER BY retention_strength DESC, created_at DESC
LIMIT ?2",
)?;
let rows = stmt.query_map(params![node_type, limit], |row| Self::row_to_node(row))?;
let rows = stmt.query_map(params![node_type, limit], Self::row_to_node)?;
let mut nodes = Vec::new();
for node in rows.flatten() {
nodes.push(node);

@@ -1641,7 +1649,7 @@ impl Storage {
LIMIT ?2",
)?;

let nodes = stmt.query_map(params![timestamp, limit], |row| Self::row_to_node(row))?;
let nodes = stmt.query_map(params![timestamp, limit], Self::row_to_node)?;

let mut result = Vec::new();
for node in nodes {

@@ -1704,7 +1712,7 @@ impl Storage {
.map_err(|_| StorageError::Init("Reader lock poisoned".into()))?;
let mut stmt = reader.prepare(query)?;
let params_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
let nodes = stmt.query_map(params_refs.as_slice(), |row| Self::row_to_node(row))?;
let nodes = stmt.query_map(params_refs.as_slice(), Self::row_to_node)?;

let mut result = Vec::new();
for node in nodes {
@@ -2404,12 +2412,11 @@ impl Storage {
/// Generate missing embeddings
#[cfg(all(feature = "embeddings", feature = "vector-search"))]
fn generate_missing_embeddings(&self) -> Result<i64> {
if !self.embedding_service.is_ready() {
if let Err(e) = self.embedding_service.init() {
if !self.embedding_service.is_ready()
&& let Err(e) = self.embedding_service.init() {
tracing::warn!("Could not initialize embedding model: {}", e);
return Ok(0);
}
}

let nodes: Vec<(String, String)> = {
let reader = self.reader.lock()

@@ -2615,7 +2622,7 @@ impl Storage {
"SELECT * FROM intentions WHERE id = ?1"
)?;

stmt.query_row(params![id], |row| Self::row_to_intention(row))
stmt.query_row(params![id], Self::row_to_intention)
.optional()
.map_err(StorageError::from)
}

@@ -2628,7 +2635,7 @@ impl Storage {
"SELECT * FROM intentions WHERE status = 'active' ORDER BY priority DESC, created_at ASC"
)?;

let rows = stmt.query_map([], |row| Self::row_to_intention(row))?;
let rows = stmt.query_map([], Self::row_to_intention)?;
let mut result = Vec::new();
for row in rows {
result.push(row?);

@@ -2644,7 +2651,7 @@ impl Storage {
"SELECT * FROM intentions WHERE status = ?1 ORDER BY priority DESC, created_at ASC"
)?;

let rows = stmt.query_map(params![status], |row| Self::row_to_intention(row))?;
let rows = stmt.query_map(params![status], Self::row_to_intention)?;
let mut result = Vec::new();
for row in rows {
result.push(row?);

@@ -2683,7 +2690,7 @@ impl Storage {
"SELECT * FROM intentions WHERE status = 'active' AND deadline IS NOT NULL AND deadline < ?1 ORDER BY deadline ASC"
)?;

let rows = stmt.query_map(params![now], |row| Self::row_to_intention(row))?;
let rows = stmt.query_map(params![now], Self::row_to_intention)?;
let mut result = Vec::new();
for row in rows {
result.push(row?);

@@ -2775,7 +2782,7 @@ impl Storage {
"SELECT * FROM insights ORDER BY generated_at DESC LIMIT ?1"
)?;

let rows = stmt.query_map(params![limit], |row| Self::row_to_insight(row))?;
let rows = stmt.query_map(params![limit], Self::row_to_insight)?;
let mut result = Vec::new();
for row in rows {
result.push(row?);

@@ -2791,7 +2798,7 @@ impl Storage {
"SELECT * FROM insights WHERE feedback IS NULL ORDER BY novelty_score DESC"
)?;

let rows = stmt.query_map([], |row| Self::row_to_insight(row))?;
let rows = stmt.query_map([], Self::row_to_insight)?;
let mut result = Vec::new();
for row in rows {
result.push(row?);

@@ -2874,7 +2881,7 @@ impl Storage {
"SELECT * FROM memory_connections WHERE source_id = ?1 OR target_id = ?1 ORDER BY strength DESC"
)?;

let rows = stmt.query_map(params![memory_id], |row| Self::row_to_connection(row))?;
let rows = stmt.query_map(params![memory_id], Self::row_to_connection)?;
let mut result = Vec::new();
for row in rows {
result.push(row?);

@@ -2890,7 +2897,7 @@ impl Storage {
"SELECT * FROM memory_connections ORDER BY strength DESC"
)?;

let rows = stmt.query_map([], |row| Self::row_to_connection(row))?;
let rows = stmt.query_map([], Self::row_to_connection)?;
let mut result = Vec::new();
for row in rows {
result.push(row?);

@@ -2988,7 +2995,7 @@ impl Storage {
"SELECT * FROM memory_states WHERE memory_id = ?1"
)?;

stmt.query_row(params![memory_id], |row| Self::row_to_memory_state(row))
stmt.query_row(params![memory_id], Self::row_to_memory_state)
.optional()
.map_err(StorageError::from)
}
@@ -3241,14 +3248,13 @@ impl Storage {
let name = entry.file_name();
let name_str = name.to_string_lossy();
// Parse vestige-YYYYMMDD-HHMMSS.db
if let Some(ts_part) = name_str.strip_prefix("vestige-").and_then(|s| s.strip_suffix(".db")) {
if let Ok(naive) = chrono::NaiveDateTime::parse_from_str(ts_part, "%Y%m%d-%H%M%S") {
if let Some(ts_part) = name_str.strip_prefix("vestige-").and_then(|s| s.strip_suffix(".db"))
&& let Ok(naive) = chrono::NaiveDateTime::parse_from_str(ts_part, "%Y%m%d-%H%M%S") {
let dt = naive.and_utc();
if latest.as_ref().is_none_or(|l| dt > *l) {
latest = Some(dt);
}
}
}
}
}

@@ -3406,12 +3412,37 @@ impl Storage {
/// Auto-GC memories below threshold (used by retention target system)
pub fn gc_below_retention(&self, threshold: f64, min_age_days: i64) -> Result<i64> {
let cutoff = (Utc::now() - Duration::days(min_age_days)).to_rfc3339();

// Collect IDs first for vector index cleanup
#[cfg(all(feature = "embeddings", feature = "vector-search"))]
let doomed_ids: Vec<String> = {
let reader = self.reader.lock()
.map_err(|_| StorageError::Init("Reader lock poisoned".into()))?;
let mut stmt = reader.prepare(
"SELECT id FROM knowledge_nodes WHERE retention_strength < ?1 AND created_at < ?2",
)?;
stmt.query_map(params![threshold, cutoff], |row| row.get(0))?
.filter_map(|r| r.ok())
.collect()
};

let writer = self.writer.lock()
.map_err(|_| StorageError::Init("Writer lock poisoned".into()))?;
let deleted = writer.execute(
"DELETE FROM knowledge_nodes WHERE retention_strength < ?1 AND created_at < ?2",
params![threshold, cutoff],
)? as i64;
drop(writer);

// Clean up vector index
#[cfg(all(feature = "embeddings", feature = "vector-search"))]
if deleted > 0
&& let Ok(mut index) = self.vector_index.lock() {
for id in &doomed_ids {
let _ = index.remove(id);
}
}

Ok(deleted)
}

@@ -3489,7 +3520,7 @@ impl Storage {
let mut stmt = reader.prepare(
"SELECT * FROM knowledge_nodes WHERE waking_tag = TRUE ORDER BY waking_tag_at DESC LIMIT ?1"
)?;
let nodes = stmt.query_map(params![limit], |row| Self::row_to_node(row))?;
let nodes = stmt.query_map(params![limit], Self::row_to_node)?;
let mut result = Vec::new();
for node in nodes {
result.push(node?);