mirror of
https://github.com/samvallad33/vestige.git
synced 2026-05-08 15:22:37 +02:00
feat: v2.0.4 "Deep Reference" — cognitive reasoning engine + 10 bug fixes
New features: - deep_reference tool (#22): 8-stage cognitive reasoning pipeline with FSRS-6 trust scoring, intent classification (FactCheck/Timeline/RootCause/Comparison/ Synthesis), spreading activation expansion, temporal supersession, trust-weighted contradiction analysis, relation assessment, dream insight integration, and algorithmic reasoning chain generation — all without calling an LLM - cross_reference (#23): backward-compatible alias for deep_reference - retrieval_mode parameter on search (precise/balanced/exhaustive) - get_batch action on memory tool (up to 20 IDs per call) - Token budget raised from 10K to 100K on search + session_context - Dates (createdAt/updatedAt) on all search results and session_context lines Bug fixes (GitHub Issue #25 — all 10 resolved): - state_transitions empty: wired record_memory_access into strengthen_batch - chain/bridges no storage fallback: added with edge deduplication - knowledge_edges dead schema: documented as deprecated - insights not persisted from dream: wired save_insight after generation - find_duplicates threshold dropped: serde alias fix - search min_retention ignored: serde aliases for snake_case params - intention time triggers null: removed dead trigger_at embedding - changelog missing dreams: added get_dream_history + event integration - phantom Related IDs: clarified message text - fsrs_cards empty: documented as harmless dead schema Security hardening: - HTTP transport CORS: permissive() → localhost-only - Auth token panic guard: &token[..8] → safe min(8) slice - UTF-8 boundary fix: floor_char_boundary on content truncation - All unwrap() removed from HTTP transport (unwrap_or_else fallback) - Dream memory_count capped at 500 (prevents O(N²) hang) - Dormant state threshold aligned (0.3 → 0.4) Stats: 23 tools, 758 tests, 0 failures, 0 warnings, 0 unwraps in production Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
61091e06b9
commit
04781a95e2
28 changed files with 1797 additions and 102 deletions
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "vestige-core"
|
||||
version = "2.0.3"
|
||||
version = "2.0.4"
|
||||
edition = "2024"
|
||||
rust-version = "1.91"
|
||||
authors = ["Vestige Team"]
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ pub mod speculative;
|
|||
|
||||
// Re-exports for convenient access
|
||||
pub use adaptive_embedding::{AdaptiveEmbedder, ContentType, EmbeddingStrategy, Language};
|
||||
pub use chains::{ChainStep, ConnectionType, MemoryChainBuilder, MemoryPath, ReasoningChain};
|
||||
pub use chains::{ChainStep, Connection, ConnectionType, MemoryChainBuilder, MemoryNode, MemoryPath, ReasoningChain};
|
||||
pub use compression::{CompressedMemory, CompressionConfig, CompressionStats, MemoryCompressor};
|
||||
pub use cross_project::{
|
||||
ApplicableKnowledge, CrossProjectLearner, ProjectContext, UniversalPattern,
|
||||
|
|
|
|||
|
|
@ -315,7 +315,10 @@ const MIGRATION_V4_UP: &str = r#"
|
|||
-- TEMPORAL KNOWLEDGE GRAPH (Like Zep's Graphiti)
|
||||
-- ============================================================================
|
||||
|
||||
-- Knowledge edges for temporal reasoning
|
||||
-- DEPRECATED (v2.1.0): knowledge_edges is unused. All graph edges use
|
||||
-- memory_connections (migration V3). This table was designed for bi-temporal
|
||||
-- edge support but was never wired. Retained for schema compatibility with
|
||||
-- existing databases. Do NOT add queries against this table.
|
||||
CREATE TABLE IF NOT EXISTS knowledge_edges (
|
||||
id TEXT PRIMARY KEY,
|
||||
source_id TEXT NOT NULL,
|
||||
|
|
|
|||
|
|
@ -227,6 +227,7 @@ impl Storage {
|
|||
.lock()
|
||||
.map_err(|_| StorageError::Init("Vector index lock poisoned".to_string()))?;
|
||||
|
||||
let mut load_failures = 0u32;
|
||||
for (node_id, embedding_bytes) in embeddings {
|
||||
if let Some(embedding) = Embedding::from_bytes(&embedding_bytes) {
|
||||
// Handle Matryoshka migration: old 768-dim → truncate to 256-dim
|
||||
|
|
@ -236,10 +237,14 @@ impl Storage {
|
|||
embedding.vector
|
||||
};
|
||||
if let Err(e) = index.add(&node_id, &vector) {
|
||||
load_failures += 1;
|
||||
tracing::warn!("Failed to load embedding for {}: {}", node_id, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
if load_failures > 0 {
|
||||
tracing::error!(count = load_failures, "Vector index: {} embeddings failed to load", load_failures);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -399,7 +404,11 @@ impl Storage {
|
|||
superseded_id: None,
|
||||
similarity: None,
|
||||
prediction_error: Some(prediction_error),
|
||||
reason: format!("Created new memory: {:?}. Related: {:?}", reason, related_memory_ids),
|
||||
reason: if related_memory_ids.is_empty() {
|
||||
format!("Created new memory: {:?}", reason)
|
||||
} else {
|
||||
format!("Created new memory: {:?}. Semantically similar (not linked): {:?}", reason, related_memory_ids)
|
||||
},
|
||||
})
|
||||
}
|
||||
GateDecision::Update { target_id, similarity, update_type, prediction_error } => {
|
||||
|
|
@ -667,7 +676,13 @@ impl Storage {
|
|||
/// Convert a row to KnowledgeNode
|
||||
fn row_to_node(row: &rusqlite::Row) -> rusqlite::Result<KnowledgeNode> {
|
||||
let tags_json: String = row.get("tags")?;
|
||||
let tags: Vec<String> = serde_json::from_str(&tags_json).unwrap_or_default();
|
||||
let tags: Vec<String> = match serde_json::from_str(&tags_json) {
|
||||
Ok(t) => t,
|
||||
Err(e) => {
|
||||
tracing::warn!(raw = %tags_json, "Failed to deserialize tags JSON, using empty: {}", e);
|
||||
Vec::new()
|
||||
}
|
||||
};
|
||||
|
||||
let created_at: String = row.get("created_at")?;
|
||||
let updated_at: String = row.get("updated_at")?;
|
||||
|
|
@ -955,6 +970,8 @@ impl Storage {
|
|||
/// Strengthen several memories in a single call, one access event each.
///
/// For every id this applies the normal access-strengthening update and
/// then writes an access row into `memory_states` for the audit trail
/// (Bug #1 fix). The first `strengthen_on_access` failure aborts the
/// batch; audit-trail failures are deliberately ignored so a missing
/// audit row can never fail the strengthen itself.
pub fn strengthen_batch_on_access(&self, ids: &[&str]) -> Result<()> {
    ids.iter().try_for_each(|&id| {
        self.strengthen_on_access(id)?;
        // Best-effort audit record in memory_states (Bug #1 fix) —
        // errors are intentionally swallowed.
        let _ = self.record_memory_access(id);
        Ok(())
    })
}
|
||||
|
|
@ -3223,6 +3240,42 @@ impl Storage {
|
|||
}))
|
||||
}
|
||||
|
||||
/// Get dream history (most recent first)
|
||||
pub fn get_dream_history(&self, limit: i32) -> Result<Vec<DreamHistoryRecord>> {
|
||||
let reader = self.reader.lock()
|
||||
.map_err(|_| StorageError::Init("Reader lock poisoned".into()))?;
|
||||
let mut stmt = reader.prepare(
|
||||
"SELECT dreamed_at, duration_ms, memories_replayed, connections_found,
|
||||
insights_generated, memories_strengthened, memories_compressed,
|
||||
phase_nrem1_ms, phase_nrem3_ms, phase_rem_ms, phase_integration_ms,
|
||||
summaries_generated, emotional_memories_processed, creative_connections_found
|
||||
FROM dream_history ORDER BY dreamed_at DESC LIMIT ?1"
|
||||
)?;
|
||||
let records = stmt.query_map(params![limit], |row| {
|
||||
let dreamed_at_str: String = row.get(0)?;
|
||||
let dreamed_at = DateTime::parse_from_rfc3339(&dreamed_at_str)
|
||||
.map(|dt| dt.with_timezone(&Utc))
|
||||
.unwrap_or_else(|_| Utc::now());
|
||||
Ok(DreamHistoryRecord {
|
||||
dreamed_at,
|
||||
duration_ms: row.get(1)?,
|
||||
memories_replayed: row.get(2)?,
|
||||
connections_found: row.get(3)?,
|
||||
insights_generated: row.get(4)?,
|
||||
memories_strengthened: row.get(5)?,
|
||||
memories_compressed: row.get(6)?,
|
||||
phase_nrem1_ms: row.get(7)?,
|
||||
phase_nrem3_ms: row.get(8)?,
|
||||
phase_rem_ms: row.get(9)?,
|
||||
phase_integration_ms: row.get(10)?,
|
||||
summaries_generated: row.get(11)?,
|
||||
emotional_memories_processed: row.get(12)?,
|
||||
creative_connections_found: row.get(13)?,
|
||||
})
|
||||
})?.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||
Ok(records)
|
||||
}
|
||||
|
||||
/// Count memories created since a given timestamp
|
||||
pub fn count_memories_since(&self, since: DateTime<Utc>) -> Result<i64> {
|
||||
let reader = self.reader.lock()
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue