chore: clean up dead code warnings and apply clippy fixes for v1.1.1

- Add #![allow(dead_code)] to deprecated tool modules (kept for
  backwards compatibility but not exposed in MCP tool list)
- Mark unused functions with #[allow(dead_code)] annotations
- Fix unused variable warnings (prefix with _)
- Apply clippy auto-fixes for redundant closures and derives
- Fix test to account for protocol version negotiation
- Reorganize tools/mod.rs to clarify active vs deprecated tools

Security review: LOW RISK - no critical vulnerabilities found
Dead code review: deprecated tools properly annotated

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Sam Valladares 2026-01-27 01:23:27 -06:00
parent bfa91474a6
commit e06dd3d69a
27 changed files with 104 additions and 119 deletions

View file

@ -451,7 +451,7 @@ impl MemoryCompressor {
fn extract_sentences<'a>(&self, content: &'a str) -> Vec<&'a str> {
content
.split(|c| c == '.' || c == '!' || c == '?')
.split(['.', '!', '?'])
.map(|s| s.trim())
.filter(|s| s.len() > 10) // Filter very short fragments
.collect()
@ -462,7 +462,7 @@ impl MemoryCompressor {
// Length factor (prefer medium-length sentences)
let words = sentence.split_whitespace().count();
if words >= 5 && words <= 25 {
if (5..=25).contains(&words) {
score += 0.3;
}

View file

@ -1539,7 +1539,7 @@ impl MemoryDreamer {
// Hidden connection
let insight = format!(
"Connection between '{}' and '{}' found across {} memories",
common_tags.get(0).map(|s| s.as_str()).unwrap_or("A"),
common_tags.first().map(|s| s.as_str()).unwrap_or("A"),
common_tags.get(1).map(|s| s.as_str()).unwrap_or("B"),
memories.len()
);

View file

@ -290,7 +290,7 @@ impl SpeculativeRetriever {
for patterns_list in patterns.values_mut() {
for pattern in patterns_list.iter_mut() {
let days_old = (now - pattern.last_seen).num_days() as f64;
pattern.weight = pattern.weight * PATTERN_DECAY_RATE.powf(days_old);
pattern.weight *= PATTERN_DECAY_RATE.powf(days_old);
}
// Remove patterns that are too weak
@ -393,9 +393,7 @@ impl SpeculativeRetriever {
memory_id: event.memory_id.clone(),
content_preview: String::new(),
confidence: 0.6,
reasoning: format!(
"This memory was helpful when you searched for similar terms before"
),
reasoning: "This memory was helpful when you searched for similar terms before".to_string(),
trigger: PredictionTrigger::SemanticSimilarity {
query: query.clone(),
similarity: 0.8,
@ -492,7 +490,7 @@ impl SpeculativeRetriever {
for pattern in patterns_list.iter_mut() {
if pattern.predicted_id == memory_id {
pattern.weight *= factor;
pattern.success_rate = pattern.success_rate * 0.95;
pattern.success_rate *= 0.95;
}
}
}

View file

@ -387,7 +387,7 @@ impl ContextCapture {
// Get last modified time
let last_modified = fs::metadata(path)
.ok()
.and_then(|m| m.modified().ok().map(|t| DateTime::<Utc>::from(t)));
.and_then(|m| m.modified().ok().map(DateTime::<Utc>::from));
// Detect module
let module = self.detect_module(path);
@ -640,7 +640,7 @@ impl ContextCapture {
let name = line
.trim_start_matches("module ")
.split('/')
.last()
.next_back()
.unwrap_or("")
.to_string();
if !name.is_empty() {

View file

@ -133,7 +133,7 @@ impl GitAnalyzer {
let mut has_untracked = false;
for entry in statuses.iter() {
let path = entry.path().map(|p| PathBuf::from(p)).unwrap_or_default();
let path = entry.path().map(PathBuf::from).unwrap_or_default();
let status = entry.status();

View file

@ -208,10 +208,12 @@ pub struct ArchitecturalDecision {
/// Status of an architectural decision
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum DecisionStatus {
/// Decision is proposed but not yet implemented
Proposed,
/// Decision is accepted and being implemented
#[default]
Accepted,
/// Decision has been superseded by another
Superseded,
@ -219,11 +221,6 @@ pub enum DecisionStatus {
Deprecated,
}
impl Default for DecisionStatus {
fn default() -> Self {
Self::Accepted
}
}
// ============================================================================
// BUG FIX
@ -266,19 +263,16 @@ pub struct BugFix {
/// Severity level of a bug
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum BugSeverity {
Critical,
High,
#[default]
Medium,
Low,
Trivial,
}
impl Default for BugSeverity {
fn default() -> Self {
Self::Medium
}
}
// ============================================================================
// CODE PATTERN

View file

@ -274,6 +274,7 @@ impl Default for MemoryStats {
/// Result of a memory consolidation run (sleep-inspired processing)
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[derive(Default)]
pub struct ConsolidationResult {
/// Number of nodes processed
pub nodes_processed: i64,
@ -289,18 +290,6 @@ pub struct ConsolidationResult {
pub embeddings_generated: i64,
}
impl Default for ConsolidationResult {
fn default() -> Self {
Self {
nodes_processed: 0,
nodes_promoted: 0,
nodes_pruned: 0,
decay_applied: 0,
duration_ms: 0,
embeddings_generated: 0,
}
}
}
// ============================================================================
// SEARCH RESULTS
@ -351,6 +340,7 @@ pub struct SimilarityResult {
/// Result of embedding generation
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[derive(Default)]
pub struct EmbeddingResult {
/// Successfully generated embeddings
pub successful: i64,
@ -362,13 +352,3 @@ pub struct EmbeddingResult {
pub errors: Vec<String>,
}
impl Default for EmbeddingResult {
fn default() -> Self {
Self {
successful: 0,
failed: 0,
skipped: 0,
errors: vec![],
}
}
}

View file

@ -797,7 +797,7 @@ impl PredictiveMemory {
.map_err(|e| PredictiveMemoryError::LockPoisoned(e.to_string()))?;
for tag in tags {
cache.invalidate(*tag);
cache.invalidate(tag);
}
Ok(())

View file

@ -131,10 +131,12 @@ pub type Result<T> = std::result::Result<T, ProspectiveMemoryError>;
/// Priority levels for intentions
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[derive(Default)]
pub enum Priority {
/// Low priority - nice to remember
Low = 1,
/// Normal priority - should remember
#[default]
Normal = 2,
/// High priority - important to remember
High = 3,
@ -142,11 +144,6 @@ pub enum Priority {
Critical = 4,
}
impl Default for Priority {
fn default() -> Self {
Self::Normal
}
}
impl Priority {
/// Get numeric value for comparison
@ -182,8 +179,10 @@ impl Priority {
/// Status of an intention
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Default)]
pub enum IntentionStatus {
/// Intention is active and being monitored
#[default]
Active,
/// Intention has been triggered but not yet fulfilled
Triggered,
@ -197,11 +196,6 @@ pub enum IntentionStatus {
Snoozed,
}
impl Default for IntentionStatus {
fn default() -> Self {
Self::Active
}
}
/// Pattern for matching trigger conditions
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -516,7 +510,7 @@ impl RecurrencePattern {
Utc,
);
}
candidate = candidate + Duration::days(1);
candidate += Duration::days(1);
}
from + Duration::days(7) // Fallback
}
@ -1030,7 +1024,7 @@ impl IntentionParser {
};
// Extract entity if mentioned
if let Some(entity) = self.extract_entity(&text_lower) {
if let Some(entity) = self.extract_entity(text_lower) {
return Ok((
IntentionTrigger::EventBased {
condition: format!("Meeting or conversation with {}", entity),
@ -1287,12 +1281,10 @@ impl ProspectiveMemory {
if intention
.trigger
.is_triggered(context, &context.recent_events)
{
if intention.should_remind() {
&& intention.should_remind() {
intention.mark_triggered();
triggered.push(intention.clone());
}
}
// Check for deadline escalation
if self.config.enable_escalation {
@ -1450,10 +1442,10 @@ impl ProspectiveMemory {
Ok(IntentionStats {
total_active: active,
triggered: triggered,
overdue: overdue,
triggered,
overdue,
fulfilled_lifetime: fulfilled,
high_priority: high_priority,
high_priority,
})
}

View file

@ -40,8 +40,10 @@ const MIN_ACTIVATION_THRESHOLD: f64 = 0.1;
/// Types of associative links between memories
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum LinkType {
/// Same topic/category
#[default]
Semantic,
/// Occurred together in time
Temporal,
@ -55,11 +57,6 @@ pub enum LinkType {
UserDefined,
}
impl Default for LinkType {
fn default() -> Self {
LinkType::Semantic
}
}
// ============================================================================
// ASSOCIATION EDGE

View file

@ -105,9 +105,11 @@ const DEFAULT_MAX_CLUSTER_SIZE: usize = 50;
/// - Logarithmic: Very slow decay, good for important memories
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum DecayFunction {
/// Exponential decay: strength = initial * e^(-lambda * t)
/// Best for modeling biological tag decay
#[default]
Exponential,
/// Linear decay: strength = initial * (1 - t/lifetime)
/// Simple, predictable decay
@ -120,11 +122,6 @@ pub enum DecayFunction {
Logarithmic,
}
impl Default for DecayFunction {
fn default() -> Self {
DecayFunction::Exponential
}
}
impl DecayFunction {
/// Calculate decayed strength

View file

@ -295,7 +295,7 @@ impl Storage {
// Build candidate memories
let mut candidates: Vec<CandidateMemory> = Vec::new();
for (node_id, similarity) in similar.iter() {
for (node_id, _similarity) in similar.iter() {
if let Some(node) = self.get_node(node_id)? {
// Get embedding for this node
if let Some(emb) = self.get_node_embedding(node_id)? {
@ -1034,10 +1034,8 @@ impl Storage {
self.row_to_node(row)
})?;
let mut nodes = Vec::new();
for row in rows {
if let Ok(node) = row {
nodes.push(node);
}
for node in rows.flatten() {
nodes.push(node);
}
Ok(nodes)
}
@ -1051,10 +1049,8 @@ impl Storage {
)?;
let rows = stmt.query_map(params![node_type, limit], |row| self.row_to_node(row))?;
let mut nodes = Vec::new();
for row in rows {
if let Ok(node) = row {
nodes.push(node);
}
for node in rows.flatten() {
nodes.push(node);
}
Ok(nodes)
}
@ -1296,10 +1292,8 @@ impl Storage {
Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
})?;
for row in rows {
if let Ok(r) = row {
result_nodes.push(r);
}
for r in rows.flatten() {
result_nodes.push(r);
}
}
result_nodes
@ -1512,7 +1506,7 @@ impl Storage {
)? as i64;
#[cfg(all(feature = "embeddings", feature = "vector-search"))]
let embeddings_generated = self.generate_missing_embeddings()? as i64;
let embeddings_generated = self.generate_missing_embeddings()?;
#[cfg(not(all(feature = "embeddings", feature = "vector-search")))]
let embeddings_generated = 0i64;