diff --git a/CLAUDE.md b/CLAUDE.md index 70aa23db..e76d9442 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -36,7 +36,7 @@ internal/ reactor/ Event-driven rule engine api/ REST API server + routes web/ Embedded dashboard (single-page app, D3.js charts) - mcp/ MCP server (21 tools for Claude Code) + mcp/ MCP server (22 tools for Claude Code) store/ Store interface + SQLite implementation llm/ LLM provider interface + implementations (LM Studio, Gemini/cloud API) ingest/ Project ingestion engine @@ -136,6 +136,7 @@ You have 21 tools via the `mnemonic` MCP server: | `ingest_project` | Bulk-ingest a project directory into memory | | `exclude_path` | Add a watcher exclusion pattern at runtime | | `list_exclusions` | List all runtime watcher exclusion patterns | +| `dismiss_pattern` | Archive a stale or irrelevant pattern to stop it surfacing in recall | ### At Session Start diff --git a/README.md b/README.md index 9843ba17..9f3a4193 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ A local-first semantic memory daemon that watches your work, learns from it, and - **Autonomous** — Watches your filesystem, terminal, and clipboard. Encodes memories without you lifting a finger. - **Biological** — Memories consolidate, decay, form patterns, and become principles. It doesn't just store — it *processes*. - **Local-first** — Air-gapped, SQLite-backed, never phones home. Your data stays on your machine. -- **21 MCP tools** — Drop-in memory layer for Claude Code and other AI agents. +- **22 MCP tools** — Drop-in memory layer for Claude Code and other AI agents. - **Self-updating** — Built-in update mechanism checks GitHub Releases and applies updates in-place. - **Cross-platform** — macOS, Linux, and Windows. Daemon management via launchd, systemd, or Windows Services. @@ -106,7 +106,7 @@ For the full deep dive, see [ARCHITECTURE.md](ARCHITECTURE.md). 
## MCP Integration -Mnemonic exposes 21 tools via the [Model Context Protocol](https://modelcontextprotocol.io/) for Claude Code and other AI agents: +Mnemonic exposes 22 tools via the [Model Context Protocol](https://modelcontextprotocol.io/) for Claude Code and other AI agents: **Claude Code config** (`~/.claude/settings.local.json`): @@ -222,7 +222,7 @@ internal/ agent/ 8 cognitive agents + orchestrator + reactor api/ HTTP + WebSocket server web/ Embedded dashboard (single-page app) - mcp/ MCP server (21 tools) + mcp/ MCP server (22 tools) store/ Store interface + SQLite (FTS5 + vector search) llm/ LLM provider interface (LM Studio, Gemini, cloud APIs) ingest/ Project ingestion engine diff --git a/internal/agent/consolidation/agent.go b/internal/agent/consolidation/agent.go index 875d8b9a..3d8dab6d 100644 --- a/internal/agent/consolidation/agent.go +++ b/internal/agent/consolidation/agent.go @@ -1518,53 +1518,55 @@ func (ca *ConsolidationAgent) decayPatterns(ctx context.Context) (int, error) { decayed := 0 for i := range patterns { p := &patterns[i] - if p.State != "active" { + if p.State != "active" && p.State != "fading" { continue } - // Apply baseline decay — self-sustaining patterns get reduced decay + // Compute evidence health ratio for all patterns with evidence. 
+ totalEvidence := len(p.EvidenceIDs) + var evidenceRatio float32 = 1.0 + if totalEvidence > 0 { + activeEvidence := 0 + for _, memID := range p.EvidenceIDs { + mem, err := ca.store.GetMemory(ctx, memID) + if err == nil && (mem.State == store.MemoryStateActive || mem.State == store.MemoryStateFading) { + activeEvidence++ + } + } + evidenceRatio = float32(activeEvidence) / float32(totalEvidence) + } else { + evidenceRatio = 0 + } + + // Apply baseline decay — self-sustaining requires healthy evidence minEvidence := cfgInt(ca.config.SelfSustainingMinEvidence, 10) minStrength := cfgFloat32(ca.config.SelfSustainingMinStrength, 0.9) - if len(p.EvidenceIDs) >= minEvidence && p.Strength >= minStrength { + if len(p.EvidenceIDs) >= minEvidence && p.Strength >= minStrength && evidenceRatio >= 0.5 { p.Strength *= cfgFloat32(ca.config.SelfSustainingDecay, 0.9999) } else { p.Strength *= cfgFloat32(ca.config.PatternBaselineDecay, 0.998) } - // Additional evidence-based decay for patterns not accessed within 3 days - recency := p.LastAccessed - if recency.IsZero() { - recency = p.CreatedAt - } - stale := recency.IsZero() || time.Since(recency).Hours() >= 72 - - if stale { - totalEvidence := len(p.EvidenceIDs) - if totalEvidence == 0 { + // Evidence-based decay applies to all patterns (not just stale ones). + // Patterns with dead evidence should decay regardless of access recency. 
+ if totalEvidence == 0 { + p.Strength *= cfgFloat32(ca.config.StaleDecayAggressive, 0.90) + } else { + switch { + case evidenceRatio >= 0.5: + // Healthy evidence — no additional decay beyond baseline + case evidenceRatio >= 0.2: + p.Strength *= cfgFloat32(ca.config.StaleDecayModerate, 0.95) + default: p.Strength *= cfgFloat32(ca.config.StaleDecayAggressive, 0.90) - } else { - activeEvidence := 0 - for _, memID := range p.EvidenceIDs { - mem, err := ca.store.GetMemory(ctx, memID) - if err == nil && (mem.State == store.MemoryStateActive || mem.State == store.MemoryStateFading) { - activeEvidence++ - } - } - evidenceRatio := float32(activeEvidence) / float32(totalEvidence) - switch { - case evidenceRatio >= 0.5: - p.Strength *= cfgFloat32(ca.config.StaleDecayHealthy, 0.98) - case evidenceRatio >= 0.2: - p.Strength *= cfgFloat32(ca.config.StaleDecayModerate, 0.95) - default: - p.Strength *= cfgFloat32(ca.config.StaleDecayAggressive, 0.90) - } } } - // Below 0.1 → transition to fading - if p.Strength < 0.1 { + // State transitions: active → fading at 0.1, fading → archived at 0.05 + if p.State == "active" && p.Strength < 0.1 { p.State = "fading" + } else if p.State == "fading" && p.Strength < 0.05 { + p.State = "archived" } p.UpdatedAt = time.Now() diff --git a/internal/agent/retrieval/agent.go b/internal/agent/retrieval/agent.go index 4944c159..342db650 100644 --- a/internal/agent/retrieval/agent.go +++ b/internal/agent/retrieval/agent.go @@ -356,16 +356,17 @@ func (ra *RetrievalAgent) Query(ctx context.Context, req QueryRequest) (QueryRes if embedding != nil { if req.IncludePatterns { - patterns, err := ra.store.SearchPatternsByEmbedding(ctx, embedding, intOr(ra.config.PatternSearchLimit, 5)) - if err != nil { - ra.log.Warn("pattern search failed", "query_id", queryID, "error", err) + var patterns []store.Pattern + var pErr error + if req.Project != "" { + patterns, pErr = ra.store.SearchPatternsByEmbeddingInProject(ctx, embedding, req.Project, 
intOr(ra.config.PatternSearchLimit, 5)) } else { - // Filter by project if specified - for _, p := range patterns { - if req.Project == "" || p.Project == "" || p.Project == req.Project { - matchedPatterns = append(matchedPatterns, p) - } - } + patterns, pErr = ra.store.SearchPatternsByEmbedding(ctx, embedding, intOr(ra.config.PatternSearchLimit, 5)) + } + if pErr != nil { + ra.log.Warn("pattern search failed", "query_id", queryID, "error", pErr) + } else { + matchedPatterns = patterns } } diff --git a/internal/mcp/server.go b/internal/mcp/server.go index 91a8068c..f009dae7 100644 --- a/internal/mcp/server.go +++ b/internal/mcp/server.go @@ -283,6 +283,8 @@ func (srv *MCPServer) handleToolCall(ctx context.Context, req *jsonRPCRequest) * result, toolErr = srv.handleAmend(ctx, params.Arguments) case "check_memory": result, toolErr = srv.handleCheckMemory(ctx, params.Arguments) + case "dismiss_pattern": + result, toolErr = srv.handleDismissPattern(ctx, params.Arguments) default: return errorResponse(req.ID, -32602, fmt.Sprintf("Unknown tool: %s", params.Name)) } @@ -2480,3 +2482,18 @@ func (srv *MCPServer) handleCheckMemory(ctx context.Context, args map[string]int return toolResult(sb.String()), nil } + +// handleDismissPattern archives a pattern by ID. 
+func (srv *MCPServer) handleDismissPattern(ctx context.Context, args map[string]interface{}) (interface{}, error) { + patternID, _ := args["pattern_id"].(string) + if patternID == "" { + return nil, fmt.Errorf("pattern_id is required") + } + + if err := srv.store.ArchivePattern(ctx, patternID); err != nil { + return nil, fmt.Errorf("archiving pattern %s: %w", patternID, err) + } + + srv.log.Info("pattern dismissed", "pattern_id", patternID, "session_id", srv.sessionID) + return toolResult(fmt.Sprintf("Pattern %s archived", patternID)), nil +} diff --git a/internal/mcp/server_test.go b/internal/mcp/server_test.go index 1582a563..b5f73b8d 100644 --- a/internal/mcp/server_test.go +++ b/internal/mcp/server_test.go @@ -127,8 +127,8 @@ func TestHandleToolsList(t *testing.T) { t.Fatalf("tools is not an array, got %T", toolsInterface) } - if len(toolsArray) != 21 { - t.Fatalf("expected 21 tools, got %d", len(toolsArray)) + if len(toolsArray) != 22 { + t.Fatalf("expected 22 tools, got %d", len(toolsArray)) } // Verify tool names @@ -154,6 +154,7 @@ func TestHandleToolsList(t *testing.T) { "check_memory": false, "exclude_path": false, "list_exclusions": false, + "dismiss_pattern": false, } for _, toolInterface := range toolsArray { diff --git a/internal/mcp/tools.go b/internal/mcp/tools.go index 914a6487..b932aa07 100644 --- a/internal/mcp/tools.go +++ b/internal/mcp/tools.go @@ -188,6 +188,23 @@ func forgetToolDef() ToolDefinition { } } +func dismissPatternToolDef() ToolDefinition { + return ToolDefinition{ + Name: "dismiss_pattern", + Description: "Archive a pattern by ID. 
Use this to dismiss stale or irrelevant patterns that keep surfacing in recall results.", + InputSchema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "pattern_id": map[string]interface{}{ + "type": "string", + "description": "The ID of the pattern to archive", + }, + }, + "required": []string{"pattern_id"}, + }, + } +} + func statusToolDef() ToolDefinition { return ToolDefinition{ Name: "status", @@ -574,5 +591,6 @@ func allToolDefs() []ToolDefinition { listExclusionsToolDef(), amendToolDef(), checkMemoryToolDef(), + dismissPatternToolDef(), } } diff --git a/internal/store/sqlite/patterns.go b/internal/store/sqlite/patterns.go index a4682614..2fd77f93 100644 --- a/internal/store/sqlite/patterns.go +++ b/internal/store/sqlite/patterns.go @@ -94,16 +94,17 @@ func (s *SQLiteStore) UpdatePattern(ctx context.Context, p store.Pattern) error } // ListPatterns lists patterns, optionally filtered by project. +// Returns active and fading patterns (fading are needed by decay logic). func (s *SQLiteStore) ListPatterns(ctx context.Context, project string, limit int) ([]store.Pattern, error) { var query string - var args []interface{} + var args []any if project == "" { - query = `SELECT ` + patternColumns + ` FROM patterns WHERE state = 'active' ORDER BY strength DESC LIMIT ?` - args = []interface{}{limit} + query = `SELECT ` + patternColumns + ` FROM patterns WHERE state IN ('active', 'fading') ORDER BY strength DESC LIMIT ?` + args = []any{limit} } else { - query = `SELECT ` + patternColumns + ` FROM patterns WHERE state = 'active' AND project = ? ORDER BY strength DESC LIMIT ?` - args = []interface{}{project, limit} + query = `SELECT ` + patternColumns + ` FROM patterns WHERE state IN ('active', 'fading') AND project = ? ORDER BY strength DESC LIMIT ?` + args = []any{project, limit} } rows, err := s.db.QueryContext(ctx, query, args...) 
@@ -248,6 +249,80 @@ func scanPatternRows(rows *sql.Rows) ([]store.Pattern, error) { return patterns, nil } +// SearchPatternsByEmbeddingInProject searches patterns scoped to a project. +// When project is non-empty, patterns in that project (or with no project set) are returned. +func (s *SQLiteStore) SearchPatternsByEmbeddingInProject(ctx context.Context, embedding []float32, project string, limit int) ([]store.Pattern, error) { + if len(embedding) == 0 { + return nil, fmt.Errorf("embedding cannot be empty") + } + if project == "" { + return s.SearchPatternsByEmbedding(ctx, embedding, limit) + } + + rows, err := s.db.QueryContext(ctx, + `SELECT id, embedding FROM patterns WHERE state = 'active' AND (project = ? OR project = '' OR project IS NULL) AND embedding IS NOT NULL AND length(embedding) > 0`, project) + if err != nil { + return nil, fmt.Errorf("failed to query pattern embeddings: %w", err) + } + defer func() { _ = rows.Close() }() + + type candidate struct { + id string + score float32 + } + var candidates []candidate + + for rows.Next() { + var id string + var blob []byte + if err := rows.Scan(&id, &blob); err != nil { + continue + } + emb := decodeEmbedding(blob) + if len(emb) == 0 { + continue + } + score := cosineSimilarity(embedding, emb) + candidates = append(candidates, candidate{id: id, score: score}) + } + + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("iterating pattern embeddings: %w", err) + } + + sort.Slice(candidates, func(i, j int) bool { + return candidates[i].score > candidates[j].score + }) + if len(candidates) > limit { + candidates = candidates[:limit] + } + + var patterns []store.Pattern + for _, c := range candidates { + p, err := s.GetPattern(ctx, c.id) + if err != nil { + continue + } + patterns = append(patterns, p) + } + + return patterns, nil +} + +// ArchivePattern archives a single pattern by ID. 
+func (s *SQLiteStore) ArchivePattern(ctx context.Context, id string) error { + result, err := s.db.ExecContext(ctx, + `UPDATE patterns SET state = 'archived', updated_at = datetime('now') WHERE id = ?`, id) + if err != nil { + return fmt.Errorf("archiving pattern %s: %w", id, err) + } + n, _ := result.RowsAffected() + if n == 0 { + return fmt.Errorf("pattern %s: %w", id, store.ErrNotFound) + } + return nil +} + // ArchiveAllPatterns transitions all active patterns to archived state. func (s *SQLiteStore) ArchiveAllPatterns(ctx context.Context) (int, error) { result, err := s.db.ExecContext(ctx, diff --git a/internal/store/store.go b/internal/store/store.go index 2756fbb8..51caad97 100644 --- a/internal/store/store.go +++ b/internal/store/store.go @@ -475,6 +475,8 @@ type Store interface { UpdatePattern(ctx context.Context, p Pattern) error ListPatterns(ctx context.Context, project string, limit int) ([]Pattern, error) SearchPatternsByEmbedding(ctx context.Context, embedding []float32, limit int) ([]Pattern, error) + SearchPatternsByEmbeddingInProject(ctx context.Context, embedding []float32, project string, limit int) ([]Pattern, error) + ArchivePattern(ctx context.Context, id string) error ArchiveAllPatterns(ctx context.Context) (int, error) // --- Abstraction operations --- diff --git a/internal/store/storetest/mock.go b/internal/store/storetest/mock.go index a45aea4c..18db1b72 100644 --- a/internal/store/storetest/mock.go +++ b/internal/store/storetest/mock.go @@ -237,6 +237,10 @@ func (MockStore) ListPatterns(context.Context, string, int) ([]store.Pattern, er func (MockStore) SearchPatternsByEmbedding(context.Context, []float32, int) ([]store.Pattern, error) { return nil, nil } +func (MockStore) SearchPatternsByEmbeddingInProject(context.Context, []float32, string, int) ([]store.Pattern, error) { + return nil, nil +} +func (MockStore) ArchivePattern(context.Context, string) error { return nil } func (MockStore) ArchiveAllPatterns(context.Context) (int, error) 
{ return 0, nil } // --- Abstraction operations ---