Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion CLAUDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ internal/
reactor/ Event-driven rule engine
api/ REST API server + routes
web/ Embedded dashboard (single-page app, D3.js charts)
mcp/ MCP server (21 tools for Claude Code)
mcp/ MCP server (22 tools for Claude Code)
store/ Store interface + SQLite implementation
llm/ LLM provider interface + implementations (LM Studio, Gemini/cloud API)
ingest/ Project ingestion engine
Expand Down Expand Up @@ -136,6 +136,7 @@ You have 22 tools via the `mnemonic` MCP server:
| `ingest_project` | Bulk-ingest a project directory into memory |
| `exclude_path` | Add a watcher exclusion pattern at runtime |
| `list_exclusions` | List all runtime watcher exclusion patterns |
| `dismiss_pattern` | Archive a stale or irrelevant pattern to stop it surfacing in recall |

### At Session Start

Expand Down
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ A local-first semantic memory daemon that watches your work, learns from it, and
- **Autonomous** — Watches your filesystem, terminal, and clipboard. Encodes memories without you lifting a finger.
- **Biological** — Memories consolidate, decay, form patterns, and become principles. It doesn't just store — it *processes*.
- **Local-first** — Air-gapped, SQLite-backed, never phones home. Your data stays on your machine.
- **21 MCP tools** — Drop-in memory layer for Claude Code and other AI agents.
- **22 MCP tools** — Drop-in memory layer for Claude Code and other AI agents.
- **Self-updating** — Built-in update mechanism checks GitHub Releases and applies updates in-place.
- **Cross-platform** — macOS, Linux, and Windows. Daemon management via launchd, systemd, or Windows Services.

Expand Down Expand Up @@ -106,7 +106,7 @@ For the full deep dive, see [ARCHITECTURE.md](ARCHITECTURE.md).

## MCP Integration

Mnemonic exposes 21 tools via the [Model Context Protocol](https://modelcontextprotocol.io/) for Claude Code and other AI agents:
Mnemonic exposes 22 tools via the [Model Context Protocol](https://modelcontextprotocol.io/) for Claude Code and other AI agents:

**Claude Code config** (`~/.claude/settings.local.json`):

Expand Down Expand Up @@ -222,7 +222,7 @@ internal/
agent/ 8 cognitive agents + orchestrator + reactor
api/ HTTP + WebSocket server
web/ Embedded dashboard (single-page app)
mcp/ MCP server (21 tools)
mcp/ MCP server (22 tools)
store/ Store interface + SQLite (FTS5 + vector search)
llm/ LLM provider interface (LM Studio, Gemini, cloud APIs)
ingest/ Project ingestion engine
Expand Down
66 changes: 34 additions & 32 deletions internal/agent/consolidation/agent.go
Original file line number Diff line number Diff line change
Expand Up @@ -1518,53 +1518,55 @@ func (ca *ConsolidationAgent) decayPatterns(ctx context.Context) (int, error) {
decayed := 0
for i := range patterns {
p := &patterns[i]
if p.State != "active" {
if p.State != "active" && p.State != "fading" {
continue
}

// Apply baseline decay — self-sustaining patterns get reduced decay
// Compute evidence health ratio for all patterns with evidence.
totalEvidence := len(p.EvidenceIDs)
var evidenceRatio float32 = 1.0
if totalEvidence > 0 {
activeEvidence := 0
for _, memID := range p.EvidenceIDs {
mem, err := ca.store.GetMemory(ctx, memID)
if err == nil && (mem.State == store.MemoryStateActive || mem.State == store.MemoryStateFading) {
activeEvidence++
}
}
evidenceRatio = float32(activeEvidence) / float32(totalEvidence)
} else {
evidenceRatio = 0
}

// Apply baseline decay — self-sustaining requires healthy evidence
minEvidence := cfgInt(ca.config.SelfSustainingMinEvidence, 10)
minStrength := cfgFloat32(ca.config.SelfSustainingMinStrength, 0.9)
if len(p.EvidenceIDs) >= minEvidence && p.Strength >= minStrength {
if len(p.EvidenceIDs) >= minEvidence && p.Strength >= minStrength && evidenceRatio >= 0.5 {
p.Strength *= cfgFloat32(ca.config.SelfSustainingDecay, 0.9999)
} else {
p.Strength *= cfgFloat32(ca.config.PatternBaselineDecay, 0.998)
}

// Additional evidence-based decay for patterns not accessed within 3 days
recency := p.LastAccessed
if recency.IsZero() {
recency = p.CreatedAt
}
stale := recency.IsZero() || time.Since(recency).Hours() >= 72

if stale {
totalEvidence := len(p.EvidenceIDs)
if totalEvidence == 0 {
// Evidence-based decay applies to all patterns (not just stale ones).
// Patterns with dead evidence should decay regardless of access recency.
if totalEvidence == 0 {
p.Strength *= cfgFloat32(ca.config.StaleDecayAggressive, 0.90)
} else {
switch {
case evidenceRatio >= 0.5:
// Healthy evidence — no additional decay beyond baseline
case evidenceRatio >= 0.2:
p.Strength *= cfgFloat32(ca.config.StaleDecayModerate, 0.95)
default:
p.Strength *= cfgFloat32(ca.config.StaleDecayAggressive, 0.90)
} else {
activeEvidence := 0
for _, memID := range p.EvidenceIDs {
mem, err := ca.store.GetMemory(ctx, memID)
if err == nil && (mem.State == store.MemoryStateActive || mem.State == store.MemoryStateFading) {
activeEvidence++
}
}
evidenceRatio := float32(activeEvidence) / float32(totalEvidence)
switch {
case evidenceRatio >= 0.5:
p.Strength *= cfgFloat32(ca.config.StaleDecayHealthy, 0.98)
case evidenceRatio >= 0.2:
p.Strength *= cfgFloat32(ca.config.StaleDecayModerate, 0.95)
default:
p.Strength *= cfgFloat32(ca.config.StaleDecayAggressive, 0.90)
}
}
}

// Below 0.1 → transition to fading
if p.Strength < 0.1 {
// State transitions: active → fading at 0.1, fading → archived at 0.05
if p.State == "active" && p.Strength < 0.1 {
p.State = "fading"
} else if p.State == "fading" && p.Strength < 0.05 {
p.State = "archived"
}

p.UpdatedAt = time.Now()
Expand Down
19 changes: 10 additions & 9 deletions internal/agent/retrieval/agent.go
Original file line number Diff line number Diff line change
Expand Up @@ -356,16 +356,17 @@ func (ra *RetrievalAgent) Query(ctx context.Context, req QueryRequest) (QueryRes

if embedding != nil {
if req.IncludePatterns {
patterns, err := ra.store.SearchPatternsByEmbedding(ctx, embedding, intOr(ra.config.PatternSearchLimit, 5))
if err != nil {
ra.log.Warn("pattern search failed", "query_id", queryID, "error", err)
var patterns []store.Pattern
var pErr error
if req.Project != "" {
patterns, pErr = ra.store.SearchPatternsByEmbeddingInProject(ctx, embedding, req.Project, intOr(ra.config.PatternSearchLimit, 5))
} else {
// Filter by project if specified
for _, p := range patterns {
if req.Project == "" || p.Project == "" || p.Project == req.Project {
matchedPatterns = append(matchedPatterns, p)
}
}
patterns, pErr = ra.store.SearchPatternsByEmbedding(ctx, embedding, intOr(ra.config.PatternSearchLimit, 5))
}
if pErr != nil {
ra.log.Warn("pattern search failed", "query_id", queryID, "error", pErr)
} else {
matchedPatterns = patterns
}
}

Expand Down
17 changes: 17 additions & 0 deletions internal/mcp/server.go
Original file line number Diff line number Diff line change
Expand Up @@ -283,6 +283,8 @@ func (srv *MCPServer) handleToolCall(ctx context.Context, req *jsonRPCRequest) *
result, toolErr = srv.handleAmend(ctx, params.Arguments)
case "check_memory":
result, toolErr = srv.handleCheckMemory(ctx, params.Arguments)
case "dismiss_pattern":
result, toolErr = srv.handleDismissPattern(ctx, params.Arguments)
default:
return errorResponse(req.ID, -32602, fmt.Sprintf("Unknown tool: %s", params.Name))
}
Expand Down Expand Up @@ -2480,3 +2482,18 @@ func (srv *MCPServer) handleCheckMemory(ctx context.Context, args map[string]int

return toolResult(sb.String()), nil
}

// handleDismissPattern archives a pattern by ID so it stops surfacing in
// recall results. Requires a non-empty "pattern_id" argument; the underlying
// store reports (wrapped) store.ErrNotFound for unknown IDs.
func (srv *MCPServer) handleDismissPattern(ctx context.Context, args map[string]interface{}) (interface{}, error) {
	patternID, _ := args["pattern_id"].(string)
	if patternID == "" {
		return nil, fmt.Errorf("pattern_id is required")
	}

	// Use the caller's context (not context.Background()) so request
	// cancellation and deadlines propagate to the store call.
	// ArchivePattern already embeds the pattern ID in its error, so wrap
	// with the operation only to avoid duplicated context.
	if err := srv.store.ArchivePattern(ctx, patternID); err != nil {
		return nil, fmt.Errorf("dismissing pattern: %w", err)
	}

	srv.log.Info("pattern dismissed", "pattern_id", patternID, "session_id", srv.sessionID)
	return toolResult(fmt.Sprintf("Pattern %s archived", patternID)), nil
}
5 changes: 3 additions & 2 deletions internal/mcp/server_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -127,8 +127,8 @@ func TestHandleToolsList(t *testing.T) {
t.Fatalf("tools is not an array, got %T", toolsInterface)
}

if len(toolsArray) != 21 {
t.Fatalf("expected 21 tools, got %d", len(toolsArray))
if len(toolsArray) != 22 {
t.Fatalf("expected 22 tools, got %d", len(toolsArray))
}

// Verify tool names
Expand All @@ -154,6 +154,7 @@ func TestHandleToolsList(t *testing.T) {
"check_memory": false,
"exclude_path": false,
"list_exclusions": false,
"dismiss_pattern": false,
}

for _, toolInterface := range toolsArray {
Expand Down
18 changes: 18 additions & 0 deletions internal/mcp/tools.go
Original file line number Diff line number Diff line change
Expand Up @@ -188,6 +188,23 @@ func forgetToolDef() ToolDefinition {
}
}

// dismissPatternToolDef describes the dismiss_pattern tool, which archives a
// pattern by ID so it no longer appears in recall results.
func dismissPatternToolDef() ToolDefinition {
	// Build the JSON schema in pieces for readability: a single required
	// string property, pattern_id.
	patternIDProp := map[string]interface{}{
		"type":        "string",
		"description": "The ID of the pattern to archive",
	}
	schema := map[string]interface{}{
		"type":       "object",
		"properties": map[string]interface{}{"pattern_id": patternIDProp},
		"required":   []string{"pattern_id"},
	}
	return ToolDefinition{
		Name:        "dismiss_pattern",
		Description: "Archive a pattern by ID. Use this to dismiss stale or irrelevant patterns that keep surfacing in recall results.",
		InputSchema: schema,
	}
}

func statusToolDef() ToolDefinition {
return ToolDefinition{
Name: "status",
Expand Down Expand Up @@ -574,5 +591,6 @@ func allToolDefs() []ToolDefinition {
listExclusionsToolDef(),
amendToolDef(),
checkMemoryToolDef(),
dismissPatternToolDef(),
}
}
81 changes: 76 additions & 5 deletions internal/store/sqlite/patterns.go
Original file line number Diff line number Diff line change
Expand Up @@ -94,16 +94,17 @@ func (s *SQLiteStore) UpdatePattern(ctx context.Context, p store.Pattern) error
}

// ListPatterns lists patterns, optionally filtered by project.
// Returns active and fading patterns (fading are needed by decay logic).
func (s *SQLiteStore) ListPatterns(ctx context.Context, project string, limit int) ([]store.Pattern, error) {
var query string
var args []interface{}
var args []any

if project == "" {
query = `SELECT ` + patternColumns + ` FROM patterns WHERE state = 'active' ORDER BY strength DESC LIMIT ?`
args = []interface{}{limit}
query = `SELECT ` + patternColumns + ` FROM patterns WHERE state IN ('active', 'fading') ORDER BY strength DESC LIMIT ?`
args = []any{limit}
} else {
query = `SELECT ` + patternColumns + ` FROM patterns WHERE state = 'active' AND project = ? ORDER BY strength DESC LIMIT ?`
args = []interface{}{project, limit}
query = `SELECT ` + patternColumns + ` FROM patterns WHERE state IN ('active', 'fading') AND project = ? ORDER BY strength DESC LIMIT ?`
args = []any{project, limit}
}

rows, err := s.db.QueryContext(ctx, query, args...)
Expand Down Expand Up @@ -248,6 +249,76 @@ func scanPatternRows(rows *sql.Rows) ([]store.Pattern, error) {
return patterns, nil
}

// SearchPatternsByEmbeddingInProject searches patterns scoped to a project.
// When project is empty it falls back to the unscoped SearchPatternsByEmbedding.
// Only active patterns with a non-empty stored embedding are considered; the
// top `limit` candidates by cosine similarity are fetched and returned.
func (s *SQLiteStore) SearchPatternsByEmbeddingInProject(ctx context.Context, embedding []float32, project string, limit int) ([]store.Pattern, error) {
	if len(embedding) == 0 {
		return nil, fmt.Errorf("embedding cannot be empty")
	}
	if project == "" {
		return s.SearchPatternsByEmbedding(ctx, embedding, limit)
	}

	rows, err := s.db.QueryContext(ctx,
		`SELECT id, embedding FROM patterns WHERE state = 'active' AND project = ? AND embedding IS NOT NULL AND length(embedding) > 0`, project)
	if err != nil {
		return nil, fmt.Errorf("failed to query pattern embeddings: %w", err)
	}
	defer func() { _ = rows.Close() }()

	type candidate struct {
		id    string
		score float32
	}
	var candidates []candidate

	for rows.Next() {
		var id string
		var blob []byte
		if err := rows.Scan(&id, &blob); err != nil {
			// Best-effort: skip rows that fail to scan rather than
			// aborting the whole search.
			continue
		}
		emb := decodeEmbedding(blob)
		if len(emb) == 0 {
			continue
		}
		score := cosineSimilarity(embedding, emb)
		candidates = append(candidates, candidate{id: id, score: score})
	}
	// Surface iteration errors (e.g. the connection dying mid-scan) that the
	// per-row Scan handling above cannot observe.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("iterating pattern embeddings: %w", err)
	}

	sort.Slice(candidates, func(i, j int) bool {
		return candidates[i].score > candidates[j].score
	})
	if len(candidates) > limit {
		candidates = candidates[:limit]
	}

	var patterns []store.Pattern
	for _, c := range candidates {
		p, err := s.GetPattern(ctx, c.id)
		if err != nil {
			// Best-effort: a pattern removed between the similarity scan and
			// this fetch is simply omitted from the results.
			continue
		}
		patterns = append(patterns, p)
	}

	return patterns, nil
}

// ArchivePattern archives a single pattern by ID, stamping updated_at.
// Returns a wrapped store.ErrNotFound when no pattern with the given ID exists.
func (s *SQLiteStore) ArchivePattern(ctx context.Context, id string) error {
	result, err := s.db.ExecContext(ctx,
		`UPDATE patterns SET state = 'archived', updated_at = datetime('now') WHERE id = ?`, id)
	if err != nil {
		return fmt.Errorf("archiving pattern %s: %w", id, err)
	}
	// Don't discard the RowsAffected error: doing so would misreport a driver
	// failure as "not found" below.
	n, err := result.RowsAffected()
	if err != nil {
		return fmt.Errorf("archiving pattern %s: %w", id, err)
	}
	if n == 0 {
		return fmt.Errorf("pattern %s: %w", id, store.ErrNotFound)
	}
	return nil
}

// ArchiveAllPatterns transitions all active patterns to archived state.
func (s *SQLiteStore) ArchiveAllPatterns(ctx context.Context) (int, error) {
result, err := s.db.ExecContext(ctx,
Expand Down
2 changes: 2 additions & 0 deletions internal/store/store.go
Original file line number Diff line number Diff line change
Expand Up @@ -475,6 +475,8 @@ type Store interface {
UpdatePattern(ctx context.Context, p Pattern) error
ListPatterns(ctx context.Context, project string, limit int) ([]Pattern, error)
SearchPatternsByEmbedding(ctx context.Context, embedding []float32, limit int) ([]Pattern, error)
SearchPatternsByEmbeddingInProject(ctx context.Context, embedding []float32, project string, limit int) ([]Pattern, error)
ArchivePattern(ctx context.Context, id string) error
ArchiveAllPatterns(ctx context.Context) (int, error)

// --- Abstraction operations ---
Expand Down
4 changes: 4 additions & 0 deletions internal/store/storetest/mock.go
Original file line number Diff line number Diff line change
Expand Up @@ -237,6 +237,10 @@ func (MockStore) ListPatterns(context.Context, string, int) ([]store.Pattern, er
func (MockStore) SearchPatternsByEmbedding(context.Context, []float32, int) ([]store.Pattern, error) {
return nil, nil
}
// SearchPatternsByEmbeddingInProject is a no-op stub returning no patterns;
// tests needing project-scoped search behavior should embed MockStore and
// override this method.
func (MockStore) SearchPatternsByEmbeddingInProject(context.Context, []float32, string, int) ([]store.Pattern, error) {
	return nil, nil
}

// ArchivePattern is a no-op stub that always reports success.
func (MockStore) ArchivePattern(context.Context, string) error { return nil }
func (MockStore) ArchiveAllPatterns(context.Context) (int, error) { return 0, nil }

// --- Abstraction operations ---
Expand Down