diff --git a/packages/opencode/migration/20260410120000_memory_v2_schema/migration.sql b/packages/opencode/migration/20260410120000_memory_v2_schema/migration.sql new file mode 100644 index 000000000000..a9b5f0914ce2 --- /dev/null +++ b/packages/opencode/migration/20260410120000_memory_v2_schema/migration.sql @@ -0,0 +1,12 @@ +ALTER TABLE `memory` ADD COLUMN `scope` text NOT NULL DEFAULT 'project';--> statement-breakpoint +ALTER TABLE `memory` ADD COLUMN `description` text;--> statement-breakpoint +ALTER TABLE `memory` ADD COLUMN `agent` text;--> statement-breakpoint +ALTER TABLE `memory` ADD COLUMN `relevance_score` real NOT NULL DEFAULT 1.0;--> statement-breakpoint +ALTER TABLE `memory` ADD COLUMN `time_last_verified` integer;--> statement-breakpoint +ALTER TABLE `memory` ADD COLUMN `promoted_from` text;--> statement-breakpoint +CREATE INDEX `memory_agent_idx` ON `memory` (`agent`);--> statement-breakpoint +CREATE INDEX `memory_scope_idx` ON `memory` (`scope`);--> statement-breakpoint +CREATE INDEX `memory_project_scope_idx` ON `memory` (`project_path`, `scope`);--> statement-breakpoint +UPDATE `memory` SET `type` = 'project' WHERE `type` IN ('error-solution', 'build-command', 'config-pattern', 'general');--> statement-breakpoint +UPDATE `memory` SET `type` = 'user' WHERE `type` = 'preference';--> statement-breakpoint +UPDATE `memory` SET `type` = 'feedback' WHERE `type` = 'decision'; diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 1422eca4d9af..59a7592a03a6 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -1083,7 +1083,10 @@ export namespace Config { .object({ enabled: z.boolean().optional().describe("Enable or disable memory system"), auto_extract: z.boolean().optional().describe("Enable automatic memory extraction from sessions"), - max_memory_lines: z.number().optional().describe("Maximum number of lines to load from MEMORY.md"), + max_memory_lines: 
z.number().optional().describe("Maximum number of lines to load from MEMORY.md (fallback)"), + max_memory_tokens: z.number().optional().describe("Token cap for injected memory context (default: 5000)"), + consolidation: z.boolean().optional().describe("Enable background memory consolidation on session start"), + scope: z.enum(["personal", "project", "global"]).optional().describe("Default scope for new memories"), }) .optional(), experimental: z diff --git a/packages/opencode/src/hook/execute.ts b/packages/opencode/src/hook/execute.ts index 87ecc4761379..fc4778766ee6 100644 --- a/packages/opencode/src/hook/execute.ts +++ b/packages/opencode/src/hook/execute.ts @@ -1,3 +1,4 @@ +import { Hash } from "../util/hash" import { Log } from "../util/log" import { Process } from "../util/process" import type { HookEntry } from "./schema" @@ -23,12 +24,72 @@ export interface HookEnv { export interface HookResult { action: "pass" | "block" message?: string + status?: "ok" | "error" | "timeout" | "unexpected_exit" + duration?: number + exitCode?: number +} + +// --------------------------------------------------------------------------- +// LRU cache for PreToolUse hooks +// --------------------------------------------------------------------------- +const CACHE_MAX = 100 +const CACHE_TTL = 5_000 // 5 seconds + +interface CacheEntry { + result: HookResult + expiresAt: number +} + +const preToolUseCache = new Map() + +function cacheKey(command: string, toolName: string | undefined, toolInput: string | undefined): string { + return Hash.fast(`${command}\0${toolName ?? ""}\0${toolInput ?? 
""}`) +} + +function getCached(key: string): HookResult | undefined { + const entry = preToolUseCache.get(key) + if (!entry) return undefined + if (Date.now() > entry.expiresAt) { + preToolUseCache.delete(key) + return undefined + } + // Move to end for LRU ordering + preToolUseCache.delete(key) + preToolUseCache.set(key, entry) + return entry.result +} + +function setCached(key: string, result: HookResult): void { + // Evict oldest entries when at capacity + if (preToolUseCache.size >= CACHE_MAX) { + const oldest = preToolUseCache.keys().next().value + if (oldest !== undefined) preToolUseCache.delete(oldest) + } + preToolUseCache.set(key, { + result, + expiresAt: Date.now() + CACHE_TTL, + }) +} + +/** Exposed for testing only. */ +export function clearHookCache(): void { + preToolUseCache.clear() } export async function runHook(entry: HookEntry, env: HookEnv): Promise { + const isPreToolUse = env.OPENCODE_HOOK_EVENT === "PreToolUse" const timeout = entry.timeout ?? DEFAULT_TIMEOUT const command = entry.command.replace(/^~/, process.env.HOME ?? "~") + // Check cache for PreToolUse hooks + if (isPreToolUse) { + const key = cacheKey(command, env.OPENCODE_TOOL_NAME, env.OPENCODE_TOOL_INPUT) + const cached = getCached(key) + if (cached) return cached + } + + const start = Date.now() + try { const result = await Process.run(["sh", "-c", command], { env: toEnvRecord(env), @@ -36,13 +97,31 @@ export async function runHook(entry: HookEntry, env: HookEnv): Promise { if (!entries || entries.length === 0) return { action: "pass" } - const messages: string[] = [] + const matched = entries.filter((entry) => matchesTool(entry.matcher, toolName)) + if (matched.length === 0) return { action: "pass" } + + const results = await Promise.all(matched.map((entry) => runHook(entry, env))) - for (const entry of entries) { - if (!matchesTool(entry.matcher, toolName)) continue + // Collect messages in original order, stopping after the first "block". 
+ // This preserves sequential message semantics while hooks execute in parallel. + const messages: string[] = [] + let blocked = false - const result = await runHook(entry, env) + for (const result of results) { if (result.message) messages.push(result.message) if (result.action === "block") { - return { action: "block", message: messages.join("\n") } + blocked = true + break } } + if (blocked) { + return { action: "block", message: messages.join("\n") } + } + return { action: "pass", message: messages.length > 0 ? messages.join("\n") : undefined, diff --git a/packages/opencode/src/hook/index.ts b/packages/opencode/src/hook/index.ts index 2c41d88c29ee..b4ecc06e0ab1 100644 --- a/packages/opencode/src/hook/index.ts +++ b/packages/opencode/src/hook/index.ts @@ -6,4 +6,5 @@ export { HookEntry as HookEntrySchema, HookConfig as HookConfigSchema, } from "./schema" -export { runHooks, runHook, matchesTool, safeToolInput, type HookResult, type HookEnv } from "./execute" +export { runHooks, runHook, matchesTool, safeToolInput, clearHookCache, type HookResult, type HookEnv } from "./execute" +export { verifyHookDeployment, type VerifyResult } from "./verify" diff --git a/packages/opencode/src/hook/verify.ts b/packages/opencode/src/hook/verify.ts new file mode 100644 index 000000000000..3e1858f0b485 --- /dev/null +++ b/packages/opencode/src/hook/verify.ts @@ -0,0 +1,127 @@ +import { access, constants, readdir } from "node:fs/promises" +import { join, resolve } from "node:path" +import type { HookConfig } from "./schema" + +export interface VerifyResult { + orphanScripts: string[] + missingScripts: string[] + permissionErrors: string[] +} + +/** + * Verify hook deployment integrity by comparing the hook directory contents + * against the registered config entries. 
+ * + * Detects: + * - Orphan scripts: files in hookDir not referenced by any config entry + * - Missing scripts: config entries pointing to scripts that do not exist + * - Permission errors: scripts that exist but lack execute permission + */ +export async function verifyHookDeployment( + hookDir: string, + config: HookConfig, +): Promise { + const resolvedDir = resolve(hookDir) + const result: VerifyResult = { + orphanScripts: [], + missingScripts: [], + permissionErrors: [], + } + + // Collect all script paths referenced in config + const referencedPaths = extractScriptPaths(config, resolvedDir) + + // List scripts on disk + const diskScripts = await listScripts(resolvedDir) + + // Orphan detection: scripts on disk not referenced in config + for (const scriptPath of diskScripts) { + if (!referencedPaths.has(scriptPath)) { + result.orphanScripts.push(scriptPath) + } + } + + // Missing + permission checks for referenced paths + const checks = [...referencedPaths].map(async (scriptPath) => { + const exists = await fileExists(scriptPath) + if (!exists) { + result.missingScripts.push(scriptPath) + return + } + const executable = await isExecutable(scriptPath) + if (!executable) { + result.permissionErrors.push(scriptPath) + } + }) + await Promise.all(checks) + + return result +} + +function extractScriptPaths(config: HookConfig, hookDir: string): Set { + const paths = new Set() + if (!config) return paths + + const events = ["PreToolUse", "PostToolUse", "SessionStart", "Notification"] as const + for (const event of events) { + const entries = config[event] + if (!entries) continue + for (const entry of entries) { + const scriptPath = resolveScriptPath(entry.command, hookDir) + if (scriptPath) paths.add(scriptPath) + } + } + return paths +} + +/** + * Extract the script file path from a hook command string. 
+ * Handles: + * - Direct paths: `/path/to/script.sh` + * - Tilde paths: `~/hooks/script.sh` + * - Commands with args: `/path/to/script.sh --flag` + * - Inline shell (no path): `echo "hello"` -> returns null + */ +function resolveScriptPath(command: string, hookDir: string): string | null { + const expanded = command.replace(/^~/, process.env.HOME ?? "~") + const firstToken = expanded.split(/\s+/)[0] + if (!firstToken) return null + + // Only treat as a file path if it contains a slash (absolute or relative) + if (!firstToken.includes("/")) return null + + // Resolve relative paths against hookDir + if (!firstToken.startsWith("/")) { + return resolve(hookDir, firstToken) + } + return firstToken +} + +async function listScripts(dir: string): Promise { + try { + const entries = await readdir(dir, { withFileTypes: true }) + return entries + .filter((e) => e.isFile() && e.name.endsWith(".sh")) + .map((e) => join(dir, e.name)) + } catch { + return [] + } +} + +async function fileExists(path: string): Promise { + try { + await access(path, constants.F_OK) + return true + } catch { + return false + } +} + +async function isExecutable(path: string): Promise { + try { + await access(path, constants.X_OK) + return true + } catch { + return false + } +} diff --git a/packages/opencode/src/memory/extractor.ts b/packages/opencode/src/memory/extractor.ts index 72c38632c853..63a9eb96decd 100644 --- a/packages/opencode/src/memory/extractor.ts +++ b/packages/opencode/src/memory/extractor.ts @@ -48,17 +48,18 @@ export namespace MemoryExtractor { return state } - export function trackCommand(sessionID: string, command: string) { + export function trackCommand(sessionID: string, command: string, agent?: string) { const state = getState(sessionID) const count = (state.commands.get(command) ?? 
0) + 1 state.commands.set(command, count) if (count === COMMAND_THRESHOLD) { state.pending.push({ projectPath: Instance.directory, - topic: `Frequently used command: ${command}`, - type: "build-command", + name: `Frequently used command: ${command}`, + type: "project", content: `Command \`${command}\` has been used ${count}+ times in this session.`, sessionID, + agent, }) maybeFlush(sessionID) } @@ -69,44 +70,61 @@ export namespace MemoryExtractor { state.errors.push(error) } - export function trackFix(sessionID: string, fix: string) { + export function trackFix(sessionID: string, fix: string, agent?: string) { const state = getState(sessionID) state.fixes.push(fix) if (state.errors.length > 0) { const lastError = state.errors[state.errors.length - 1] state.pending.push({ projectPath: Instance.directory, - topic: `Error pattern and fix: ${lastError.slice(0, 50)}`, - type: "error-solution", + name: `Error pattern and fix: ${lastError.slice(0, 50)}`, + type: "feedback", content: `**Error:** ${lastError}\n**Fix:** ${fix}`, sessionID, + agent, }) state.errors = [] maybeFlush(sessionID) } } - export function trackPreference(sessionID: string, preference: string) { + export function trackPreference(sessionID: string, preference: string, agent?: string) { const state = getState(sessionID) state.pending.push({ projectPath: Instance.directory, - topic: `User preference: ${preference.slice(0, 50)}`, - type: "preference", + name: `User preference: ${preference.slice(0, 50)}`, + type: "user", + scope: "personal", content: preference, sessionID, + agent, }) maybeFlush(sessionID) } - export function trackConfigChange(sessionID: string, file: string, change: string) { + export function trackConfigChange(sessionID: string, file: string, change: string, agent?: string) { const state = getState(sessionID) state.configChanges.push(`${file}: ${change}`) state.pending.push({ projectPath: Instance.directory, - topic: `Config file modification: ${file}`, - type: "config-pattern", + name: 
`Config file modification: ${file}`, + type: "project", content: `Config file \`${file}\` was modified: ${change}`, sessionID, + agent, + }) + maybeFlush(sessionID) + } + + export function trackDecision(sessionID: string, decision: string, reasoning: string, agent?: string) { + const state = getState(sessionID) + state.pending.push({ + projectPath: Instance.directory, + name: `Decision: ${decision.slice(0, 50)}`, + type: "reference", + content: `**Decision:** ${decision}\n**Reasoning:** ${reasoning}`, + sessionID, + agent, }) maybeFlush(sessionID) } @@ -137,16 +155,20 @@ export namespace MemoryExtractor { for (const entry of batch) { try { await MemoryStore.runPromise((svc) => svc.create(entry)) - // Sync to filesystem so MemoryInjector picks up extracted entries await MemoryFile.writeEntry({ - filename: slugify(entry.topic) + ".md", - frontmatter: { topic: entry.topic, type: entry.type }, + filename: slugify(entry.name) + ".md", + frontmatter: { + name: entry.name, + type: entry.type, + scope: entry.scope, + agent: entry.agent, + }, content: entry.content, }).catch((err) => { - log.warn("failed to sync memory to file", { error: err, topic: entry.topic }) + log.warn("failed to sync memory to file", { error: err, name: entry.name }) }) } catch (err) { - log.warn("failed to flush memory entry", { error: err, topic: entry.topic }) + log.warn("failed to flush memory entry", { error: err, name: entry.name }) failed.push(entry) } } @@ -168,13 +190,56 @@ export namespace MemoryExtractor { if (entries.length === 0) return const lines = [ "# Memory Index", + ``, + ``, "", - ...entries.map((e) => `- [${e.frontmatter.topic}](${e.filename}) — ${e.frontmatter.type}`), + ...entries.map((e) => { + const scope = e.frontmatter.scope ? ` (${e.frontmatter.scope})` : "" + return `- [${e.frontmatter.name}](${e.filename}) -- ${e.frontmatter.type}${scope}` + }), "", ] await MemoryFile.writeIndex(lines.join("\n")) } + /** + * Run background consolidation at session start. 
+ * Merges duplicate entries and updates the index. Non-blocking. + */ + export async function consolidateOnSessionStart(projectPath: string): Promise { + try { + const entries = await MemoryStore.runPromise((svc) => svc.list(projectPath)) + if (entries.length === 0) return + + // Group by similar name (exact match, case-insensitive) + const byName = new Map() + for (const entry of entries) { + const key = entry.name.toLowerCase().trim() + const group = byName.get(key) ?? [] + group.push(entry) + byName.set(key, group) + } + + let merged = 0 + for (const [, group] of byName) { + if (group.length <= 1) continue + // Keep highest accessCount entry + group.sort((a, b) => b.accessCount - a.accessCount) + for (let i = 1; i < group.length; i++) { + await MemoryStore.runPromise((svc) => svc.remove(group[i].id)) + merged++ + } + } + + if (merged > 0) { + await updateIndex() + log.info("session-start consolidation", { projectPath, merged }) + } + } catch (err) { + log.warn("consolidation failed", { error: err, projectPath }) + } + } + export async function cleanup(sessionID: string) { await flush(sessionID).catch((err) => { log.warn("failed to flush on cleanup", { error: err }) diff --git a/packages/opencode/src/memory/file.ts b/packages/opencode/src/memory/file.ts index c3a3b77423a8..34826e53e512 100644 --- a/packages/opencode/src/memory/file.ts +++ b/packages/opencode/src/memory/file.ts @@ -34,28 +34,62 @@ function parseFrontmatter(raw: string): { frontmatter: Memory.Frontmatter; conte if (idx === -1) continue fm[line.slice(0, idx).trim()] = line.slice(idx + 1).trim() } - if (!fm.topic || !fm.type) return undefined + + // Support both new format (name/description/type/scope) and legacy (topic/type) + const name = fm.name || fm.topic + if (!name) return undefined + + let type: Memory.Type + const rawType = fm.type + if (!rawType) return undefined + const validTypes: readonly string[] = Memory.TYPES - if (!validTypes.includes(fm.type)) return undefined + if 
(validTypes.includes(rawType)) { + type = rawType as Memory.Type + } else { + // Map legacy types to new types + const legacyTypes: readonly string[] = Memory.LEGACY_TYPES + if (legacyTypes.includes(rawType)) { + type = Memory.LEGACY_TYPE_MAP[rawType as Memory.LegacyType] + } else { + return undefined + } + } + + const validScopes: readonly string[] = Memory.SCOPES + const scope = fm.scope && validScopes.includes(fm.scope) + ? (fm.scope as Memory.Scope) + : undefined + return { frontmatter: { - topic: fm.topic, - type: fm.type as Memory.Type, + name, + description: fm.description || undefined, + type, + scope, + agent: fm.agent || undefined, }, content: match[2].trim(), } } function formatFrontmatter(entry: Memory.FileEntry): string { - return [ + const lines = [ "---", - `topic: ${entry.frontmatter.topic}`, - `type: ${entry.frontmatter.type}`, - "---", - "", - entry.content, - "", - ].join("\n") + `name: ${entry.frontmatter.name}`, + ] + if (entry.frontmatter.description) { + lines.push(`description: ${entry.frontmatter.description}`) + } + lines.push(`type: ${entry.frontmatter.type}`) + if (entry.frontmatter.scope) { + lines.push(`scope: ${entry.frontmatter.scope}`) + } + if (entry.frontmatter.agent) { + lines.push(`agent: ${entry.frontmatter.agent}`) + } + lines.push("---", "", entry.content, "") + return lines.join("\n") } export namespace MemoryFile { @@ -114,6 +148,10 @@ export namespace MemoryFile { return entries } + export function agentMemoryDir(agent: string) { + return path.join(Instance.directory, MEMORY_DIR, "agents", agent) + } + export function getMemoryDir() { return memoryDir() } diff --git a/packages/opencode/src/memory/injector.ts b/packages/opencode/src/memory/injector.ts index cf460a7985d3..e5707bc2b52a 100644 --- a/packages/opencode/src/memory/injector.ts +++ b/packages/opencode/src/memory/injector.ts @@ -1,20 +1,125 @@ import { Config } from "@/config/config" import { Log } from "@/util/log" import { MemoryFile } from "./file" +import { 
MemoryStore } from "./store" +import { Instance } from "@/project/instance" +import type { Memory } from "./types" const log = Log.create({ service: "memory.injector" }) -// NOTE: beast.txt (used for GPT-4/o1/o3 system prompts) references -// `.github/instructions/memory.instruction.md` as the memory file path. -// Our injector uses `.opencode/memory/MEMORY.md` which is correct for the -// OpenCode memory system. The beast.txt path is an upstream convention and -// should NOT be changed here. +// Rough token estimate: ~4 chars per token for English/code mixed content +function estimateTokens(text: string): number { + return Math.ceil(text.length / 4) +} + +function relevanceWeight(entry: Memory.Info): number { + const daysSinceUpdate = (Date.now() - entry.timeUpdated) / (1000 * 60 * 60 * 24) + const recencyWeight = 1.0 / (1 + daysSinceUpdate / 30) + return entry.relevanceScore * recencyWeight * Math.log2(entry.accessCount + 2) +} + export namespace MemoryInjector { - export async function load(): Promise { + export async function load(agent?: string): Promise { const config = await Config.get() if (config.memory?.enabled === false) return undefined - const maxLines = config.memory?.max_memory_lines ?? 200 + const maxTokens = config.memory?.max_memory_tokens ?? 5000 + + // Try DB-first, fallback to file-based + let entries: Memory.Info[] = [] + try { + entries = await MemoryStore.runPromise((svc) => svc.list(Instance.directory)) + } catch { + // DB not available, fallback to file-based loading + return loadFromFile(config.memory?.max_memory_lines ?? 200) + } + + // Load agent-specific entries if agent is specified + let agentEntries: Memory.Info[] = [] + if (agent) { + try { + agentEntries = await MemoryStore.runPromise((svc) => svc.listByAgent(Instance.directory, agent)) + } catch { + // Ignore agent memory load failures + } + } + + if (entries.length === 0 && agentEntries.length === 0) { + return loadFromFile(config.memory?.max_memory_lines ?? 
200) + } + + // Sort by relevance weight (highest first) + entries.sort((a, b) => relevanceWeight(b) - relevanceWeight(a)) + agentEntries.sort((a, b) => relevanceWeight(b) - relevanceWeight(a)) + + // Deduplicate agent entries that are already in general entries + const generalIds = new Set(entries.map((e) => e.id)) + agentEntries = agentEntries.filter((e) => !generalIds.has(e.id)) + + // Build sections within token budget + const sections: string[] = [] + let tokenBudget = maxTokens + + // Agent-specific section first (highest priority) + if (agentEntries.length > 0) { + const agentSection = buildSection("Agent-Specific Knowledge", agentEntries, tokenBudget) + if (agentSection.text) { + sections.push(agentSection.text) + tokenBudget -= agentSection.tokens + } + } + + // Group general entries by type + const projectEntries = entries.filter((e) => e.type === "project") + const userEntries = entries.filter((e) => e.type === "user") + const feedbackEntries = entries.filter((e) => e.type === "feedback") + const referenceEntries = entries.filter((e) => e.type === "reference") + + for (const [title, group] of [ + ["Project Knowledge", projectEntries], + ["User Preferences", userEntries], + ["Feedback & Patterns", feedbackEntries], + ["Reference", referenceEntries], + ] as const) { + if (group.length === 0 || tokenBudget <= 0) continue + const section = buildSection(title, group, tokenBudget) + if (section.text) { + sections.push(section.text) + tokenBudget -= section.tokens + } + } + + if (sections.length === 0) return undefined + + return [ + "# Memory", + "The following memory entries were loaded from the project memory system.", + "These represent learned patterns, preferences, and context from previous sessions.", + "Note: verify referenced files/functions still exist before acting on these memories.", + "", + ...sections, + ].join("\n") + } + + function buildSection(title: string, entries: Memory.Info[], tokenBudget: number): { text: string; tokens: number } { + 
const header = `## ${title}\n` + let tokens = estimateTokens(header) + const lines: string[] = [header] + + for (const entry of entries) { + const desc = entry.description ? ` -- ${entry.description}` : "" + const line = `- **${entry.name}** (${entry.type}): ${entry.content.split("\n")[0]}${desc}\n` + const lineTokens = estimateTokens(line) + if (tokens + lineTokens > tokenBudget) break + lines.push(line) + tokens += lineTokens + } + + if (lines.length <= 1) return { text: "", tokens: 0 } + return { text: lines.join(""), tokens } + } + + async function loadFromFile(maxLines: number): Promise { const content = await MemoryFile.readIndex(maxLines).catch((err) => { log.warn("failed to read MEMORY.md", { error: err }) return undefined @@ -26,6 +131,7 @@ export namespace MemoryInjector { "# Memory", "The following memory entries were loaded from MEMORY.md in the project directory.", "These represent learned patterns, preferences, and context from previous sessions.", + "Note: verify referenced files/functions still exist before acting on these memories.", "", content, ].join("\n") diff --git a/packages/opencode/src/memory/maintenance.ts b/packages/opencode/src/memory/maintenance.ts new file mode 100644 index 000000000000..f3cd3835890c --- /dev/null +++ b/packages/opencode/src/memory/maintenance.ts @@ -0,0 +1,219 @@ +import path from "path" +import { Log } from "@/util/log" +import { MemoryStore } from "./store" +import { MemoryFile } from "./file" +import { MemoryPromoter } from "./promoter" +import type { Memory } from "./types" + +const log = Log.create({ service: "memory.maintenance" }) + +// Relevance decays by 5% per week of inactivity +const DECAY_RATE = 0.95 +const DECAY_PERIOD_DAYS = 7 +const STALE_THRESHOLD = 0.1 +export namespace MemoryMaintenance { + /** + * Run the full maintenance cycle. Non-blocking, called at session start. 
+ */ + export async function run(projectPath: string): Promise { + try { + const merged = await mergeDuplicates(projectPath) + const decayed = await decayRelevance(projectPath) + const removed = await removeStale(projectPath) + const verified = await verifyReferences(projectPath) + const promoted = await MemoryPromoter.autoPromote(projectPath) + await reindex(projectPath) + + if (merged + decayed + removed + verified + promoted > 0) { + log.info("maintenance complete", { projectPath, merged, decayed, removed, verified, promoted }) + } + } catch (err) { + log.warn("maintenance failed", { error: err, projectPath }) + } + } + + /** + * Merge entries with identical names (keep highest access count). + */ + export async function mergeDuplicates(projectPath: string): Promise { + const entries = await MemoryStore.runPromise((svc) => svc.list(projectPath)) + const byName = new Map() + + for (const entry of entries) { + const key = entry.name.toLowerCase().trim() + const group = byName.get(key) ?? [] + group.push(entry) + byName.set(key, group) + } + + let merged = 0 + for (const [, group] of byName) { + if (group.length <= 1) continue + + // Keep the entry with highest access count + group.sort((a, b) => b.accessCount - a.accessCount) + const keeper = group[0] + + // Merge content from duplicates into keeper + const mergedContent = group.map((e) => e.content).join("\n\n") + if (mergedContent !== keeper.content) { + await MemoryStore.runPromise((svc) => + svc.update({ id: keeper.id, content: mergedContent }), + ) + } + + // Remove duplicates + for (let i = 1; i < group.length; i++) { + await MemoryStore.runPromise((svc) => svc.remove(group[i].id)) + merged++ + } + } + + return merged + } + + /** + * Decay relevance scores for entries not accessed recently. 
+ */ + export async function decayRelevance(projectPath: string): Promise { + const entries = await MemoryStore.runPromise((svc) => svc.list(projectPath)) + let decayed = 0 + + for (const entry of entries) { + const daysSinceUpdate = (Date.now() - entry.timeUpdated) / (1000 * 60 * 60 * 24) + const periods = Math.floor(daysSinceUpdate / DECAY_PERIOD_DAYS) + if (periods <= 0) continue + + const newScore = entry.relevanceScore * Math.pow(DECAY_RATE, periods) + if (Math.abs(newScore - entry.relevanceScore) < 0.001) continue + + await MemoryStore.runPromise((svc) => svc.updateRelevance(entry.id, newScore)) + decayed++ + } + + return decayed + } + + /** + * Remove entries with relevance score below threshold. + */ + export async function removeStale(projectPath: string): Promise { + const entries = await MemoryStore.runPromise((svc) => svc.list(projectPath)) + let removed = 0 + + for (const entry of entries) { + if (entry.relevanceScore < STALE_THRESHOLD) { + await MemoryStore.runPromise((svc) => svc.remove(entry.id)) + + // Also remove the corresponding file + const filename = entry.name + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-|-$/g, "") + .slice(0, 80) + ".md" + await MemoryFile.removeEntry(filename).catch(() => {}) + + removed++ + log.info("removed stale memory", { id: entry.id, name: entry.name, score: entry.relevanceScore }) + } + } + + return removed + } + + /** + * Check if file paths referenced in memory content still exist. + * Reduces relevance for entries referencing deleted files. 
+ */ + export async function verifyReferences(projectPath: string): Promise { + const entries = await MemoryStore.runPromise((svc) => svc.list(projectPath)) + let verified = 0 + + for (const entry of entries) { + const paths = extractFilePaths(entry.content) + if (paths.length === 0) continue + + let missingCount = 0 + for (const filePath of paths) { + const resolved = path.resolve(projectPath, filePath) + // Skip paths that escape the project directory + if (!resolved.startsWith(projectPath + path.sep) && resolved !== projectPath) { + continue + } + const exists = await Bun.file(resolved).exists().catch(() => false) + if (!exists) missingCount++ + } + + if (missingCount > 0) { + const penaltyFactor = 1.0 - (missingCount / paths.length) * 0.5 + const newScore = entry.relevanceScore * penaltyFactor + await MemoryStore.runPromise((svc) => + svc.update({ + id: entry.id, + relevanceScore: newScore, + timeLastVerified: Date.now(), + }), + ) + verified++ + } else { + // Mark as verified without penalty + await MemoryStore.runPromise((svc) => + svc.update({ id: entry.id, timeLastVerified: Date.now() }), + ) + } + } + + return verified + } + + /** + * Regenerate MEMORY.md index from DB entries sorted by relevance. 
+ */ + export async function reindex(projectPath: string): Promise { + const entries = await MemoryStore.runPromise((svc) => svc.list(projectPath)) + if (entries.length === 0) return + + entries.sort((a, b) => b.relevanceScore - a.relevanceScore) + + const grouped = { + project: entries.filter((e) => e.type === "project"), + user: entries.filter((e) => e.type === "user"), + feedback: entries.filter((e) => e.type === "feedback"), + reference: entries.filter((e) => e.type === "reference"), + } + + const lines: string[] = [ + "# Memory Index", + ``, + ``, + "", + ] + + for (const [title, group] of [ + ["Project Knowledge", grouped.project], + ["User Preferences", grouped.user], + ["Feedback & Patterns", grouped.feedback], + ["Reference", grouped.reference], + ] as const) { + if (group.length === 0) continue + lines.push(`## ${title}`, "") + for (const entry of group) { + const slug = entry.name + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-|-$/g, "") + .slice(0, 80) + const desc = entry.description ? 
` -- ${entry.description}` : "" + lines.push(`- [${entry.name}](${slug}.md)${desc}`) + } + lines.push("") + } + + await MemoryFile.writeIndex(lines.join("\n")) + } + + function extractFilePaths(content: string): string[] { + const regex = /(?:^|\s)(\.\/[^\s]+|\/[^\s]+|src\/[^\s]+|packages\/[^\s]+)/gm + return [...content.matchAll(regex)].map((m) => m[1].trim()) + } +} diff --git a/packages/opencode/src/memory/memory.sql.ts b/packages/opencode/src/memory/memory.sql.ts index 02d165a80d76..f1cf870ac7c7 100644 --- a/packages/opencode/src/memory/memory.sql.ts +++ b/packages/opencode/src/memory/memory.sql.ts @@ -1,4 +1,4 @@ -import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core" +import { sqliteTable, text, integer, real, index } from "drizzle-orm/sqlite-core" import { Timestamps } from "../storage/schema.sql" export const MemoryTable = sqliteTable( @@ -11,10 +11,19 @@ export const MemoryTable = sqliteTable( content: text().notNull(), session_id: text(), access_count: integer().default(0), + scope: text().notNull().default("project"), + description: text(), + agent: text(), + relevance_score: real().notNull().default(1.0), + time_last_verified: integer(), + promoted_from: text(), ...Timestamps, }, (table) => [ index("memory_project_path_idx").on(table.project_path), index("memory_type_idx").on(table.type), + index("memory_agent_idx").on(table.agent), + index("memory_scope_idx").on(table.scope), + index("memory_project_scope_idx").on(table.project_path, table.scope), ], ) diff --git a/packages/opencode/src/memory/promoter.ts b/packages/opencode/src/memory/promoter.ts new file mode 100644 index 000000000000..4de6fda01f04 --- /dev/null +++ b/packages/opencode/src/memory/promoter.ts @@ -0,0 +1,52 @@ +import { Log } from "@/util/log" +import { MemoryStore } from "./store" +import type { Memory } from "./types" + +const log = Log.create({ service: "memory.promoter" }) + +const AUTO_PROMOTE_THRESHOLD = 5 + +export namespace MemoryPromoter { + /** + * 
Promote a specific memory entry to a wider scope. + */ + export async function promote(id: string, targetScope: Memory.Scope): Promise { + const result = await MemoryStore.runPromise((svc) => svc.promote(id, targetScope)) + if (result) { + log.info("memory promoted", { id, to: targetScope }) + } + } + + /** + * Detect personal entries that should be promoted to project scope. + * Criteria: accessCount > threshold (accessed across multiple sessions). + */ + export async function detectCandidates(projectPath: string): Promise { + const personal = await MemoryStore.runPromise((svc) => svc.listByScope(projectPath, "personal")) + return personal.filter((entry) => entry.accessCount > AUTO_PROMOTE_THRESHOLD) + } + + /** + * Auto-promote eligible personal entries to project scope. + * Called during background consolidation. + */ + export async function autoPromote(projectPath: string): Promise { + const candidates = await detectCandidates(projectPath) + let promoted = 0 + + for (const entry of candidates) { + try { + await MemoryStore.runPromise((svc) => svc.promote(entry.id, "project")) + promoted++ + log.info("auto-promoted memory", { id: entry.id, name: entry.name, accessCount: entry.accessCount }) + } catch (err) { + log.warn("failed to auto-promote memory", { error: err, id: entry.id }) + } + } + + if (promoted > 0) { + log.info("auto-promotion complete", { projectPath, promoted, candidates: candidates.length }) + } + return promoted + } +} diff --git a/packages/opencode/src/memory/store.ts b/packages/opencode/src/memory/store.ts index 811e17d53849..783b413c2b5b 100644 --- a/packages/opencode/src/memory/store.ts +++ b/packages/opencode/src/memory/store.ts @@ -1,4 +1,4 @@ -import { Database, eq, and, sql } from "@/storage/db" +import { Database, eq, and, sql, lt } from "@/storage/db" import { ulid } from "ulid" import { Effect, Layer, ServiceMap } from "effect" import { MemoryTable } from "./memory.sql" @@ -12,13 +12,19 @@ function toInfo(row: typeof 
MemoryTable.$inferSelect): Memory.Info { return { id: row.id, projectPath: row.project_path, - topic: row.topic, + name: row.topic, + description: row.description ?? undefined, type: row.type as Memory.Type, + scope: (row.scope ?? "project") as Memory.Scope, content: row.content, + agent: row.agent ?? undefined, sessionID: row.session_id ?? undefined, accessCount: row.access_count ?? 0, + relevanceScore: row.relevance_score ?? 1.0, timeCreated: row.time_created, timeUpdated: row.time_updated, + timeLastVerified: row.time_last_verified ?? undefined, + promotedFrom: row.promoted_from ?? undefined, } } @@ -33,6 +39,11 @@ export namespace MemoryStore { readonly update: (input: Memory.Update) => Effect.Effect readonly remove: (id: string) => Effect.Effect readonly listByType: (projectPath: string, type: Memory.Type) => Effect.Effect + readonly listByScope: (projectPath: string, scope: Memory.Scope) => Effect.Effect + readonly listByAgent: (projectPath: string, agent: string) => Effect.Effect + readonly listStale: (projectPath: string, maxAgeDays: number) => Effect.Effect + readonly updateRelevance: (id: string, score: number) => Effect.Effect + readonly promote: (id: string, targetScope: Memory.Scope) => Effect.Effect } export class Service extends ServiceMap.Service()("@opencode/MemoryStore") {} @@ -83,22 +94,26 @@ export namespace MemoryStore { const row = { id, project_path: input.projectPath, - topic: input.topic, + topic: input.name, type: input.type, content: input.content, session_id: input.sessionID ?? null, access_count: 0, + scope: input.scope ?? "project", + description: input.description ?? null, + agent: input.agent ?? 
null, + relevance_score: 1.0, + time_last_verified: null, + promoted_from: null, time_created: now, time_updated: now, } yield* db((d) => d.insert(MemoryTable).values(row).run()) - log.info("memory created", { id, topic: input.topic, type: input.type }) + log.info("memory created", { id, name: input.name, type: input.type, scope: input.scope ?? "project" }) return toInfo(row) }) const update = Effect.fn("MemoryStore.update")(function* (input: Memory.Update) { - // Use direct select to check existence without incrementing access_count - // (the public get() method has a side effect of incrementing access_count) const existing = yield* db((d) => d .select() @@ -108,12 +123,15 @@ export namespace MemoryStore { ) if (!existing) return undefined const values: Record = { time_updated: Date.now() } - if (input.topic !== undefined) values.topic = input.topic + if (input.name !== undefined) values.topic = input.name + if (input.description !== undefined) values.description = input.description if (input.type !== undefined) values.type = input.type + if (input.scope !== undefined) values.scope = input.scope if (input.content !== undefined) values.content = input.content + if (input.relevanceScore !== undefined) values.relevance_score = input.relevanceScore + if (input.timeLastVerified !== undefined) values.time_last_verified = input.timeLastVerified yield* db((d) => d.update(MemoryTable).set(values).where(eq(MemoryTable.id, input.id)).run()) log.info("memory updated", { id: input.id }) - // Return updated row without incrementing access_count const updated = yield* db((d) => d .select() @@ -140,7 +158,87 @@ export namespace MemoryStore { return rows.map(toInfo) }) - return Service.of({ list, get, create, update, remove, listByType }) + const listByScope = Effect.fn("MemoryStore.listByScope")(function* (projectPath: string, scope: Memory.Scope) { + const rows = yield* db((d) => + d + .select() + .from(MemoryTable) + .where(and(eq(MemoryTable.project_path, projectPath), 
eq(MemoryTable.scope, scope))) + .all(), + ) + return rows.map(toInfo) + }) + + const listByAgent = Effect.fn("MemoryStore.listByAgent")(function* (projectPath: string, agent: string) { + const rows = yield* db((d) => + d + .select() + .from(MemoryTable) + .where(and(eq(MemoryTable.project_path, projectPath), eq(MemoryTable.agent, agent))) + .all(), + ) + return rows.map(toInfo) + }) + + const listStale = Effect.fn("MemoryStore.listStale")(function* (projectPath: string, maxAgeDays: number) { + const cutoff = Date.now() - maxAgeDays * 24 * 60 * 60 * 1000 + const rows = yield* db((d) => + d + .select() + .from(MemoryTable) + .where(and(eq(MemoryTable.project_path, projectPath), lt(MemoryTable.time_updated, cutoff))) + .all(), + ) + return rows.map(toInfo) + }) + + const updateRelevance = Effect.fn("MemoryStore.updateRelevance")(function* (id: string, score: number) { + yield* db((d) => + d + .update(MemoryTable) + .set({ relevance_score: score }) + .where(eq(MemoryTable.id, id)) + .run(), + ) + }) + + const promote = Effect.fn("MemoryStore.promote")(function* (id: string, targetScope: Memory.Scope) { + const existing = yield* db((d) => + d + .select() + .from(MemoryTable) + .where(eq(MemoryTable.id, id)) + .get(), + ) + if (!existing) return undefined + const previousScope = existing.scope ?? "personal" + yield* db((d) => + d + .update(MemoryTable) + .set({ + scope: targetScope, + promoted_from: previousScope, + time_updated: Date.now(), + }) + .where(eq(MemoryTable.id, id)) + .run(), + ) + log.info("memory promoted", { id, from: previousScope, to: targetScope }) + const updated = yield* db((d) => + d + .select() + .from(MemoryTable) + .where(eq(MemoryTable.id, id)) + .get(), + ) + return updated ? 
toInfo(updated) : undefined + }) + + return Service.of({ + list, get, create, update, remove, + listByType, listByScope, listByAgent, listStale, + updateRelevance, promote, + }) }), ) diff --git a/packages/opencode/src/memory/summary-bridge.ts b/packages/opencode/src/memory/summary-bridge.ts new file mode 100644 index 000000000000..c08a66b0ee2d --- /dev/null +++ b/packages/opencode/src/memory/summary-bridge.ts @@ -0,0 +1,63 @@ +import { Log } from "@/util/log" +import { MemoryExtractor } from "./extractor" + +const log = Log.create({ service: "memory.summary-bridge" }) + +// Marker used in compaction prompts to identify memory candidates +const MEMORY_CANDIDATES_MARKER = "## Memory Candidates" + +export namespace SummaryBridge { + /** + * Extracts long-term memory candidates from compaction summary text. + * Called after compaction completes, fire-and-forget. + */ + export async function extractMemoryCandidates( + summaryText: string, + sessionID: string, + projectPath: string, + ): Promise { + const section = extractSection(summaryText, MEMORY_CANDIDATES_MARKER) + if (!section) return + + const candidates = parseMemoryCandidates(section) + if (candidates.length === 0) return + + for (const candidate of candidates) { + try { + MemoryExtractor.trackDecision(sessionID, candidate.name, candidate.content) + } catch (err) { + log.warn("failed to extract memory candidate", { error: err, name: candidate.name }) + } + } + + log.info("extracted memory candidates from summary", { count: candidates.length, sessionID }) + } + + function extractSection(text: string, marker: string): string | undefined { + const idx = text.indexOf(marker) + if (idx === -1) return undefined + + const afterMarker = text.slice(idx + marker.length) + // Find the next heading (## or end of text) + const nextHeading = afterMarker.search(/\n## /) + const section = nextHeading === -1 ? 
afterMarker : afterMarker.slice(0, nextHeading) + return section.trim() + } + + function parseMemoryCandidates(section: string): Array<{ name: string; content: string }> { + const candidates: Array<{ name: string; content: string }> = [] + const lines = section.split("\n") + + for (const line of lines) { + // Parse markdown list items: "- **Name**: Description" or "- Name: Description" + const match = line.match(/^[-*]\s+\*?\*?(.+?)\*?\*?:\s*(.+)$/) + if (match) { + candidates.push({ + name: match[1].trim(), + content: match[2].trim(), + }) + } + } + return candidates + } +} diff --git a/packages/opencode/src/memory/types.ts b/packages/opencode/src/memory/types.ts index 2c0cab26f5f0..b1d7b803c069 100644 --- a/packages/opencode/src/memory/types.ts +++ b/packages/opencode/src/memory/types.ts @@ -1,37 +1,77 @@ export namespace Memory { - export const TYPES = ["error-solution", "build-command", "preference", "decision", "config-pattern", "general"] as const + // CC-compatible 4 types (migrated from: error-solution, build-command, preference, decision, config-pattern, general) + export const TYPES = ["user", "feedback", "project", "reference"] as const export type Type = (typeof TYPES)[number] + // Legacy types for backward compatibility in file parsing + export const LEGACY_TYPES = ["error-solution", "build-command", "preference", "decision", "config-pattern", "general"] as const + export type LegacyType = (typeof LEGACY_TYPES)[number] + + export const LEGACY_TYPE_MAP: Record = { + "error-solution": "project", + "build-command": "project", + "preference": "user", + "decision": "feedback", + "config-pattern": "project", + "general": "project", + } + + // Three-tier scope + export const SCOPES = ["personal", "project", "global"] as const + export type Scope = (typeof SCOPES)[number] + export type Info = { id: string projectPath: string - topic: string + name: string + description?: string type: Type + scope: Scope content: string + agent?: string sessionID?: string 
accessCount: number + relevanceScore: number timeCreated: number timeUpdated: number + timeLastVerified?: number + promotedFrom?: string } export type Create = { projectPath: string - topic: string + name: string type: Type content: string + description?: string + scope?: Scope + agent?: string sessionID?: string } export type Update = { id: string - topic?: string + name?: string + description?: string type?: Type + scope?: Scope content?: string + relevanceScore?: number + timeLastVerified?: number } export type Frontmatter = { - topic: string + name: string + description?: string type: Type + scope?: Scope + agent?: string + } + + // Legacy frontmatter for backward-compatible parsing + export type LegacyFrontmatter = { + topic: string + type: string } export type FileEntry = { diff --git a/packages/opencode/src/session/instruction.ts b/packages/opencode/src/session/instruction.ts index 039b14cbd853..28c299a36e71 100644 --- a/packages/opencode/src/session/instruction.ts +++ b/packages/opencode/src/session/instruction.ts @@ -186,13 +186,11 @@ export namespace Instruction { ? 
[] : yield* Effect.promise(() => filterSymlinkEscapes(rawProjectRuleFiles, projectRulesDir)) - // Project rules override global by filename - const projectFilenames = new Set(projectRuleFiles.map((p) => path.basename(p))) + // Include all global rules (even if same filename exists in project) for (const rule of globalRuleFiles) { - if (!projectFilenames.has(path.basename(rule))) { - paths.add(path.resolve(rule)) - } + paths.add(path.resolve(rule)) } + // Include all project rules for (const rule of projectRuleFiles) { paths.add(path.resolve(rule)) } diff --git a/packages/opencode/src/session/message-v2.ts b/packages/opencode/src/session/message-v2.ts index 61c159646d88..9c3065e2b819 100644 --- a/packages/opencode/src/session/message-v2.ts +++ b/packages/opencode/src/session/message-v2.ts @@ -112,6 +112,7 @@ export namespace MessageV2 { text: z.string(), synthetic: z.boolean().optional(), ignored: z.boolean().optional(), + transcriptOnly: z.boolean().optional(), time: z .object({ start: z.number(), @@ -127,6 +128,7 @@ export namespace MessageV2 { export const ReasoningPart = PartBase.extend({ type: z.literal("reasoning"), text: z.string(), + transcriptOnly: z.boolean().optional(), metadata: z.record(z.string(), z.any()).optional(), time: z.object({ start: z.number(), @@ -343,6 +345,7 @@ export namespace MessageV2 { callID: z.string(), tool: z.string(), state: ToolState, + transcriptOnly: z.boolean().optional(), metadata: z.record(z.string(), z.any()).optional(), }).meta({ ref: "ToolPart", @@ -644,6 +647,9 @@ export namespace MessageV2 { for (const msg of input) { if (msg.parts.length === 0) continue + // Strip transcript-only parts before converting to model messages + const activeParts = msg.parts.filter((part) => !("transcriptOnly" in part && part.transcriptOnly)) + if (msg.info.role === "user") { const userMessage: UIMessage = { id: msg.info.id, @@ -651,7 +657,7 @@ export namespace MessageV2 { parts: [], } result.push(userMessage) - for (const part of msg.parts) 
{ + for (const part of activeParts) { if (part.type === "text" && !part.ignored) userMessage.parts.push({ type: "text", @@ -707,7 +713,7 @@ export namespace MessageV2 { role: "assistant", parts: [], } - for (const part of msg.parts) { + for (const part of activeParts) { if (part.type === "text") assistantMessage.parts.push({ type: "text", diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 9eaee951657c..e9bb77c98713 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -1404,6 +1404,13 @@ NOTE: At any point in time through this workflow you should feel free to ask the }), ) + // Background memory maintenance (non-blocking) + if (startHookCfg.memory?.enabled !== false) { + import("@/memory/maintenance").then(({ MemoryMaintenance }) => { + MemoryMaintenance.run(startProjectDir).catch(() => {}) + }).catch(() => {}) + } + if (input.noReply === true) return message return yield* loop({ sessionID: input.sessionID }) }, @@ -1591,7 +1598,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the Effect.promise(() => SystemPrompt.environment(model)), instruction.system().pipe(Effect.orDie), MessageV2.toModelMessagesEffect(msgs, model), - Effect.promise(() => MemoryInjector.load()), + Effect.promise(() => MemoryInjector.load(agent.name)), ]) const system = [...env, ...(skills ? [skills] : []), ...instructions, ...(memory ? [memory] : [])] const format = lastUser.format ?? 
{ type: "text" as const } diff --git a/packages/opencode/test/hook/verify.test.ts b/packages/opencode/test/hook/verify.test.ts new file mode 100644 index 000000000000..23a411af614a --- /dev/null +++ b/packages/opencode/test/hook/verify.test.ts @@ -0,0 +1,147 @@ +import { describe, test, expect, afterEach } from "bun:test" +import * as fs from "fs/promises" +import * as path from "path" +import os from "os" +import { verifyHookDeployment } from "../../src/hook/verify" +import type { HookConfig } from "../../src/hook/schema" + +const tmpDirs: string[] = [] + +async function makeTmpDir(): Promise { + const dir = await fs.mkdtemp(path.join(os.tmpdir(), "hook-test-")) + tmpDirs.push(dir) + return dir +} + +async function createScript(dir: string, name: string, mode: number): Promise { + const filePath = path.join(dir, name) + await fs.mkdir(path.dirname(filePath), { recursive: true }) + await fs.writeFile(filePath, "#!/bin/bash\nexit 0\n") + await fs.chmod(filePath, mode) + return filePath +} + +afterEach(async () => { + for (const dir of tmpDirs) { + await fs.rm(dir, { recursive: true, force: true }).catch(() => {}) + } + tmpDirs.length = 0 +}) + +describe("hook.verify", () => { + test("detects orphan scripts", async () => { + const hookDir = await makeTmpDir() + await createScript(hookDir, "used.sh", 0o755) + const extraPath = await createScript(hookDir, "extra.sh", 0o755) + + const config: HookConfig = { + PreToolUse: [{ command: path.join(hookDir, "used.sh") }], + } + + const result = await verifyHookDeployment(hookDir, config) + + expect(result.orphanScripts).toContain(extraPath) + expect(result.orphanScripts).not.toContain(path.join(hookDir, "used.sh")) + expect(result.missingScripts).toEqual([]) + expect(result.permissionErrors).toEqual([]) + }) + + test("detects missing scripts", async () => { + const hookDir = await makeTmpDir() + const missingPath = path.join(hookDir, "nonexistent.sh") + + const config: HookConfig = { + PostToolUse: [{ command: missingPath }], 
+ } + + const result = await verifyHookDeployment(hookDir, config) + + expect(result.missingScripts).toContain(missingPath) + }) + + test("detects permission errors for non-executable scripts", async () => { + const hookDir = await makeTmpDir() + const scriptPath = await createScript(hookDir, "readonly.sh", 0o644) + + const config: HookConfig = { + SessionStart: [{ command: scriptPath }], + } + + const result = await verifyHookDeployment(hookDir, config) + + expect(result.permissionErrors).toContain(scriptPath) + expect(result.missingScripts).toEqual([]) + }) + + test("returns empty results for valid deployment", async () => { + const hookDir = await makeTmpDir() + const scriptPath = await createScript(hookDir, "valid.sh", 0o755) + + const config: HookConfig = { + PreToolUse: [{ command: scriptPath }], + } + + const result = await verifyHookDeployment(hookDir, config) + + expect(result.orphanScripts).toEqual([]) + expect(result.missingScripts).toEqual([]) + expect(result.permissionErrors).toEqual([]) + }) + + test("handles empty hook directory with undefined config", async () => { + const hookDir = await makeTmpDir() + + const result = await verifyHookDeployment(hookDir, undefined) + + expect(result.orphanScripts).toEqual([]) + expect(result.missingScripts).toEqual([]) + expect(result.permissionErrors).toEqual([]) + }) + + test("handles non-existent hook directory without throwing", async () => { + const hookDir = path.join(os.tmpdir(), "hook-test-nonexistent-" + Date.now()) + const missingScript = "/tmp/does-not-exist/check.sh" + + const config: HookConfig = { + Notification: [{ command: missingScript }], + } + + const result = await verifyHookDeployment(hookDir, config) + + expect(result.missingScripts).toContain(missingScript) + expect(result.orphanScripts).toEqual([]) + }) + + test("ignores inline shell commands without slash", async () => { + const hookDir = await makeTmpDir() + + const config: HookConfig = { + PreToolUse: [{ command: "echo hello" }], + 
PostToolUse: [{ command: "cat /dev/null" }], + } + + const result = await verifyHookDeployment(hookDir, config) + + // "echo hello" has no slash in the first token, so it is not treated as a script path + expect(result.missingScripts).toEqual([]) + expect(result.orphanScripts).toEqual([]) + expect(result.permissionErrors).toEqual([]) + }) + + test("resolves relative paths against hookDir", async () => { + const hookDir = await makeTmpDir() + const scriptPath = await createScript(hookDir, "scripts/check.sh", 0o755) + + const config: HookConfig = { + PreToolUse: [{ command: "scripts/check.sh --flag" }], + } + + const result = await verifyHookDeployment(hookDir, config) + + // The script exists and is executable; relative path should resolve to hookDir/scripts/check.sh + // Note: listScripts only reads top-level .sh files, so check.sh in a subdirectory + // will not appear as an orphan. The referenced script should pass existence + permission checks. + expect(result.missingScripts).toEqual([]) + expect(result.permissionErrors).toEqual([]) + }) +}) diff --git a/packages/opencode/test/memory/file.test.ts b/packages/opencode/test/memory/file.test.ts index 85b866b502fb..e1e5a2796e2d 100644 --- a/packages/opencode/test/memory/file.test.ts +++ b/packages/opencode/test/memory/file.test.ts @@ -12,17 +12,17 @@ describe("memory.file", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - const entry = { + const entry: Memory.FileEntry = { filename: "test-entry.md", - frontmatter: { topic: "test topic", type: "general" as const }, + frontmatter: { name: "test topic", type: "project" }, content: "Some test content here.", } await MemoryFile.writeEntry(entry) const read = await MemoryFile.readEntry("test-entry.md") expect(read).toBeDefined() expect(read!.filename).toBe("test-entry.md") - expect(read!.frontmatter.topic).toBe("test topic") - expect(read!.frontmatter.type).toBe("general") + expect(read!.frontmatter.name).toBe("test topic") + 
expect(read!.frontmatter.type).toBe("project") expect(read!.content).toBe("Some test content here.") }, }) @@ -89,7 +89,7 @@ describe("memory.file", () => { expect(() => MemoryFile.writeEntry({ filename: "../../../etc/evil.md", - frontmatter: { topic: "evil", type: "general" }, + frontmatter: { name: "evil", type: "project" }, content: "bad", }), ).toThrow("path traversal detected") @@ -113,9 +113,9 @@ describe("memory.file", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - const entry = { + const entry: Memory.FileEntry = { filename: "removable.md", - frontmatter: { topic: "removable", type: "general" as const }, + frontmatter: { name: "removable", type: "project" }, content: "to be removed", } await MemoryFile.writeEntry(entry) @@ -136,20 +136,20 @@ describe("memory.file", () => { fn: async () => { await MemoryFile.writeEntry({ filename: "entry-a.md", - frontmatter: { topic: "A", type: "general" }, + frontmatter: { name: "A", type: "project" }, content: "content a", }) await MemoryFile.writeEntry({ filename: "entry-b.md", - frontmatter: { topic: "B", type: "preference" }, + frontmatter: { name: "B", type: "user" }, content: "content b", }) await MemoryFile.writeIndex("# Index") const entries = await MemoryFile.listEntries() expect(entries.length).toBe(2) - const topics = entries.map((e) => e.frontmatter.topic).sort() - expect(topics).toEqual(["A", "B"]) + const names = entries.map((e) => e.frontmatter.name).sort() + expect(names).toEqual(["A", "B"]) }, }) }) @@ -168,7 +168,7 @@ describe("memory.file", () => { await MemoryFile.writeEntry({ filename: "first.md", - frontmatter: { topic: "first", type: "general" }, + frontmatter: { name: "first", type: "project" }, content: "first entry", }) @@ -195,24 +195,49 @@ describe("memory.file", () => { }) }) - test.each(Memory.TYPES.map((t) => [t]))("readEntry parses valid type: %s", async (validType) => { + test.each(Memory.TYPES.map((t) => [t]))("readEntry parses valid new type: %s", async 
(validType) => { await using tmp = await tmpdir() await Instance.provide({ directory: tmp.path, fn: async () => { const dir = MemoryFile.getMemoryDir() await fs.mkdir(dir, { recursive: true }) - const raw = `---\ntopic: test\ntype: ${validType}\n---\nsome content` + const raw = `---\nname: test\ntype: ${validType}\n---\nsome content` await Bun.write(path.join(dir, "valid-type.md"), raw) const read = await MemoryFile.readEntry("valid-type.md") expect(read).toBeDefined() expect(read!.frontmatter.type).toBe(validType) - expect(read!.frontmatter.topic).toBe("test") + expect(read!.frontmatter.name).toBe("test") expect(read!.content).toBe("some content") }, }) }) + test.each([ + ["error-solution", "project"], + ["build-command", "project"], + ["preference", "user"], + ["decision", "feedback"], + ["config-pattern", "project"], + ["general", "project"], + ] as const)("readEntry maps legacy type %s to %s", async (legacyType, expectedType) => { + await using tmp = await tmpdir() + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const dir = MemoryFile.getMemoryDir() + await fs.mkdir(dir, { recursive: true }) + const raw = `---\ntopic: legacy test\ntype: ${legacyType}\n---\nlegacy content` + await Bun.write(path.join(dir, "legacy.md"), raw) + const read = await MemoryFile.readEntry("legacy.md") + expect(read).toBeDefined() + expect(read!.frontmatter.type).toBe(expectedType) + expect(read!.frontmatter.name).toBe("legacy test") + expect(read!.content).toBe("legacy content") + }, + }) + }) + test("readEntry returns undefined for invalid type in frontmatter", async () => { await using tmp = await tmpdir() await Instance.provide({ @@ -220,7 +245,7 @@ describe("memory.file", () => { fn: async () => { const dir = MemoryFile.getMemoryDir() await fs.mkdir(dir, { recursive: true }) - const raw = "---\ntopic: test\ntype: invalid-type\n---\nsome content" + const raw = "---\nname: test\ntype: invalid-type\n---\nsome content" await Bun.write(path.join(dir, 
"invalid-type.md"), raw) const read = await MemoryFile.readEntry("invalid-type.md") expect(read).toBeUndefined() @@ -235,11 +260,58 @@ describe("memory.file", () => { fn: async () => { const dir = MemoryFile.getMemoryDir() await fs.mkdir(dir, { recursive: true }) - const raw = "---\ntopic: test\n---\nsome content" + const raw = "---\nname: test\n---\nsome content" await Bun.write(path.join(dir, "no-type.md"), raw) const read = await MemoryFile.readEntry("no-type.md") expect(read).toBeUndefined() }, }) }) + + test("readEntry parses scope and agent from frontmatter", async () => { + await using tmp = await tmpdir() + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const dir = MemoryFile.getMemoryDir() + await fs.mkdir(dir, { recursive: true }) + const raw = "---\nname: scoped entry\ndescription: A test description\ntype: user\nscope: personal\nagent: build\n---\nscoped content" + await Bun.write(path.join(dir, "scoped.md"), raw) + const read = await MemoryFile.readEntry("scoped.md") + expect(read).toBeDefined() + expect(read!.frontmatter.name).toBe("scoped entry") + expect(read!.frontmatter.description).toBe("A test description") + expect(read!.frontmatter.type).toBe("user") + expect(read!.frontmatter.scope).toBe("personal") + expect(read!.frontmatter.agent).toBe("build") + }, + }) + }) + + test("writeEntry outputs new frontmatter format with scope and description", async () => { + await using tmp = await tmpdir() + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const entry: Memory.FileEntry = { + filename: "full-entry.md", + frontmatter: { + name: "Full entry", + description: "A complete entry with all fields", + type: "feedback", + scope: "project", + agent: "review", + }, + content: "Full content here.", + } + await MemoryFile.writeEntry(entry) + const raw = await Bun.file(path.join(MemoryFile.getMemoryDir(), "full-entry.md")).text() + expect(raw).toContain("name: Full entry") + expect(raw).toContain("description: A 
complete entry with all fields") + expect(raw).toContain("type: feedback") + expect(raw).toContain("scope: project") + expect(raw).toContain("agent: review") + }, + }) + }) }) diff --git a/packages/opencode/test/memory/injector.test.ts b/packages/opencode/test/memory/injector.test.ts new file mode 100644 index 000000000000..6d7684e2c3e5 --- /dev/null +++ b/packages/opencode/test/memory/injector.test.ts @@ -0,0 +1,232 @@ +import { afterEach, describe, test, expect } from "bun:test" +import path from "path" +import fs from "fs" +import { Instance } from "../../src/project/instance" +import { Database } from "../../src/storage/db" +import { MemoryTable } from "../../src/memory/memory.sql" +import { MemoryInjector } from "../../src/memory/injector" +import { Log } from "../../src/util/log" +import { tmpdir } from "../fixture/fixture" + +Log.init({ print: false }) + +afterEach(async () => { + await Instance.disposeAll() +}) + +type DbClient = Parameters[0] extends (trx: infer D) => any ? D : never + +function seed(d: DbClient, input: { + id: string + projectPath: string + topic: string + type: string + content: string + description?: string + agent?: string + accessCount?: number + scope?: string + relevanceScore?: number + timeUpdated?: number +}) { + const now = Date.now() + d.insert(MemoryTable).values({ + id: input.id, + project_path: input.projectPath, + topic: input.topic, + type: input.type, + content: input.content, + description: input.description ?? null, + agent: input.agent ?? null, + session_id: null, + access_count: input.accessCount ?? 1, + scope: input.scope ?? "project", + relevance_score: input.relevanceScore ?? 1.0, + time_last_verified: null, + promoted_from: null, + time_created: now, + time_updated: input.timeUpdated ?? 
now, + }).run() +} + +describe("MemoryInjector.load", () => { + test("returns undefined when DB is empty and no MEMORY.md", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const result = await MemoryInjector.load() + expect(result).toBeUndefined() + }, + }) + }) + + test("returns memory header and entries when DB has data", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + Database.use((d) => { + seed(d, { + id: "mem-1", + projectPath: tmp.path, + topic: "test-entry", + type: "project", + content: "This is a test memory entry", + accessCount: 2, + }) + }) + + const result = await MemoryInjector.load() + expect(result).toBeDefined() + expect(result).toContain("# Memory") + expect(result).toContain("## Project Knowledge") + expect(result).toContain("test-entry") + }, + }) + }) + + test("groups entries by type into correct sections", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + Database.use((d) => { + seed(d, { id: "m1", projectPath: tmp.path, topic: "proj-note", type: "project", content: "Project info" }) + seed(d, { id: "m2", projectPath: tmp.path, topic: "user-pref", type: "user", content: "User preference" }) + seed(d, { id: "m3", projectPath: tmp.path, topic: "fb-pattern", type: "feedback", content: "Feedback pattern" }) + seed(d, { id: "m4", projectPath: tmp.path, topic: "ref-link", type: "reference", content: "Reference link" }) + }) + + const result = await MemoryInjector.load() + expect(result).toBeDefined() + expect(result).toContain("## Project Knowledge") + expect(result).toContain("## User Preferences") + expect(result).toContain("## Feedback & Patterns") + expect(result).toContain("## Reference") + }, + }) + }) + + test("includes agent entries in output when agent is specified", async () => { + await 
using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + Database.use((d) => { + seed(d, { id: "g1", projectPath: tmp.path, topic: "general-note", type: "project", content: "General info" }) + seed(d, { id: "a1", projectPath: tmp.path, topic: "agent-note", type: "project", content: "Agent-specific info", agent: "code-reviewer" }) + }) + + const result = await MemoryInjector.load("code-reviewer") + expect(result).toBeDefined() + // Agent entries are included in the output (may be in general or agent-specific section) + expect(result).toContain("agent-note") + expect(result).toContain("general-note") + }, + }) + }) + + test("deduplicates agent entries that appear in general entries", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + Database.use((d) => { + seed(d, { id: "shared-1", projectPath: tmp.path, topic: "shared-entry", type: "project", content: "Shared content", agent: "planner" }) + }) + + const result = await MemoryInjector.load("planner") + expect(result).toBeDefined() + const matches = (result!.match(/shared-entry/g) || []).length + expect(matches).toBe(1) + }, + }) + }) + + test("sorts entries by relevance weight (highest first)", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const now = Date.now() + Database.use((d) => { + seed(d, { id: "low", projectPath: tmp.path, topic: "low-relevance", type: "project", content: "Low score", relevanceScore: 0.2, accessCount: 1, timeUpdated: now }) + seed(d, { id: "high", projectPath: tmp.path, topic: "high-relevance", type: "project", content: "High score", relevanceScore: 1.0, accessCount: 10, timeUpdated: now }) + }) + + const result = await MemoryInjector.load() + expect(result).toBeDefined() + const highIdx = result!.indexOf("high-relevance") + const lowIdx = 
result!.indexOf("low-relevance") + expect(highIdx).toBeLessThan(lowIdx) + }, + }) + }) + + test("respects token budget and truncates entries", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + Database.use((d) => { + for (let i = 0; i < 50; i++) { + seed(d, { + id: `bulk-${i}`, + projectPath: tmp.path, + topic: `entry-${i}`, + type: "project", + content: "A".repeat(500), + }) + } + }) + + const result = await MemoryInjector.load() + expect(result).toBeDefined() + const entryCount = (result!.match(/entry-\d+/g) || []).length + expect(entryCount).toBeLessThan(50) + expect(entryCount).toBeGreaterThan(0) + }, + }) + }) + + test("falls back to MEMORY.md when DB has no entries", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const memDir = path.join(tmp.path, ".opencode", "memory") + fs.mkdirSync(memDir, { recursive: true }) + fs.writeFileSync(path.join(memDir, "MEMORY.md"), "- [Test Memory](test.md) -- a test memory entry\n") + + const result = await MemoryInjector.load() + expect(result).toBeDefined() + expect(result).toContain("# Memory") + expect(result).toContain("Test Memory") + }, + }) + }) + + test("includes description in entry output when present", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + Database.use((d) => { + seed(d, { + id: "desc-1", + projectPath: tmp.path, + topic: "described-entry", + type: "user", + content: "Content here", + description: "This is the description", + }) + }) + + const result = await MemoryInjector.load() + expect(result).toBeDefined() + expect(result).toContain("This is the description") + }, + }) + }) +}) diff --git a/packages/opencode/test/memory/maintenance.test.ts b/packages/opencode/test/memory/maintenance.test.ts new file mode 100644 index 000000000000..44858986e706 --- 
/dev/null +++ b/packages/opencode/test/memory/maintenance.test.ts @@ -0,0 +1,531 @@ +import { afterEach, describe, test, expect } from "bun:test" +import { eq } from "drizzle-orm" +import path from "path" +import { Instance } from "../../src/project/instance" +import { Database } from "../../src/storage/db" +import { MemoryTable } from "../../src/memory/memory.sql" +import { MemoryMaintenance } from "../../src/memory/maintenance" +import { MemoryPromoter } from "../../src/memory/promoter" +import { Log } from "../../src/util/log" +import { tmpdir } from "../fixture/fixture" + +Log.init({ print: false }) + +afterEach(async () => { + await Instance.disposeAll() +}) + +type DbClient = Parameters<typeof Database.use>[0] extends (trx: infer D) => any ? D : never + +function seed(d: DbClient, input: { + id: string + projectPath: string + topic: string + type: string + content: string + accessCount?: number + scope?: string + relevanceScore?: number + timeUpdated?: number +}) { + const now = Date.now() + d.insert(MemoryTable).values({ + id: input.id, + project_path: input.projectPath, + topic: input.topic, + type: input.type, + content: input.content, + session_id: null, + access_count: input.accessCount ?? 0, + scope: input.scope ?? "project", + relevance_score: input.relevanceScore ?? 1.0, + time_last_verified: null, + promoted_from: null, + time_created: now, + time_updated: input.timeUpdated ?? 
now, + }).run() +} + +describe("MemoryMaintenance.mergeDuplicates", () => { + test("merges entries with same lowercase name", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + Database.use((d) => { + seed(d, { id: "dup1", projectPath, topic: "Hello World", type: "project", content: "content A", accessCount: 1 }) + seed(d, { id: "dup2", projectPath, topic: "hello world", type: "project", content: "content B", accessCount: 2 }) + }) + + const merged = await MemoryMaintenance.mergeDuplicates(projectPath) + expect(merged).toBe(1) + + const remaining = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.project_path, projectPath)).all() + ) + expect(remaining.length).toBe(1) + }, + }) + }) + + test("keeps entry with highest access count", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + Database.use((d) => { + seed(d, { id: "low", projectPath, topic: "Topic", type: "project", content: "low", accessCount: 1 }) + seed(d, { id: "mid", projectPath, topic: "topic", type: "project", content: "mid", accessCount: 5 }) + seed(d, { id: "high", projectPath, topic: "TOPIC", type: "project", content: "high", accessCount: 10 }) + }) + + await MemoryMaintenance.mergeDuplicates(projectPath) + + const remaining = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.project_path, projectPath)).all() + ) + expect(remaining.length).toBe(1) + expect(remaining[0].id).toBe("high") + expect(remaining[0].access_count).toBe(10) + }, + }) + }) + + test("concatenates content from duplicates", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + Database.use((d) => { + seed(d, { id: "a", projectPath, topic: "merge me", 
type: "project", content: "alpha", accessCount: 5 }) + seed(d, { id: "b", projectPath, topic: "Merge Me", type: "project", content: "beta", accessCount: 3 }) + }) + + await MemoryMaintenance.mergeDuplicates(projectPath) + + const remaining = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.project_path, projectPath)).all() + ) + expect(remaining.length).toBe(1) + expect(remaining[0].content).toContain("alpha") + expect(remaining[0].content).toContain("beta") + }, + }) + }) + + test("returns 0 when no duplicates exist", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + Database.use((d) => { + seed(d, { id: "unique1", projectPath, topic: "first topic", type: "project", content: "c1" }) + seed(d, { id: "unique2", projectPath, topic: "second topic", type: "project", content: "c2" }) + }) + + const merged = await MemoryMaintenance.mergeDuplicates(projectPath) + expect(merged).toBe(0) + }, + }) + }) +}) + +describe("MemoryMaintenance.decayRelevance", () => { + test("decays entries older than 7 days", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + const fourteenDaysAgo = Date.now() - 14 * 24 * 60 * 60 * 1000 + + Database.use((d) => { + seed(d, { + id: "old1", + projectPath, + topic: "old entry", + type: "project", + content: "stale", + relevanceScore: 1.0, + timeUpdated: fourteenDaysAgo, + }) + }) + + const decayed = await MemoryMaintenance.decayRelevance(projectPath) + expect(decayed).toBe(1) + + const row = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "old1")).get() + ) + // 14 days = 2 periods of 7 days => 0.95^2 = 0.9025 + expect(row!.relevance_score).toBeCloseTo(0.9025, 3) + }, + }) + }) + + test("skips entries updated within 7 days", async () => { + await using tmp = await tmpdir({ 
git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + + Database.use((d) => { + seed(d, { + id: "recent1", + projectPath, + topic: "recent entry", + type: "project", + content: "fresh", + relevanceScore: 1.0, + }) + }) + + const decayed = await MemoryMaintenance.decayRelevance(projectPath) + expect(decayed).toBe(0) + + const row = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "recent1")).get() + ) + expect(row!.relevance_score).toBe(1.0) + }, + }) + }) + + test("skips insignificant changes below 0.001 threshold", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + // 1 period (7-13 days): 0.95^1 = 0.95, diff from 0.95 = 0.0 + // Already at 0.95 score with 1 period => new = 0.95 * 0.95 = 0.9025, diff = 0.0475 > 0.001 + // For insignificance: score must be tiny so diff < 0.001 + // score=0.01, 1 period: new=0.0095, diff=0.0005 < 0.001 => skipped + const eightDaysAgo = Date.now() - 8 * 24 * 60 * 60 * 1000 + + Database.use((d) => { + seed(d, { + id: "tiny1", + projectPath, + topic: "tiny entry", + type: "project", + content: "minimal", + relevanceScore: 0.01, + timeUpdated: eightDaysAgo, + }) + }) + + const decayed = await MemoryMaintenance.decayRelevance(projectPath) + expect(decayed).toBe(0) + + const row = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "tiny1")).get() + ) + expect(row!.relevance_score).toBe(0.01) + }, + }) + }) +}) + +describe("MemoryMaintenance.removeStale", () => { + test("removes entries with relevanceScore < 0.1", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + + Database.use((d) => { + seed(d, { + id: "stale1", + projectPath, + topic: "stale entry", + type: "project", + content: "should be removed", 
+ relevanceScore: 0.05, + }) + }) + + await MemoryMaintenance.removeStale(projectPath) + + const row = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "stale1")).get() + ) + expect(row).toBeUndefined() + }, + }) + }) + + test("keeps entries with relevanceScore >= 0.1", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + + Database.use((d) => { + seed(d, { + id: "healthy1", + projectPath, + topic: "healthy entry", + type: "project", + content: "should remain", + relevanceScore: 0.5, + }) + }) + + await MemoryMaintenance.removeStale(projectPath) + + const row = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "healthy1")).get() + ) + expect(row).toBeDefined() + expect(row!.relevance_score).toBe(0.5) + }, + }) + }) + + test("returns count of removed entries", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + + Database.use((d) => { + seed(d, { id: "rm1", projectPath, topic: "rm one", type: "project", content: "c1", relevanceScore: 0.02 }) + seed(d, { id: "rm2", projectPath, topic: "rm two", type: "project", content: "c2", relevanceScore: 0.09 }) + seed(d, { id: "keep1", projectPath, topic: "keep", type: "project", content: "c3", relevanceScore: 0.8 }) + }) + + const removed = await MemoryMaintenance.removeStale(projectPath) + expect(removed).toBe(2) + + const remaining = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.project_path, projectPath)).all() + ) + expect(remaining.length).toBe(1) + expect(remaining[0].id).toBe("keep1") + }, + }) + }) +}) + +describe("MemoryMaintenance.verifyReferences", () => { + test("penalizes entries referencing missing files", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, 
+ fn: async () => { + const projectPath = tmp.path + + Database.use((d) => { + seed(d, { + id: "ref1", + projectPath, + topic: "broken ref", + type: "project", + content: "see ./nonexistent.ts for details", + relevanceScore: 1.0, + }) + }) + + const verified = await MemoryMaintenance.verifyReferences(projectPath) + expect(verified).toBe(1) + + const row = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "ref1")).get() + ) + expect(row!.relevance_score).toBeLessThan(1.0) + }, + }) + }) + + test("does not penalize when all referenced files exist", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + const realFile = path.join(projectPath, "existing.ts") + await Bun.write(realFile, "export const x = 1") + + Database.use((d) => { + seed(d, { + id: "ref2", + projectPath, + topic: "valid ref", + type: "project", + content: "see ./existing.ts for details", + relevanceScore: 1.0, + }) + }) + + await MemoryMaintenance.verifyReferences(projectPath) + + const row = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "ref2")).get() + ) + expect(row!.relevance_score).toBe(1.0) + }, + }) + }) + + test("skips paths outside project directory", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + + Database.use((d) => { + seed(d, { + id: "ref3", + projectPath, + topic: "escape ref", + type: "project", + content: "see ../../etc/passwd for secrets", + relevanceScore: 1.0, + }) + }) + + const verified = await MemoryMaintenance.verifyReferences(projectPath) + // Path outside project is skipped, so 0 missing files counted + expect(verified).toBe(0) + + const row = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "ref3")).get() + ) + expect(row!.relevance_score).toBe(1.0) + }, + }) + }) +}) 
+ +describe("MemoryMaintenance.run", () => { + test("executes full cycle without error", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + + Database.use((d) => { + seed(d, { id: "run1", projectPath, topic: "normal", type: "project", content: "active entry", relevanceScore: 0.8 }) + seed(d, { id: "run2", projectPath, topic: "stale", type: "project", content: "dying entry", relevanceScore: 0.05 }) + }) + + // Should not throw + await MemoryMaintenance.run(projectPath) + + // Stale entry should have been removed + const stale = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "run2")).get() + ) + expect(stale).toBeUndefined() + + // Normal entry should still exist + const normal = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "run1")).get() + ) + expect(normal).toBeDefined() + }, + }) + }) +}) + +describe("MemoryPromoter.detectCandidates", () => { + test("returns entries with accessCount > 5 and scope personal", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + + Database.use((d) => { + seed(d, { id: "cand1", projectPath, topic: "hot entry", type: "project", content: "popular", scope: "personal", accessCount: 10 }) + seed(d, { id: "cand2", projectPath, topic: "warm entry", type: "project", content: "somewhat", scope: "personal", accessCount: 6 }) + }) + + const candidates = await MemoryPromoter.detectCandidates(projectPath) + expect(candidates.length).toBe(2) + const ids = candidates.map((c) => c.id).sort() + expect(ids).toEqual(["cand1", "cand2"]) + }, + }) + }) + + test("excludes entries with accessCount <= 5", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + + 
Database.use((d) => { + seed(d, { id: "low1", projectPath, topic: "cold entry", type: "project", content: "rarely used", scope: "personal", accessCount: 3 }) + seed(d, { id: "edge1", projectPath, topic: "edge entry", type: "project", content: "at threshold", scope: "personal", accessCount: 5 }) + }) + + const candidates = await MemoryPromoter.detectCandidates(projectPath) + expect(candidates.length).toBe(0) + }, + }) + }) +}) + +describe("MemoryPromoter.autoPromote", () => { + test("promotes eligible entries to project scope", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + + Database.use((d) => { + seed(d, { id: "promo1", projectPath, topic: "promote me", type: "project", content: "popular item", scope: "personal", accessCount: 8 }) + }) + + await MemoryPromoter.autoPromote(projectPath) + + const row = Database.use((d) => + d.select().from(MemoryTable).where(eq(MemoryTable.id, "promo1")).get() + ) + expect(row!.scope).toBe("project") + expect(row!.promoted_from).toBe("personal") + }, + }) + }) + + test("returns count of promoted entries", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const projectPath = tmp.path + + Database.use((d) => { + seed(d, { id: "p1", projectPath, topic: "entry one", type: "project", content: "c1", scope: "personal", accessCount: 7 }) + seed(d, { id: "p2", projectPath, topic: "entry two", type: "project", content: "c2", scope: "personal", accessCount: 9 }) + seed(d, { id: "p3", projectPath, topic: "entry three", type: "project", content: "c3", scope: "personal", accessCount: 2 }) + }) + + const promoted = await MemoryPromoter.autoPromote(projectPath) + expect(promoted).toBe(2) + + // Verify the two promoted entries changed scope + const e1 = Database.use((d) => d.select().from(MemoryTable).where(eq(MemoryTable.id, "p1")).get()) + const e2 
= Database.use((d) => d.select().from(MemoryTable).where(eq(MemoryTable.id, "p2")).get()) + const e3 = Database.use((d) => d.select().from(MemoryTable).where(eq(MemoryTable.id, "p3")).get()) + expect(e1!.scope).toBe("project") + expect(e2!.scope).toBe("project") + expect(e3!.scope).toBe("personal") + }, + }) + }) +}) diff --git a/packages/opencode/test/memory/summary-bridge.test.ts b/packages/opencode/test/memory/summary-bridge.test.ts new file mode 100644 index 000000000000..d97f0139d1ca --- /dev/null +++ b/packages/opencode/test/memory/summary-bridge.test.ts @@ -0,0 +1,100 @@ +import { afterEach, describe, test, expect } from "bun:test" +import { Instance } from "../../src/project/instance" +import { Log } from "../../src/util/log" +import { SummaryBridge } from "../../src/memory/summary-bridge" +import { tmpdir } from "../fixture/fixture" + +Log.init({ print: false }) + +afterEach(async () => { + await Instance.disposeAll() +}) + +describe("memory.summary-bridge", () => { + test("extracts candidates from valid summary", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const summary = `## Summary +This session did X and Y. + +## Memory Candidates +- **User preference**: Prefers TypeScript over JavaScript +- **Project structure**: Uses monorepo with packages/ + +## Next Steps +Do Z next.` + + await expect( + SummaryBridge.extractMemoryCandidates(summary, "ses_1", tmp.path), + ).resolves.toBeUndefined() + }, + }) + }) + + test("returns silently when no marker present", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const summary = "## Summary\nJust a regular summary with no memory section." 
+ + await expect( + SummaryBridge.extractMemoryCandidates(summary, "ses_2", tmp.path), + ).resolves.toBeUndefined() + }, + }) + }) + + test("handles bold and non-bold formats", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const summary = `## Memory Candidates +- **Bold name**: Bold description +- Plain name: Plain description` + + await expect( + SummaryBridge.extractMemoryCandidates(summary, "ses_3", tmp.path), + ).resolves.toBeUndefined() + }, + }) + }) + + test("stops at next heading boundary", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const summary = `## Memory Candidates +- **Item**: Description + +## Other Section +This should not be parsed as a candidate. +- **Not a candidate**: Should be ignored` + + await expect( + SummaryBridge.extractMemoryCandidates(summary, "ses_4", tmp.path), + ).resolves.toBeUndefined() + }, + }) + }) + + test("handles empty candidates section", async () => { + await using tmp = await tmpdir({ git: true }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const summary = `## Memory Candidates + +## Next Section` + + await expect( + SummaryBridge.extractMemoryCandidates(summary, "ses_5", tmp.path), + ).resolves.toBeUndefined() + }, + }) + }) +}) diff --git a/packages/opencode/test/session/instruction-rules.test.ts b/packages/opencode/test/session/instruction-rules.test.ts index 12cd8d9192d5..52a4e630089f 100644 --- a/packages/opencode/test/session/instruction-rules.test.ts +++ b/packages/opencode/test/session/instruction-rules.test.ts @@ -56,7 +56,7 @@ describe("Instruction.systemPaths rules loading", () => { } }) - test("project rule with same filename overrides global", async () => { + test("both global and project rules with same filename are loaded", async () => { await using homeTmp = await tmpdir({ init: async (dir) => { await 
Bun.write(path.join(dir, ".opencode", "rules", "style.md"), "# Global Style") @@ -79,8 +79,8 @@ describe("Instruction.systemPaths rules loading", () => { const paths = await Instruction.systemPaths() // Project style.md should be present expect(paths.has(path.join(projectTmp.path, ".opencode", "rules", "style.md"))).toBe(true) - // Global style.md should NOT be present (overridden) - expect(paths.has(path.join(homeTmp.path, ".opencode", "rules", "style.md"))).toBe(false) + // Global style.md should also be present (both are loaded) + expect(paths.has(path.join(homeTmp.path, ".opencode", "rules", "style.md"))).toBe(true) // Global unique-global.md should still be present expect(paths.has(path.join(homeTmp.path, ".opencode", "rules", "unique-global.md"))).toBe(true) },