-
Notifications
You must be signed in to change notification settings - Fork 0
feat(memory): Memory V2 schema — scoped types, relevance scoring, maintenance #156
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
99b0303
a74f196
d3d4bfd
3e4ddd8
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,12 @@ | ||
| ALTER TABLE `memory` ADD COLUMN `scope` text NOT NULL DEFAULT 'project';--> statement-breakpoint | ||
| ALTER TABLE `memory` ADD COLUMN `description` text;--> statement-breakpoint | ||
| ALTER TABLE `memory` ADD COLUMN `agent` text;--> statement-breakpoint | ||
| ALTER TABLE `memory` ADD COLUMN `relevance_score` real NOT NULL DEFAULT 1.0;--> statement-breakpoint | ||
| ALTER TABLE `memory` ADD COLUMN `time_last_verified` integer;--> statement-breakpoint | ||
| ALTER TABLE `memory` ADD COLUMN `promoted_from` text;--> statement-breakpoint | ||
| CREATE INDEX `memory_agent_idx` ON `memory` (`agent`);--> statement-breakpoint | ||
| CREATE INDEX `memory_scope_idx` ON `memory` (`scope`);--> statement-breakpoint | ||
| CREATE INDEX `memory_project_scope_idx` ON `memory` (`project_path`, `scope`);--> statement-breakpoint | ||
| UPDATE `memory` SET `type` = 'project' WHERE `type` IN ('error-solution', 'build-command', 'config-pattern', 'general');--> statement-breakpoint | ||
| UPDATE `memory` SET `type` = 'user' WHERE `type` = 'preference';--> statement-breakpoint | ||
| UPDATE `memory` SET `type` = 'feedback' WHERE `type` = 'decision'; |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,3 +1,4 @@ | ||
| import { Hash } from "../util/hash" | ||
| import { Log } from "../util/log" | ||
| import { Process } from "../util/process" | ||
| import type { HookEntry } from "./schema" | ||
|
|
@@ -23,40 +24,132 @@ export interface HookEnv { | |
| export interface HookResult { | ||
| action: "pass" | "block" | ||
| message?: string | ||
| status?: "ok" | "error" | "timeout" | "unexpected_exit" | ||
| duration?: number | ||
| exitCode?: number | ||
| } | ||
|
|
||
| // --------------------------------------------------------------------------- | ||
| // LRU cache for PreToolUse hooks | ||
| // --------------------------------------------------------------------------- | ||
| const CACHE_MAX = 100 | ||
| const CACHE_TTL = 5_000 // 5 seconds | ||
|
|
||
| interface CacheEntry { | ||
| result: HookResult | ||
| expiresAt: number | ||
| } | ||
|
|
||
| const preToolUseCache = new Map<string, CacheEntry>() | ||
|
|
||
| function cacheKey(command: string, toolName: string | undefined, toolInput: string | undefined): string { | ||
| return Hash.fast(`${command}\0${toolName ?? ""}\0${toolInput ?? ""}`) | ||
| } | ||
|
|
||
| function getCached(key: string): HookResult | undefined { | ||
| const entry = preToolUseCache.get(key) | ||
| if (!entry) return undefined | ||
| if (Date.now() > entry.expiresAt) { | ||
| preToolUseCache.delete(key) | ||
| return undefined | ||
| } | ||
| // Move to end for LRU ordering | ||
| preToolUseCache.delete(key) | ||
| preToolUseCache.set(key, entry) | ||
| return entry.result | ||
| } | ||
|
|
||
| function setCached(key: string, result: HookResult): void { | ||
| // Evict oldest entries when at capacity | ||
| if (preToolUseCache.size >= CACHE_MAX) { | ||
| const oldest = preToolUseCache.keys().next().value | ||
| if (oldest !== undefined) preToolUseCache.delete(oldest) | ||
| } | ||
| preToolUseCache.set(key, { | ||
| result, | ||
| expiresAt: Date.now() + CACHE_TTL, | ||
| }) | ||
| } | ||
|
|
||
| /** Exposed for testing only. */ | ||
| export function clearHookCache(): void { | ||
| preToolUseCache.clear() | ||
| } | ||
|
|
||
| export async function runHook(entry: HookEntry, env: HookEnv): Promise<HookResult> { | ||
| const isPreToolUse = env.OPENCODE_HOOK_EVENT === "PreToolUse" | ||
| const timeout = entry.timeout ?? DEFAULT_TIMEOUT | ||
| const command = entry.command.replace(/^~/, process.env.HOME ?? "~") | ||
|
|
||
| // Check cache for PreToolUse hooks | ||
| if (isPreToolUse) { | ||
| const key = cacheKey(command, env.OPENCODE_TOOL_NAME, env.OPENCODE_TOOL_INPUT) | ||
| const cached = getCached(key) | ||
| if (cached) return cached | ||
| } | ||
|
|
||
| const start = Date.now() | ||
|
|
||
| try { | ||
| const result = await Process.run(["sh", "-c", command], { | ||
| env: toEnvRecord(env), | ||
| abort: AbortSignal.timeout(timeout), | ||
| nothrow: true, | ||
| }) | ||
|
|
||
| const duration = Date.now() - start | ||
| const stderr = result.stderr.toString().trim() | ||
|
|
||
| if (result.code === 0) { | ||
| return { action: "pass", message: stderr || undefined } | ||
| const hookResult: HookResult = { | ||
| action: "pass", | ||
| message: stderr || undefined, | ||
| status: "ok", | ||
| duration, | ||
| exitCode: 0, | ||
| } | ||
| if (isPreToolUse) { | ||
| setCached(cacheKey(command, env.OPENCODE_TOOL_NAME, env.OPENCODE_TOOL_INPUT), hookResult) | ||
| } | ||
| return hookResult | ||
| } | ||
| if (result.code === 2) { | ||
| return { action: "block", message: stderr || "Blocked by hook" } | ||
| // Do NOT cache block results -- security decisions must always be fresh | ||
| return { | ||
| action: "block", | ||
| message: stderr || "Blocked by hook", | ||
| status: "ok", | ||
| duration, | ||
| exitCode: 2, | ||
| } | ||
| } | ||
|
|
||
| log.warn("hook exited with unexpected code", { | ||
| command: entry.command, | ||
| code: result.code, | ||
| stderr, | ||
| }) | ||
| return { action: "pass" } | ||
| return { | ||
| action: "pass", | ||
| status: "unexpected_exit", | ||
| duration, | ||
| exitCode: result.code, | ||
| } | ||
| } catch (error) { | ||
| const duration = Date.now() - start | ||
| const isTimeout = | ||
| error instanceof Error && | ||
| (error.name === "TimeoutError" || error.name === "AbortError") | ||
|
|
||
| log.warn("hook execution failed", { | ||
| command: entry.command, | ||
| error: error instanceof Error ? error.message : String(error), | ||
| }) | ||
| return { action: "pass" } | ||
| return { | ||
| action: "pass", | ||
| status: isTimeout ? "timeout" : "error", | ||
| duration, | ||
| } | ||
| } | ||
| } | ||
|
|
||
|
|
@@ -87,18 +180,28 @@ export async function runHooks( | |
| ): Promise<HookResult> { | ||
| if (!entries || entries.length === 0) return { action: "pass" } | ||
|
|
||
| const messages: string[] = [] | ||
| const matched = entries.filter((entry) => matchesTool(entry.matcher, toolName)) | ||
| if (matched.length === 0) return { action: "pass" } | ||
|
|
||
| const results = await Promise.all(matched.map((entry) => runHook(entry, env))) | ||
|
|
||
| for (const entry of entries) { | ||
| if (!matchesTool(entry.matcher, toolName)) continue | ||
| // Collect messages in original order, stopping after the first "block". | ||
| // This preserves sequential message semantics while hooks execute in parallel. | ||
| const messages: string[] = [] | ||
| let blocked = false | ||
|
|
||
| const result = await runHook(entry, env) | ||
| for (const result of results) { | ||
| if (result.message) messages.push(result.message) | ||
| if (result.action === "block") { | ||
| return { action: "block", message: messages.join("\n") } | ||
| blocked = true | ||
| break | ||
| } | ||
| } | ||
|
Comment on lines +183 to 199
||
|
|
||
| if (blocked) { | ||
| return { action: "block", message: messages.join("\n") } | ||
| } | ||
|
|
||
| return { | ||
| action: "pass", | ||
| message: messages.length > 0 ? messages.join("\n") : undefined, | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,127 @@ | ||
| import { access, constants, readdir } from "node:fs/promises" | ||
| import { join, resolve } from "node:path" | ||
| import type { HookConfig } from "./schema" | ||
|
|
||
| export interface VerifyResult { | ||
| orphanScripts: string[] | ||
| missingScripts: string[] | ||
| permissionErrors: string[] | ||
| } | ||
|
|
||
| /** | ||
| * Verify hook deployment integrity by comparing the hook directory contents | ||
| * against the registered config entries. | ||
| * | ||
| * Detects: | ||
| * - Orphan scripts: files in hookDir not referenced by any config entry | ||
| * - Missing scripts: config entries pointing to scripts that do not exist | ||
| * - Permission errors: scripts that exist but lack execute permission | ||
| */ | ||
| export async function verifyHookDeployment( | ||
| hookDir: string, | ||
| config: HookConfig, | ||
| ): Promise<VerifyResult> { | ||
| const resolvedDir = resolve(hookDir) | ||
| const result: VerifyResult = { | ||
| orphanScripts: [], | ||
| missingScripts: [], | ||
| permissionErrors: [], | ||
| } | ||
|
|
||
| // Collect all script paths referenced in config | ||
| const referencedPaths = extractScriptPaths(config, resolvedDir) | ||
|
|
||
| // List scripts on disk | ||
| const diskScripts = await listScripts(resolvedDir) | ||
|
|
||
| // Orphan detection: scripts on disk not referenced in config | ||
| for (const scriptPath of diskScripts) { | ||
| if (!referencedPaths.has(scriptPath)) { | ||
| result.orphanScripts.push(scriptPath) | ||
| } | ||
| } | ||
|
|
||
| // Missing + permission checks for referenced paths | ||
| const checks = [...referencedPaths].map(async (scriptPath) => { | ||
| const exists = await fileExists(scriptPath) | ||
| if (!exists) { | ||
| result.missingScripts.push(scriptPath) | ||
| return | ||
| } | ||
| const executable = await isExecutable(scriptPath) | ||
| if (!executable) { | ||
| result.permissionErrors.push(scriptPath) | ||
| } | ||
| }) | ||
| await Promise.all(checks) | ||
|
|
||
| return result | ||
| } | ||
|
|
||
| function extractScriptPaths(config: HookConfig, hookDir: string): Set<string> { | ||
| const paths = new Set<string>() | ||
| if (!config) return paths | ||
|
|
||
| const events = ["PreToolUse", "PostToolUse", "SessionStart", "Notification"] as const | ||
| for (const event of events) { | ||
| const entries = config[event] | ||
| if (!entries) continue | ||
| for (const entry of entries) { | ||
| const scriptPath = resolveScriptPath(entry.command, hookDir) | ||
| if (scriptPath) paths.add(scriptPath) | ||
| } | ||
| } | ||
| return paths | ||
| } | ||
|
|
||
| /** | ||
| * Extract the script file path from a hook command string. | ||
| * Handles: | ||
| * - Direct paths: `/path/to/script.sh` | ||
| * - Tilde paths: `~/hooks/script.sh` | ||
| * - Commands with args: `/path/to/script.sh --flag` | ||
| * - Inline shell (no path): `echo "hello"` -> returns null | ||
| */ | ||
| function resolveScriptPath(command: string, hookDir: string): string | null { | ||
| const expanded = command.replace(/^~/, process.env.HOME ?? "~") | ||
| const firstToken = expanded.split(/\s+/)[0] | ||
| if (!firstToken) return null | ||
|
|
||
| // Only treat as a file path if it contains a slash (absolute or relative) | ||
| if (!firstToken.includes("/")) return null | ||
|
|
||
| // Resolve relative paths against hookDir | ||
| if (!firstToken.startsWith("/")) { | ||
| return resolve(hookDir, firstToken) | ||
| } | ||
| return firstToken | ||
| } | ||
|
|
||
| async function listScripts(dir: string): Promise<string[]> { | ||
| try { | ||
| const entries = await readdir(dir, { withFileTypes: true }) | ||
| return entries | ||
| .filter((e) => e.isFile() && e.name.endsWith(".sh")) | ||
| .map((e) => join(dir, e.name)) | ||
| } catch { | ||
| return [] | ||
| } | ||
| } | ||
|
|
||
| async function fileExists(path: string): Promise<boolean> { | ||
| try { | ||
| await access(path, constants.F_OK) | ||
| return true | ||
| } catch { | ||
| return false | ||
| } | ||
| } | ||
|
|
||
| async function isExecutable(path: string): Promise<boolean> { | ||
| try { | ||
| await access(path, constants.X_OK) | ||
| return true | ||
| } catch { | ||
| return false | ||
| } | ||
| } |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
New config options
`memory.consolidation` and `memory.scope` are added to the schema but appear unused in the codebase (no references found). Either wire them into memory creation/maintenance behavior (default scope for new memories; ability to toggle consolidation/maintenance) or remove them to avoid dead config surface area.