Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
565 changes: 313 additions & 252 deletions generated/competitive/narsil-mcp.md

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions src/batch.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { flowData } from './flow.js';
import {
contextData,
explainData,
exportsData,
fileDepsData,
fnDepsData,
fnImpactData,
Expand All @@ -34,6 +35,7 @@ export const BATCH_COMMANDS = {
query: { fn: fnDepsData, sig: 'name' },
impact: { fn: impactAnalysisData, sig: 'file' },
deps: { fn: fileDepsData, sig: 'file' },
exports: { fn: exportsData, sig: 'file' },
flow: { fn: flowData, sig: 'name' },
dataflow: { fn: dataflowData, sig: 'name' },
complexity: { fn: complexityData, sig: 'dbOnly' },
Expand Down
224 changes: 142 additions & 82 deletions src/builder.js
Original file line number Diff line number Diff line change
Expand Up @@ -338,6 +338,76 @@ function getChangedFiles(db, allFiles, rootDir) {
return { changed, removed, isFullBuild: false };
}

/**
 * Purge all graph data for the specified files.
 * Deletes: embeddings → edges (in+out) → node_metrics → function_complexity → dataflow → nodes.
 * Handles missing tables gracefully (embeddings, complexity, dataflow may not exist in older DBs).
 * All per-file deletes run inside a single transaction (when the driver supports it),
 * so a crash cannot leave the graph half-purged and we avoid one implicit commit
 * per DELETE statement.
 *
 * @param {import('better-sqlite3').Database} db - Open writable database
 * @param {string[]} files - Relative file paths to purge
 * @param {object} [options]
 * @param {boolean} [options.purgeHashes=true] - Also delete file_hashes entries
 */
export function purgeFilesFromGraph(db, files, options = {}) {
  const { purgeHashes = true } = options;
  if (!files || files.length === 0) return;

  // Probe for the optional embeddings table (absent in older DBs).
  let hasEmbeddings = false;
  try {
    db.prepare('SELECT 1 FROM embeddings LIMIT 1').get();
    hasEmbeddings = true;
  } catch {
    /* table doesn't exist */
  }

  // Rows keyed by node id (embeddings/edges/metrics/complexity/dataflow) must be
  // deleted BEFORE the nodes rows that anchor their subqueries.
  const deleteEmbeddingsForFile = hasEmbeddings
    ? db.prepare('DELETE FROM embeddings WHERE node_id IN (SELECT id FROM nodes WHERE file = ?)')
    : null;
  const deleteNodesForFile = db.prepare('DELETE FROM nodes WHERE file = ?');
  const deleteEdgesForFile = db.prepare(`
    DELETE FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = @f)
       OR target_id IN (SELECT id FROM nodes WHERE file = @f)
  `);
  const deleteMetricsForFile = db.prepare(
    'DELETE FROM node_metrics WHERE node_id IN (SELECT id FROM nodes WHERE file = ?)',
  );
  // Optional tables: prepare() throws when the table is missing — fall back to null.
  let deleteComplexityForFile;
  try {
    deleteComplexityForFile = db.prepare(
      'DELETE FROM function_complexity WHERE node_id IN (SELECT id FROM nodes WHERE file = ?)',
    );
  } catch {
    deleteComplexityForFile = null;
  }
  let deleteDataflowForFile;
  try {
    deleteDataflowForFile = db.prepare(
      'DELETE FROM dataflow WHERE source_id IN (SELECT id FROM nodes WHERE file = ?) OR target_id IN (SELECT id FROM nodes WHERE file = ?)',
    );
  } catch {
    deleteDataflowForFile = null;
  }
  let deleteHashForFile;
  if (purgeHashes) {
    try {
      deleteHashForFile = db.prepare('DELETE FROM file_hashes WHERE file = ?');
    } catch {
      deleteHashForFile = null;
    }
  }

  const runPurge = (fileList) => {
    for (const relPath of fileList) {
      deleteEmbeddingsForFile?.run(relPath);
      deleteEdgesForFile.run({ f: relPath });
      deleteMetricsForFile.run(relPath);
      deleteComplexityForFile?.run(relPath);
      deleteDataflowForFile?.run(relPath, relPath);
      deleteNodesForFile.run(relPath);
      if (purgeHashes) deleteHashForFile?.run(relPath);
    }
  };
  // better-sqlite3 wraps the calls in a single BEGIN/COMMIT; fall back to a
  // plain call for database objects that don't expose transaction().
  const purgeAll = typeof db.transaction === 'function' ? db.transaction(runPurge) : runPurge;
  purgeAll(files);
}

export async function buildGraph(rootDir, opts = {}) {
const dbPath = path.join(rootDir, '.codegraph', 'graph.db');
const db = openDb(dbPath);
Expand Down Expand Up @@ -384,19 +454,46 @@ export async function buildGraph(rootDir, opts = {}) {
);
}

const collected = collectFiles(rootDir, [], config, new Set());
const files = collected.files;
const discoveredDirs = collected.directories;
info(`Found ${files.length} files to parse`);

// Check for incremental build
const { changed, removed, isFullBuild } = incremental
? getChangedFiles(db, files, rootDir)
: { changed: files.map((f) => ({ file: f })), removed: [], isFullBuild: true };

// Separate metadata-only updates (mtime/size self-heal) from real changes
const parseChanges = changed.filter((c) => !c.metadataOnly);
const metadataUpdates = changed.filter((c) => c.metadataOnly);
// ── Scoped rebuild: rebuild only specified files ──────────────────
let files, discoveredDirs, parseChanges, metadataUpdates, removed, isFullBuild;

if (opts.scope) {
const scopedFiles = opts.scope.map((f) => normalizePath(f));
const existing = [];
const missing = [];
for (const rel of scopedFiles) {
const abs = path.join(rootDir, rel);
if (fs.existsSync(abs)) {
existing.push({ file: abs, relPath: rel });
} else {
missing.push(rel);
}
}
files = existing.map((e) => e.file);
// Derive discoveredDirs from scoped files' parent directories
discoveredDirs = new Set(existing.map((e) => path.dirname(e.file)));
parseChanges = existing;
metadataUpdates = [];
removed = missing;
isFullBuild = false;
info(`Scoped rebuild: ${existing.length} files to rebuild, ${missing.length} to purge`);
} else {
const collected = collectFiles(rootDir, [], config, new Set());
files = collected.files;
discoveredDirs = collected.directories;
info(`Found ${files.length} files to parse`);

// Check for incremental build
const increResult = incremental
? getChangedFiles(db, files, rootDir)
: { changed: files.map((f) => ({ file: f })), removed: [], isFullBuild: true };
removed = increResult.removed;
isFullBuild = increResult.isFullBuild;

// Separate metadata-only updates (mtime/size self-heal) from real changes
parseChanges = increResult.changed.filter((c) => !c.metadataOnly);
metadataUpdates = increResult.changed.filter((c) => c.metadataOnly);
}

if (!isFullBuild && parseChanges.length === 0 && removed.length === 0) {
// Still update metadata for self-healing even when no real changes
Expand Down Expand Up @@ -446,29 +543,33 @@ export async function buildGraph(rootDir, opts = {}) {
// Find files with edges pointing TO changed/removed files.
// Their nodes stay intact (preserving IDs), but outgoing edges are
// deleted so they can be rebuilt during the edge-building pass.
const changedRelPaths = new Set();
for (const item of parseChanges) {
changedRelPaths.add(item.relPath || normalizePath(path.relative(rootDir, item.file)));
}
for (const relPath of removed) {
changedRelPaths.add(relPath);
}

// When opts.noReverseDeps is true (e.g. agent rollback to same version),
// skip this cascade — the agent knows exports didn't change.
const reverseDeps = new Set();
if (changedRelPaths.size > 0) {
const findReverseDeps = db.prepare(`
SELECT DISTINCT n_src.file FROM edges e
JOIN nodes n_src ON e.source_id = n_src.id
JOIN nodes n_tgt ON e.target_id = n_tgt.id
WHERE n_tgt.file = ? AND n_src.file != n_tgt.file AND n_src.kind != 'directory'
`);
for (const relPath of changedRelPaths) {
for (const row of findReverseDeps.all(relPath)) {
if (!changedRelPaths.has(row.file) && !reverseDeps.has(row.file)) {
// Verify the file still exists on disk
const absPath = path.join(rootDir, row.file);
if (fs.existsSync(absPath)) {
reverseDeps.add(row.file);
if (!opts.noReverseDeps) {
const changedRelPaths = new Set();
for (const item of parseChanges) {
changedRelPaths.add(item.relPath || normalizePath(path.relative(rootDir, item.file)));
}
for (const relPath of removed) {
changedRelPaths.add(relPath);
}

if (changedRelPaths.size > 0) {
const findReverseDeps = db.prepare(`
SELECT DISTINCT n_src.file FROM edges e
JOIN nodes n_src ON e.source_id = n_src.id
JOIN nodes n_tgt ON e.target_id = n_tgt.id
WHERE n_tgt.file = ? AND n_src.file != n_tgt.file AND n_src.kind != 'directory'
`);
for (const relPath of changedRelPaths) {
for (const row of findReverseDeps.all(relPath)) {
if (!changedRelPaths.has(row.file) && !reverseDeps.has(row.file)) {
// Verify the file still exists on disk
const absPath = path.join(rootDir, row.file);
if (fs.existsSync(absPath)) {
reverseDeps.add(row.file);
}
}
}
}
Expand All @@ -482,57 +583,16 @@ export async function buildGraph(rootDir, opts = {}) {
debug(`Changed files: ${parseChanges.map((c) => c.relPath).join(', ')}`);
if (removed.length > 0) debug(`Removed files: ${removed.join(', ')}`);
// Remove embeddings/metrics/edges/nodes for changed and removed files
// Embeddings must be deleted BEFORE nodes (we need node IDs to find them)
const deleteEmbeddingsForFile = hasEmbeddings
? db.prepare('DELETE FROM embeddings WHERE node_id IN (SELECT id FROM nodes WHERE file = ?)')
: null;
const deleteNodesForFile = db.prepare('DELETE FROM nodes WHERE file = ?');
const deleteEdgesForFile = db.prepare(`
DELETE FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = @f)
OR target_id IN (SELECT id FROM nodes WHERE file = @f)
`);
const deleteOutgoingEdgesForFile = db.prepare(
'DELETE FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = ?)',
);
const deleteMetricsForFile = db.prepare(
'DELETE FROM node_metrics WHERE node_id IN (SELECT id FROM nodes WHERE file = ?)',
const changePaths = parseChanges.map(
(item) => item.relPath || normalizePath(path.relative(rootDir, item.file)),
);
let deleteComplexityForFile;
try {
deleteComplexityForFile = db.prepare(
'DELETE FROM function_complexity WHERE node_id IN (SELECT id FROM nodes WHERE file = ?)',
);
} catch {
deleteComplexityForFile = null;
}
let deleteDataflowForFile;
try {
deleteDataflowForFile = db.prepare(
'DELETE FROM dataflow WHERE source_id IN (SELECT id FROM nodes WHERE file = ?) OR target_id IN (SELECT id FROM nodes WHERE file = ?)',
);
} catch {
deleteDataflowForFile = null;
}
for (const relPath of removed) {
deleteEmbeddingsForFile?.run(relPath);
deleteEdgesForFile.run({ f: relPath });
deleteMetricsForFile.run(relPath);
deleteComplexityForFile?.run(relPath);
deleteDataflowForFile?.run(relPath, relPath);
deleteNodesForFile.run(relPath);
}
for (const item of parseChanges) {
const relPath = item.relPath || normalizePath(path.relative(rootDir, item.file));
deleteEmbeddingsForFile?.run(relPath);
deleteEdgesForFile.run({ f: relPath });
deleteMetricsForFile.run(relPath);
deleteComplexityForFile?.run(relPath);
deleteDataflowForFile?.run(relPath, relPath);
deleteNodesForFile.run(relPath);
}
purgeFilesFromGraph(db, [...removed, ...changePaths], { purgeHashes: false });

// Process reverse deps: delete only outgoing edges (nodes/IDs preserved)
// then add them to the parse list so they participate in edge building
const deleteOutgoingEdgesForFile = db.prepare(
'DELETE FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = ?)',
);
for (const relPath of reverseDeps) {
deleteOutgoingEdgesForFile.run(relPath);
}
Expand Down
21 changes: 21 additions & 0 deletions src/cli.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ import {
diffImpact,
explain,
fileDeps,
fileExports,
fnDeps,
fnImpact,
impactAnalysis,
Expand Down Expand Up @@ -217,6 +218,26 @@ program
});
});

// CLI: `exports <file>` — list a file's exported symbols together with the
// consumers of each export. Thin wrapper that normalizes flags and delegates
// to fileExports().
program
  .command('exports <file>')
  .description('Show exported symbols with per-symbol consumers (who calls each export)')
  .option('-d, --db <path>', 'Path to graph.db')
  .option('-T, --no-tests', 'Exclude test/spec files from results')
  .option('--include-tests', 'Include test/spec files (overrides excludeTests config)')
  .option('-j, --json', 'Output as JSON')
  .option('--limit <number>', 'Max results to return')
  .option('--offset <number>', 'Skip N results (default: 0)')
  .option('--ndjson', 'Newline-delimited JSON output')
  .action((file, cmdOpts) => {
    // Explicit radix; undefined lets the data layer apply its own default.
    const toInt = (value) => (value ? parseInt(value, 10) : undefined);
    fileExports(file, cmdOpts.db, {
      noTests: resolveNoTests(cmdOpts),
      json: cmdOpts.json,
      limit: toInt(cmdOpts.limit),
      offset: toInt(cmdOpts.offset),
      ndjson: cmdOpts.ndjson,
    });
  });

program
.command('fn-impact <name>')
.description('Function-level impact: what functions break if this one changes')
Expand Down
2 changes: 2 additions & 0 deletions src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -111,9 +111,11 @@ export {
diffImpactData,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

purgeFilesFromGraph not exported but mentioned in PR description as "Exports purgeFilesFromGraph from the programmatic API"

Suggested change
diffImpactData,
export { buildGraph, collectFiles, loadPathAliases, purgeFilesFromGraph, resolveImportPath } from './builder.js';

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Fixed in 817b579. The PR description has been updated to remove all scoped-rebuild references — this PR only adds the exports command. The purgeFilesFromGraph export was already removed in 651ddb2.

diffImpactMermaid,
explainData,
exportsData,
FALSE_POSITIVE_CALLER_THRESHOLD,
FALSE_POSITIVE_NAMES,
fileDepsData,
fileExports,
fnDepsData,
fnImpactData,
impactAnalysisData,
Expand Down
22 changes: 22 additions & 0 deletions src/mcp.js
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,20 @@ const BASE_TOOLS = [
required: ['file'],
},
},
  // MCP tool descriptor: file_exports — mirrors the CLI `exports` command.
  // `file` supports partial path matching; limit/offset come from the shared
  // PAGINATION_PROPS spread.
  {
    name: 'file_exports',
    description:
      'Show exported symbols of a file with per-symbol consumers — who calls each export and from where',
    inputSchema: {
      type: 'object',
      properties: {
        file: { type: 'string', description: 'File path (partial match supported)' },
        no_tests: { type: 'boolean', description: 'Exclude test files', default: false },
        ...PAGINATION_PROPS,
      },
      required: ['file'],
    },
  },
{
name: 'impact_analysis',
description: 'Show files affected by changes to a given file (transitive)',
Expand Down Expand Up @@ -740,6 +754,7 @@ export async function startMCPServer(customDbPath, options = {}) {
fnImpactData,
pathData,
contextData,
exportsData,
explainData,
whereData,
diffImpactData,
Expand Down Expand Up @@ -825,6 +840,13 @@ export async function startMCPServer(customDbPath, options = {}) {
offset: args.offset ?? 0,
});
break;
      // Dispatch for the file_exports MCP tool: delegate to exportsData,
      // clamping the requested page size to MCP_MAX_LIMIT (falling back to the
      // tool's default from MCP_DEFAULTS when the client omits `limit`).
      case 'file_exports':
        result = exportsData(args.file, dbPath, {
          noTests: args.no_tests,
          limit: Math.min(args.limit ?? MCP_DEFAULTS.file_exports, MCP_MAX_LIMIT),
          offset: args.offset ?? 0,
        });
        break;
case 'impact_analysis':
result = impactAnalysisData(args.file, dbPath, {
noTests: args.no_tests,
Expand Down
1 change: 1 addition & 0 deletions src/paginate.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ export const MCP_DEFAULTS = {
context: 5,
explain: 10,
file_deps: 20,
file_exports: 20,
diff_impact: 30,
impact_analysis: 20,
semantic_search: 20,
Expand Down
Loading