Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/batch.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
*/

import { complexityData } from './complexity.js';
import { dataflowData } from './dataflow.js';
import { flowData } from './flow.js';
import {
contextData,
Expand Down Expand Up @@ -36,6 +37,7 @@ export const BATCH_COMMANDS = {
impact: { fn: impactAnalysisData, sig: 'file' },
deps: { fn: fileDepsData, sig: 'file' },
flow: { fn: flowData, sig: 'name' },
dataflow: { fn: dataflowData, sig: 'name' },
complexity: { fn: complexityData, sig: 'dbOnly' },
};

Expand Down
24 changes: 23 additions & 1 deletion src/builder.js
Original file line number Diff line number Diff line change
Expand Up @@ -435,7 +435,7 @@ export async function buildGraph(rootDir, opts = {}) {

if (isFullBuild) {
const deletions =
'PRAGMA foreign_keys = OFF; DELETE FROM node_metrics; DELETE FROM edges; DELETE FROM function_complexity; DELETE FROM nodes; PRAGMA foreign_keys = ON;';
'PRAGMA foreign_keys = OFF; DELETE FROM node_metrics; DELETE FROM edges; DELETE FROM function_complexity; DELETE FROM dataflow; DELETE FROM nodes; PRAGMA foreign_keys = ON;';
db.exec(
hasEmbeddings
? `${deletions.replace('PRAGMA foreign_keys = ON;', '')} DELETE FROM embeddings; PRAGMA foreign_keys = ON;`
Expand Down Expand Up @@ -505,11 +505,20 @@ export async function buildGraph(rootDir, opts = {}) {
} catch {
deleteComplexityForFile = null;
}
// Prepared statement that purges dataflow edges touching any node in a
// given file (matched on either endpoint, so the same path is bound twice
// at run time). Declared with `let` so the catch branch can null it out.
let deleteDataflowForFile;
try {
deleteDataflowForFile = db.prepare(
'DELETE FROM dataflow WHERE source_id IN (SELECT id FROM nodes WHERE file = ?) OR target_id IN (SELECT id FROM nodes WHERE file = ?)',
);
} catch {
// prepare() throws if the dataflow table does not exist — presumably a
// database built without --dataflow. Use null as a sentinel so callers
// can skip the cleanup via optional chaining.
deleteDataflowForFile = null;
}
// Purge every per-file artifact for files that no longer exist on disk.
for (const relPath of removed) {
// Embeddings/complexity/dataflow statements are null when their tables
// are absent from this database; optional chaining makes them no-ops.
deleteEmbeddingsForFile?.run(relPath);
deleteEdgesForFile.run({ f: relPath });
deleteMetricsForFile.run(relPath);
deleteComplexityForFile?.run(relPath);
// Two positional bindings: the statement matches the file on both the
// source and target side of each dataflow edge.
deleteDataflowForFile?.run(relPath, relPath);
// Nodes go last so the id-based subqueries above still find their rows.
deleteNodesForFile.run(relPath);
}
for (const item of parseChanges) {
Expand All @@ -518,6 +527,7 @@ export async function buildGraph(rootDir, opts = {}) {
deleteEdgesForFile.run({ f: relPath });
deleteMetricsForFile.run(relPath);
deleteComplexityForFile?.run(relPath);
deleteDataflowForFile?.run(relPath, relPath);
deleteNodesForFile.run(relPath);
}

Expand Down Expand Up @@ -1078,6 +1088,18 @@ export async function buildGraph(rootDir, opts = {}) {
}
_t.complexityMs = performance.now() - _t.complexity0;

// Opt-in dataflow analysis (--dataflow). The module is imported lazily so
// builds that do not request it pay no load cost.
if (opts.dataflow) {
_t.dataflow0 = performance.now();
try {
const { buildDataflowEdges } = await import('./dataflow.js');
await buildDataflowEdges(db, allSymbols, rootDir, engineOpts);
} catch (err) {
// Best-effort pass: a dataflow failure is logged via debug() but must
// never abort the overall graph build.
debug(`Dataflow analysis failed: ${err.message}`);
}
// Timing is recorded even when the pass failed, covering the attempt.
_t.dataflowMs = performance.now() - _t.dataflow0;
}

// Release any remaining cached WASM trees for GC
for (const [, symbols] of allSymbols) {
symbols._tree = null;
Expand Down
38 changes: 37 additions & 1 deletion src/cli.js
Original file line number Diff line number Diff line change
Expand Up @@ -97,10 +97,11 @@ program
.command('build [dir]')
.description('Parse repo and build graph in .codegraph/graph.db')
.option('--no-incremental', 'Force full rebuild (ignore file hashes)')
.option('--dataflow', 'Extract data flow edges (flows_to, returns, mutates)')
.action(async (dir, opts) => {
const root = path.resolve(dir || '.');
const engine = program.opts().engine;
await buildGraph(root, { incremental: opts.incremental, engine });
await buildGraph(root, { incremental: opts.incremental, engine, dataflow: opts.dataflow });
});

program
Expand Down Expand Up @@ -967,6 +968,41 @@ program
});
});

// Register the `dataflow <name>` command: inspects a function's data flow
// (parameters, return-value consumers, mutations) from the prebuilt graph.
program
  .command('dataflow <name>')
  .description('Show data flow for a function: parameters, return consumers, mutations')
  .option('-d, --db <path>', 'Path to graph.db')
  .option('-f, --file <path>', 'Scope to file (partial match)')
  .option('-k, --kind <kind>', 'Filter by symbol kind')
  .option('-T, --no-tests', 'Exclude test/spec files from results')
  .option('--include-tests', 'Include test/spec files (overrides excludeTests config)')
  .option('-j, --json', 'Output as JSON')
  .option('--ndjson', 'Newline-delimited JSON output')
  .option('--limit <number>', 'Max results to return')
  .option('--offset <number>', 'Skip N results (default: 0)')
  .option('--path <target>', 'Find data flow path to <target>')
  .option('--impact', 'Show data-dependent blast radius')
  .option('--depth <n>', 'Max traversal depth', '5')
  .action(async (name, opts) => {
    const { kind } = opts;
    // Reject unknown symbol kinds up front with a non-zero exit.
    if (kind && !ALL_SYMBOL_KINDS.includes(kind)) {
      console.error(`Invalid kind "${kind}". Valid: ${ALL_SYMBOL_KINDS.join(', ')}`);
      process.exit(1);
    }
    // Truthy-only parse mirrors the original: empty/absent values stay undefined.
    const toInt = (value) => (value ? parseInt(value, 10) : undefined);
    // Lazy import keeps CLI startup cheap for other commands.
    const { dataflow } = await import('./dataflow.js');
    // NOTE(review): depth is forwarded as a string (commander default '5');
    // presumably dataflow() parses it — confirm against that module.
    dataflow(name, opts.db, {
      file: opts.file,
      kind,
      noTests: resolveNoTests(opts),
      json: opts.json,
      ndjson: opts.ndjson,
      limit: toInt(opts.limit),
      offset: toInt(opts.offset),
      path: opts.path,
      impact: opts.impact,
      depth: opts.depth,
    });
  });

program
.command('complexity [target]')
.description('Show per-function complexity metrics (cognitive, cyclomatic, nesting depth, MI)')
Expand Down
Loading