diff --git a/.github/git-mind.yml.example b/.github/git-mind.yml.example new file mode 100644 index 00000000..5d4c584e --- /dev/null +++ b/.github/git-mind.yml.example @@ -0,0 +1,8 @@ +# git-mind configuration for GitHub Actions +# Copy to .github/git-mind.yml and customize + +suggest: + # Agent command for generating suggestions. + # Must accept prompt on stdin and output JSON on stdout. + # Example using Claude: + agent: "claude -p --output-format json" diff --git a/.github/workflows/gitmind-review.yml b/.github/workflows/gitmind-review.yml new file mode 100644 index 00000000..28043c6a --- /dev/null +++ b/.github/workflows/gitmind-review.yml @@ -0,0 +1,100 @@ +name: git-mind Review Commands + +on: + issue_comment: + types: [created] + +permissions: + contents: read + pull-requests: write + issues: write + +jobs: + handle-command: + if: > + github.event.issue.pull_request && + contains(github.event.comment.body, '/gitmind') + runs-on: ubuntu-latest + steps: + # Checkout default branch (trusted code) — never the PR head. + # issue_comment is a privileged context; checking out attacker-controlled + # code would allow arbitrary execution with write permissions. + - name: Checkout + uses: actions/checkout@v4 + + - name: Check commenter permissions + id: authz + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COMMENTER: ${{ github.event.comment.user.login }} + REPO: ${{ github.repository }} + run: | + PERMISSION=$(gh api "repos/${REPO}/collaborators/${COMMENTER}/permission" --jq '.permission') + if [[ "$PERMISSION" != "admin" && "$PERMISSION" != "write" && "$PERMISSION" != "maintain" ]]; then + echo "User ${COMMENTER} does not have write access. Skipping." 
+          echo "authorized=false" >> "$GITHUB_OUTPUT" + else + echo "authorized=true" >> "$GITHUB_OUTPUT" + fi + + - name: Setup Node.js + if: steps.authz.outputs.authorized == 'true' + uses: actions/setup-node@v4 + with: + node-version: '22' + + - name: Install dependencies + if: steps.authz.outputs.authorized == 'true' + run: npm ci + + - name: Parse and execute command + if: steps.authz.outputs.authorized == 'true' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COMMENT_BODY: ${{ github.event.comment.body }} + PR_NUMBER: ${{ github.event.issue.number }} + REPO: ${{ github.repository }} + run: | + # Extract command from comment + COMMAND=$(printf '%s' "$COMMENT_BODY" | grep -oP '/gitmind\s+\K(accept-all|accept\s+\d+|reject\s+\d+)' || true) + + if [ -z "$COMMAND" ]; then + echo "No valid /gitmind command found" + exit 0 + fi + + ACTION=$(echo "$COMMAND" | awk '{print $1}') + INDEX=$(echo "$COMMAND" | awk '{print $2}') + + case "$ACTION" in + accept-all) + if node bin/git-mind.js review --batch accept --json; then + REPLY="All pending suggestions accepted." + else + REPLY="Failed to accept suggestions." + fi + ;; + accept) + if [ -z "$INDEX" ] || ! [[ "$INDEX" =~ ^[0-9]+$ ]]; then + REPLY="Invalid index. Usage: \`/gitmind accept <index>\`" + elif node bin/git-mind.js review --batch accept --index "$INDEX" --json; then + REPLY="Suggestion $INDEX accepted." + else + REPLY="Failed to accept suggestion $INDEX." 
+ fi + ;; + esac + + if [ -n "$REPLY" ]; then + gh api "repos/${REPO}/issues/${PR_NUMBER}/comments" \ + -f body="**git-mind:** $REPLY" + fi diff --git a/CHANGELOG.md b/CHANGELOG.md index 2bca59a1..c7a1b909 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,75 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [2.0.0-alpha.4] - 2026-02-13 + +### Added + +- **`git mind at ` command** — Time-travel: materialize the graph at a historical point via epoch markers. Resolves git refs to Lamport ticks and filters the CRDT graph to that ceiling. Supports `--json` output with epoch metadata (#202) +- **Epoch API** — `getCurrentTick(graph)`, `recordEpoch(graph, sha, tick)`, `lookupEpoch(graph, sha)`, `lookupNearestEpoch(graph, cwd, sha)`, `getEpochForRef(graph, cwd, ref)` in `src/epoch.js` (#202) +- **Automatic epoch recording** — `processCommit` now records an epoch marker after processing each commit, correlating the commit SHA to the current Lamport tick (#202) + +### Fixed + +- **Shell injection in `src/epoch.js`** — Replaced `execSync` string interpolation with `execFileSync` array args in `lookupNearestEpoch` and `getEpochForRef`, preventing command injection via crafted ref names (#202) +- **Missing `contents` permission in `gitmind-review.yml`** — Workflow now includes `contents: read` so `actions/checkout` can fetch the repo; unspecified scopes default to `none` when `permissions` is explicit (#200) +- **`action.yml` ignores workflow-level `GITMIND_AGENT`** — Validation step no longer overrides inherited env var with empty `inputs.agent`; suggest step falls back to `env.GITMIND_AGENT` (#199) +- **`parseReviewCommand` accepts index 0** — Now returns `null` for index `< 1` since suggestions are 1-indexed (#200) +- **Backtick characters in PR suggestion table** — 
`formatSuggestionsAsMarkdown` strips backticks from source/target to prevent breaking inline code spans (#200) +- **`findMarkdownFiles` swallows all errors** — Now only catches `ENOENT`/`ENOTDIR`; permission errors and other failures propagate (#196) +- **`extractGraphData` strips extension from directory name** — `String.replace` with `extname` only replaced the first `.md` occurrence; now uses `slice` to target only the trailing extension (#196) +- **`qualifyNodeId` unhelpful error for non-prefixed IDs** — Now throws a descriptive error mentioning `prefix:identifier` format instead of falling through to `buildCrossRepoId`'s generic validation (#197) +- **`qualifyNodeId` accepts multi-colon local IDs** — `a:b:c` now throws a clear error instead of falling through to `buildCrossRepoId`'s generic validation (#197) +- **`formatSuggestionsAsMarkdown` crashes on missing `type`** — All suggestion fields (`source`, `target`, `type`, `confidence`, `rationale`) now have null-coalescing guards (#200) +- **Empty pending list produces confusing range** — `reviewCmd` now says "No pending suggestions to review" instead of "Index N out of range (1-0)" (#200) +- **`import`/`export` positional arg breaks with preceding flags** — Both `git mind import --dry-run file.yaml` and `git mind export --format json file.yaml` now scan for the first non-flag argument (#196, #195) +- **Frontmatter closing delimiter matches `---suffix`** — `parseFrontmatter` now requires `---` followed by newline or EOF (#196) +- **Doctor orphan detection uses hardcoded prefixes** — Now uses `SYSTEM_PREFIXES` from validators plus `decision` instead of hardcoded `startsWith` checks (#201) +- **Epoch SHA truncation too short** — Widened from 8 to 12 characters to reduce birthday-paradox collision risk (#202) +- **`serializeExport` silently falls back to YAML** — Now throws on unsupported format instead of silently defaulting (#195) +- **Empty `catch` in `processCommit`** — Epoch recording errors now logged when 
`GITMIND_DEBUG` is set (#202) +- **`gitmind-review.yml` `echo` for untrusted input** — Replaced with `printf '%s'` to prevent `-n`/`-e`/backslash misbehavior (#200) + +### Changed + +- **`epoch` added to `SYSTEM_PREFIXES`** — Epoch markers use the `epoch:` prefix, classified as system. Excluded from export and doctor orphan detection (#202) +- **Permission test skipped under root** — `findMarkdownFiles` chmod test now skips when running as root (CI containers) (#196) +- **Test count** — 312 tests across 19 files (was 286 across 18) + +## [2.0.0-alpha.3] - 2026-02-12 + +### Added + +- **`git mind export` command** — Serialize the graph to YAML or JSON in v1 import-compatible format, enabling round-trip workflows. Supports `--format yaml|json`, `--prefix ` filtering, file output or stdout, and `--json` for structured metadata (#195) +- **Export API** — `exportGraph(graph, opts)`, `serializeExport(data, format)`, `exportToFile(graph, path, opts)` in `src/export.js` (#195) +- **`git mind import --from-markdown` command** — Import nodes and edges from markdown file frontmatter. Auto-generates `doc:` IDs from file paths, recognizes all 8 edge types as frontmatter fields. Supports `--dry-run`, `--json`, glob patterns (#196) +- **Frontmatter API** — `parseFrontmatter(content)`, `extractGraphData(path, frontmatter)`, `findMarkdownFiles(basePath, pattern)`, `importFromMarkdown(graph, cwd, pattern, opts)` in `src/frontmatter.js` (#196) +- **`importData` shared pipeline** — Extracted from `importFile` in `src/import.js` for reuse by frontmatter import and future merge (#196) +- **Cross-repo edge protocol** — `repo:owner/name:prefix:identifier` syntax for referencing nodes in other repositories. `git mind link --remote ` qualifies local IDs. 
Validators accept cross-repo format, `extractPrefix` returns inner prefix (#197) +- **Remote API** — `parseCrossRepoId`, `buildCrossRepoId`, `isCrossRepoId`, `extractRepo`, `qualifyNodeId` in `src/remote.js` (#197) +- **`git mind merge` command** — Merge another repository's graph into the local graph with cross-repo qualification. Supports `--from `, `--repo-name `, `--dry-run`, `--json`. Auto-detects repo identifier from origin remote (#198) +- **Merge API** — `mergeFromRepo(localGraph, remotePath, opts)`, `detectRepoIdentifier(repoPath)` in `src/merge.js` (#198) +- **GitHub Action** — Composite action (`action.yml`) that runs `git mind suggest` on PRs and posts formatted suggestions as a comment. Configurable agent command via action input or `.github/git-mind.yml` (#199) +- **PR suggestion display** — `formatSuggestionsAsMarkdown` renders suggestions as a markdown table with `/gitmind accept|reject|accept-all` commands. `parseReviewCommand` parses slash commands from comment bodies (#200) +- **Slash command workflow** — `.github/workflows/gitmind-review.yml` handles `/gitmind accept N`, `/gitmind reject N`, and `/gitmind accept-all` commands in PR comments (#200) + +### Fixed + +- **Privileged workflow checkout** — `gitmind-review.yml` now checks out the default branch (trusted code) instead of the PR head ref, preventing untrusted code execution in `issue_comment` context. Permissions scoped to `pull-requests: write` and `issues: write` only (#200) +- **Shell injection in `post-comment.js`** — Comment body passed via stdin (`--input -`) instead of shell interpolation, preventing backtick command substitution. 
Repo and PR number validated before use (#199) +- **`BOOLEAN_FLAGS` missing `dry-run` and `validate`** — `parseFlags` now treats `--dry-run` and `--validate` as boolean flags instead of consuming the next argument as their value (#195, #198) +- **Pipe characters in markdown table** — `formatSuggestionsAsMarkdown` escapes `|` in rationale and type fields to prevent table row corruption (#200) +- **Frontmatter CRLF handling** — `parseFrontmatter` now finds the first newline dynamically instead of assuming `\n` at offset 4, supporting Windows line endings (#196) +- **`buildCrossRepoId` validation** — Throws on malformed `localId` missing `prefix:identifier` format instead of producing an invalid cross-repo ID (#197) +- **Orphaned JSDoc** — `formatExportResult` moved above `formatImportResult`'s JSDoc block to restore correct documentation association (#195) +- **Accept/reject workflow stubs** — Individual `/gitmind accept N` and `/gitmind reject N` now respond with "not yet supported" instead of silently appearing to succeed (#200) +- **`action.yml` stderr mixing** — Suggest step redirects stderr to `/dev/null` instead of mixing it into JSON output (#199) + +### Changed + +- **`repo` added to `SYSTEM_PREFIXES`** — Cross-repo IDs use the `repo:` prefix, now classified as system (#197) +- **Test count** — 286 tests across 18 files (was 208 across 13) + ## [2.0.0-alpha.2] - 2026-02-11 ### Added @@ -134,5 +203,6 @@ Complete rewrite from C23 to Node.js on `@git-stunts/git-warp`. 
- Docker-based CI/CD - All C-specific documentation +[2.0.0-alpha.3]: https://github.com/neuroglyph/git-mind/releases/tag/v2.0.0-alpha.3 [2.0.0-alpha.2]: https://github.com/neuroglyph/git-mind/releases/tag/v2.0.0-alpha.2 [2.0.0-alpha.0]: https://github.com/neuroglyph/git-mind/releases/tag/v2.0.0-alpha.0 diff --git a/GRAPH_SCHEMA.md b/GRAPH_SCHEMA.md index 3477c4c1..d0a1280e 100644 --- a/GRAPH_SCHEMA.md +++ b/GRAPH_SCHEMA.md @@ -65,9 +65,9 @@ The `/` does not require escaping outside JS regex literals. | Whitespace | Invalid anywhere in the ID — no trimming, no normalization | | Comparison | Exact byte/character match — no Unicode normalization | -### Reserved: Cross-Repo IDs (v2) +### Cross-Repo IDs -The following syntax is **reserved** for version 2. Version 1 parsers must not accept it. +Cross-repo IDs reference nodes in other repositories. ``` cross-repo-id = "repo:" owner "/" name ":" prefix ":" identifier @@ -75,6 +75,12 @@ cross-repo-id = "repo:" owner "/" name ":" prefix ":" identifier Example: `repo:neuroglyph/echo:crate:echo-core` +**Rules:** +- The `repo:` prefix is a system prefix — it cannot be used for regular nodes +- `extractPrefix` returns the **inner** prefix (e.g., `crate` for `repo:owner/name:crate:id`) +- Cross-repo IDs are valid in any context where a node ID is accepted +- Use `git mind link --remote ` to qualify local IDs as cross-repo + --- ## 3. Prefix Taxonomy diff --git a/GUIDE.md b/GUIDE.md index e8cfc3e1..a4bc2643 100644 --- a/GUIDE.md +++ b/GUIDE.md @@ -14,9 +14,10 @@ Everything you need to know — from zero to power user. 6. [Views](#views) 7. [Importing graphs from YAML](#importing-graphs-from-yaml) 8. [Commit directives](#commit-directives) -9. [Using git-mind as a library](#using-git-mind-as-a-library) -10. [Appendix A: How it works under the hood](#appendix-a-how-it-works-under-the-hood) -11. [Appendix B: Edge types reference](#appendix-b-edge-types-reference) +9. [Time-travel with `git mind at`](#time-travel-with-git-mind-at) +10. 
[Using git-mind as a library](#using-git-mind-as-a-library) +11. [Appendix A: How it works under the hood](#appendix-a-how-it-works-under-the-hood) +12. [Appendix B: Edge types reference](#appendix-b-edge-types-reference) --- @@ -32,7 +33,7 @@ git-mind captures those relationships explicitly, so you can query them, visuali **What makes it different?** -- **Git-native** — Your graph is versioned alongside your code. Check out an old commit, get the old graph. +- **Git-native** — Your graph is versioned alongside your code. Use `git mind at` to see the graph at any historical point. - **Conflict-free** — Built on CRDTs, so multiple people can add edges simultaneously without conflicts. - **Branch and merge** — Try experimental connections in a branch, merge what works. - **No setup** — No database to run. No config files. Just `git mind init`. @@ -298,17 +299,43 @@ git mind view roadmap # render the roadmap view git mind view architecture # render the architecture view ``` +### `git mind at ` + +Show the graph at a historical point in time. + +```bash +git mind at HEAD~50 +git mind at v1.0.0 +git mind at abc123 --json +``` + +Resolves the ref to a commit SHA, finds the epoch marker (or nearest ancestor), and materializes the graph at that Lamport tick. See [Time-travel with `git mind at`](#time-travel-with-git-mind-at) for details. + +**Flags:** + +| Flag | Description | +|------|-------------| +| `--json` | Output as JSON (includes epoch metadata) | + ### `git mind suggest` -*(Stub — not yet implemented)* +Generate AI-powered edge suggestions based on recent code changes. -Will use AI to suggest edges based on code analysis. +```bash +git mind suggest +git mind suggest --agent "my-agent-cmd" --context HEAD~5..HEAD --json +``` ### `git mind review` -*(Stub — not yet implemented)* +Review pending AI suggestions interactively or in batch. -Will present suggested edges for human review and approval. 
+```bash +git mind review # interactive mode +git mind review --batch accept # accept all pending +git mind review --batch reject # reject all pending +git mind review --json # list pending as JSON +``` ### `git mind help` @@ -468,6 +495,70 @@ await processCommit(graph, { --- +## Time-travel with `git mind at` + +git-mind records **epoch markers** as you commit. Each epoch correlates a git commit SHA to a Lamport tick in the CRDT graph, allowing you to materialize the graph at any historical point in time. + +### Setup + +Epoch markers are recorded automatically when you use `git mind process-commit` (either manually or via the post-commit hook). Install the hook to start recording: + +```bash +git mind install-hooks +``` + +### Usage + +```bash +# See the graph as it was at a specific commit +git mind at HEAD~50 + +# Use any git ref — branch names, tags, SHAs +git mind at v1.0.0 +git mind at abc123 + +# JSON output (includes epoch metadata) +git mind at HEAD~10 --json +``` + +### How it works + +When a commit is processed, git-mind: + +1. Records the current Lamport tick (the CRDT's logical clock) +2. Stores an `epoch:` node in the graph with the tick, full SHA, and timestamp +3. These epoch nodes travel with the graph on push/pull/merge — they're part of the CRDT + +When you run `git mind at `: + +1. The ref is resolved to a commit SHA +2. The epoch node for that SHA is looked up (or the nearest ancestor's epoch) +3. The graph is materialized with a ceiling at that tick — only patches with `lamport <= tick` are visible +4. 
You see the graph exactly as it was at that moment in time + +### Programmatic usage + +```javascript +import { loadGraph, getEpochForRef, computeStatus, getCurrentTick, recordEpoch } from '@neuroglyph/git-mind'; + +const graph = await loadGraph('.'); + +// Record an epoch for the current commit +const tick = await getCurrentTick(graph); +await recordEpoch(graph, commitSha, tick); + +// Time-travel to a ref +const result = await getEpochForRef(graph, '.', 'HEAD~10'); +if (result) { + graph._seekCeiling = result.epoch.tick; + await graph.materialize({ ceiling: result.epoch.tick }); + const status = await computeStatus(graph); + console.log(status); +} +``` + +--- + ## Using git-mind as a library git-mind exports its core modules for use in scripts and integrations. diff --git a/README.md b/README.md index f81420ba..8d93732f 100644 --- a/README.md +++ b/README.md @@ -4,24 +4,28 @@ **git-mind** turns any Git repository into a semantic knowledge graph. Link files, concepts, and ideas with typed, confidence-scored edges — all stored in Git itself. No servers. No databases. Just `git push`. -Because the graph lives *in* Git, it evolves with your code. Check out last month's commit and see what you understood then. Check out today's and see how your understanding has grown. Your knowledge has a history — git-mind makes it visible. +Because the graph lives *in* Git, it evolves with your code. Use `git mind at` to travel back in time and see what you understood then. Your knowledge has a history — git-mind makes it visible. ## Watch your understanding evolve ```bash -# What implemented the auth spec six months ago? -$ git checkout main~200 -$ git mind list --type implements --target docs/auth-spec.md - src/basic_auth.js --[implements]--> docs/auth-spec.md (100%) - -# What implements it now? 
-$ git checkout main -$ git mind list --type implements --target docs/auth-spec.md - src/oauth2.js --[implements]--> docs/auth-spec.md (100%) - src/jwt_handler.js --[implements]--> docs/auth-spec.md (100%) - src/basic_auth.js --[implements]--> docs/auth-spec.md (100%) - -# Your code changed. Your understanding changed with it. +# What did the graph look like 200 commits ago? +$ git mind at main~200 +Graph at main~200 +commit a1b2c3d4 tick 42 +═════════════════════════════════ +Nodes: 12 + file 5 + spec 4 + task 3 + +Edges: 8 + implements 4 + documents 3 + depends-on 1 + +# What does it look like now? +$ git mind status ``` Try an idea in a branch. If it works, merge it — graph and all. If it doesn't, delete the branch. Your knowledge graph supports the same workflow your code does. @@ -89,7 +93,7 @@ Each edge carries a **confidence score** (0.0 to 1.0). Human-created edges defau git-mind is a thin layer on [`@git-stunts/git-warp`](https://github.com/nicktomlin/git-warp) — a multi-writer CRDT graph database that lives in Git. This gives git-mind: -- **Time-travel** — check out any commit, see the graph as it was at that moment. Watch connections appear, change, and deepen over time. +- **Time-travel** — `git mind at HEAD~50` materializes the graph as it was at that commit. Watch connections appear, change, and deepen over time. - **Conflict-free merging** — multiple writers, deterministic convergence. No merge conflicts in your knowledge graph, ever. - **Branch and merge** — try experimental connections in a branch, merge what works, discard what doesn't. Same workflow as your code. - **Git-native storage** — invisible to normal workflows. No files in your working tree, no databases to run. It's just Git. 
diff --git a/action.yml b/action.yml new file mode 100644 index 00000000..208786e5 --- /dev/null +++ b/action.yml @@ -0,0 +1,65 @@ +name: 'git-mind Suggest' +description: 'Run git-mind suggest on a PR and post results as a comment' + +inputs: + agent: + description: 'Agent command for generating suggestions (overrides GITMIND_AGENT)' + required: false + github-token: + description: 'GitHub token for posting comments' + required: false + default: ${{ github.token }} + +runs: + using: 'composite' + steps: + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '22' + + - name: Install dependencies + shell: bash + run: npm ci + working-directory: ${{ github.action_path }} + + - name: Validate agent configuration + shell: bash + env: + INPUT_AGENT: ${{ inputs.agent }} + run: | + # Prefer explicit input; fall back to inherited GITMIND_AGENT env var + EFFECTIVE="${INPUT_AGENT:-${GITMIND_AGENT:-}}" + if [ -z "$EFFECTIVE" ]; then + echo "::error::No agent configured. Set the 'agent' input or the GITMIND_AGENT environment variable." 
+          exit 1 + fi + + - name: Run git-mind suggest + id: suggest + shell: bash + env: + GITMIND_AGENT: ${{ inputs.agent || env.GITMIND_AGENT }} + run: | + BASE_SHA="${{ github.event.pull_request.base.sha }}" + HEAD_SHA="${{ github.event.pull_request.head.sha }}" + SUGGEST_ERR=$(mktemp) + RESULT=$(node ${{ github.action_path }}/bin/git-mind.js suggest --json --context "${BASE_SHA}..${HEAD_SHA}" 2>"$SUGGEST_ERR") || true + if [ -s "$SUGGEST_ERR" ]; then + echo "::warning::git-mind suggest stderr:" + cat "$SUGGEST_ERR" + fi + echo "result<<GITMIND_EOF" >> "$GITHUB_OUTPUT" + echo "$RESULT" >> "$GITHUB_OUTPUT" + echo "GITMIND_EOF" >> "$GITHUB_OUTPUT" + rm -f "$SUGGEST_ERR" + + - name: Post comment + shell: bash + env: + GITHUB_TOKEN: ${{ inputs.github-token }} + SUGGEST_RESULT: ${{ steps.suggest.outputs.result }} + run: | + node ${{ github.action_path }}/action/post-comment.js \ + "${{ github.repository }}" \ + "${{ github.event.pull_request.number }}" diff --git a/action/post-comment.js b/action/post-comment.js new file mode 100644 index 00000000..2e6da5bc --- /dev/null +++ b/action/post-comment.js @@ -0,0 +1,55 @@ +#!/usr/bin/env node + +/** + * Post git-mind suggestions as a GitHub PR comment. + * Called by the composite action after `git mind suggest --json`. + * + * Usage: node post-comment.js <repo> <pr-number> + * Reads SUGGEST_RESULT from environment variable. + */ + +import { execSync } from 'node:child_process'; +import { formatSuggestionsAsMarkdown } from '../src/format-pr.js'; + +const [repo, prNumber] = process.argv.slice(2); + +if (!repo || !prNumber) { + console.error('Usage: post-comment.js <repo> <pr-number>'); + process.exit(1); +} + +// Validate inputs before shell interpolation +if (!/^[A-Za-z0-9._-]+\/[A-Za-z0-9._-]+$/.test(repo)) { + console.error('Invalid repo format. Expected owner/name.'); + process.exit(1); +} +if (!/^\d+$/.test(prNumber)) { + console.error('Invalid PR number. Expected an integer.'); + process.exit(1); +} + +const raw = process.env.SUGGEST_RESULT ?? 
''; + +// Parse the suggest result +let result; +try { + result = JSON.parse(raw); +} catch { + console.log('No valid JSON in suggest result — skipping comment.'); + process.exit(0); +} + +const body = `## git-mind Suggestions\n\n${formatSuggestionsAsMarkdown(result.suggestions)}`; + +// Post comment via gh CLI — pass body as JSON via stdin to avoid shell injection +try { + const payload = JSON.stringify({ body }); + execSync( + `gh api repos/${repo}/issues/${prNumber}/comments --input -`, + { input: payload, stdio: ['pipe', 'inherit', 'inherit'] }, + ); + console.log('Comment posted successfully.'); +} catch (err) { + console.error(`Failed to post comment: ${err.message}`); + process.exit(1); +} diff --git a/bin/git-mind.js b/bin/git-mind.js index dc5c7f90..052e8f27 100755 --- a/bin/git-mind.js +++ b/bin/git-mind.js @@ -5,7 +5,7 @@ * Usage: git mind [options] */ -import { init, link, view, list, remove, nodes, status, importCmd, installHooks, processCommitCmd, doctor, suggest, review } from '../src/cli/commands.js'; +import { init, link, view, list, remove, nodes, status, at, importCmd, importMarkdownCmd, exportCmd, mergeCmd, installHooks, processCommitCmd, doctor, suggest, review } from '../src/cli/commands.js'; const args = process.argv.slice(2); const command = args[0]; @@ -19,6 +19,7 @@ Commands: link Create a semantic edge --type Edge type (default: relates-to) --confidence Confidence 0.0-1.0 (default: 1.0) + --remote Qualify IDs as cross-repo remove Remove a semantic edge --type Edge type (default: relates-to) view [name] Show a named view (or list views) @@ -32,9 +33,21 @@ Commands: --json Output as JSON status Show graph health dashboard --json Output as JSON + at Show graph at a historical point in time + --json Output as JSON import Import a YAML graph file --dry-run, --validate Validate without writing --json Output as JSON + --from-markdown Import from markdown frontmatter + export [file] Export graph to YAML/JSON + --format yaml|json Output format 
(default: yaml) + --prefix <prefix> Filter by node prefix + --json Output as JSON metadata + merge Merge another repo's graph + --from <path> Path to remote repo + --repo-name <name> Override detected repo identifier + --dry-run Preview without writing + --json Output as JSON install-hooks Install post-commit Git hook doctor Run graph integrity checks --fix Auto-fix dangling edges @@ -51,7 +64,7 @@ Edge types: implements, augments, relates-to, blocks, belongs-to, consumed-by, depends-on, documents`); } -const BOOLEAN_FLAGS = new Set(['json', 'fix']); +const BOOLEAN_FLAGS = new Set(['json', 'fix', 'dry-run', 'validate']); /** * Parse --flag value pairs from args. @@ -92,6 +105,7 @@ switch (command) { await link(cwd, source, target, { type: flags.type, confidence: flags.confidence ? parseFloat(flags.confidence) : undefined, + remote: flags.remote, }); break; } @@ -139,16 +153,56 @@ switch (command) { await status(cwd, { json: args.includes('--json') }); break; + case 'at': { + const atRef = args[1]; + if (!atRef || atRef.startsWith('--')) { + console.error('Usage: git mind at <ref>'); + process.exitCode = 1; + break; + } + await at(cwd, atRef, { json: args.includes('--json') }); + break; + } + case 'import': { - const importPath = args[1]; + const importFlags = parseFlags(args.slice(1)); + const dryRun = importFlags['dry-run'] === true || importFlags['validate'] === true; + const jsonMode = importFlags.json === true; + + if (importFlags['from-markdown']) { + await importMarkdownCmd(cwd, importFlags['from-markdown'], { dryRun, json: jsonMode }); + break; + } + + const importPath = args.slice(1).find(a => !a.startsWith('--')); if (!importPath) { - console.error('Usage: git mind import <file> [--dry-run] [--json]'); + console.error('Usage: git mind import <file> [--dry-run] [--json] [--from-markdown <pattern>]'); process.exitCode = 1; break; } - await importCmd(cwd, importPath, { - dryRun: args.includes('--dry-run') || args.includes('--validate'), - json: args.includes('--json'), + await importCmd(cwd, importPath, { 
dryRun, json: jsonMode }); + break; + } + + case 'export': { + const exportFlags = parseFlags(args.slice(1)); + const exportFile = args.slice(1).find(a => !a.startsWith('--')); + await exportCmd(cwd, { + file: exportFile, + format: exportFlags.format, + prefix: exportFlags.prefix, + json: exportFlags.json ?? false, + }); + break; + } + + case 'merge': { + const mergeFlags = parseFlags(args.slice(1)); + await mergeCmd(cwd, { + from: mergeFlags.from, + repoName: mergeFlags['repo-name'], + dryRun: mergeFlags['dry-run'] === true, + json: mergeFlags.json === true, }); break; } @@ -189,6 +243,7 @@ switch (command) { const reviewFlags = parseFlags(args.slice(1)); await review(cwd, { batch: reviewFlags.batch, + index: reviewFlags.index ? parseInt(reviewFlags.index, 10) : undefined, json: reviewFlags.json ?? false, }); break; diff --git a/package.json b/package.json index 15be155a..79dc4557 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@neuroglyph/git-mind", - "version": "2.0.0-alpha.1", + "version": "2.0.0-alpha.3", "description": "A project knowledge graph tool built on git-warp", "type": "module", "license": "Apache-2.0", diff --git a/src/cli/commands.js b/src/cli/commands.js index 31b7991e..fd244dbe 100644 --- a/src/cli/commands.js +++ b/src/cli/commands.js @@ -11,12 +11,17 @@ import { createEdge, queryEdges, removeEdge, EDGE_TYPES } from '../edges.js'; import { getNodes, hasNode, getNode, getNodesByPrefix } from '../nodes.js'; import { computeStatus } from '../status.js'; import { importFile } from '../import.js'; +import { importFromMarkdown } from '../frontmatter.js'; +import { exportGraph, serializeExport, exportToFile } from '../export.js'; +import { qualifyNodeId } from '../remote.js'; +import { mergeFromRepo } from '../merge.js'; import { renderView, listViews } from '../views.js'; import { processCommit } from '../hooks.js'; +import { getEpochForRef } from '../epoch.js'; import { runDoctor, fixIssues } from '../doctor.js'; import { 
generateSuggestions } from '../suggest.js'; import { getPendingSuggestions, acceptSuggestion, rejectSuggestion, skipSuggestion, batchDecision } from '../review.js'; -import { success, error, info, warning, formatEdge, formatView, formatNode, formatNodeList, formatStatus, formatImportResult, formatDoctorResult, formatSuggestions, formatReviewItem, formatDecisionSummary } from './format.js'; +import { success, error, info, warning, formatEdge, formatView, formatNode, formatNodeList, formatStatus, formatExportResult, formatImportResult, formatDoctorResult, formatSuggestions, formatReviewItem, formatDecisionSummary, formatAtStatus } from './format.js'; /** * Initialize a git-mind graph in the current repo. @@ -37,15 +42,19 @@ export async function init(cwd) { * @param {string} cwd * @param {string} source * @param {string} target - * @param {{ type?: string, confidence?: number }} opts + * @param {{ type?: string, confidence?: number, remote?: string }} opts */ export async function link(cwd, source, target, opts = {}) { const type = opts.type ?? 'relates-to'; + // Qualify node IDs with remote repo if --remote is specified + const src = opts.remote ? qualifyNodeId(source, opts.remote) : source; + const tgt = opts.remote ? qualifyNodeId(target, opts.remote) : target; + try { const graph = await loadGraph(cwd); - await createEdge(graph, { source, target, type, confidence: opts.confidence }); - console.log(success(`${source} --[${type}]--> ${target}`)); + await createEdge(graph, { source: src, target: tgt, type, confidence: opts.confidence }); + console.log(success(`${src} --[${type}]--> ${tgt}`)); } catch (err) { console.error(error(err.message)); process.exitCode = 1; @@ -253,6 +262,55 @@ export async function status(cwd, opts = {}) { } } +/** + * Show the graph at a historical point in time via epoch markers. + * @param {string} cwd + * @param {string} ref - Git ref (HEAD, HEAD~5, branch name, SHA, etc.) 
/**
 * Show the graph at a historical point in time via epoch markers.
 *
 * @param {string} cwd - Working directory containing the graph
 * @param {string} ref - Git ref (HEAD, HEAD~5, branch name, SHA, etc.)
 * @param {{ json?: boolean }} opts
 */
export async function at(cwd, ref, opts = {}) {
  if (!ref) {
    console.error(error('Usage: git mind at <ref>'));
    process.exitCode = 1;
    return;
  }

  try {
    const graph = await loadGraph(cwd);
    const found = await getEpochForRef(graph, cwd, ref);

    if (!found) {
      console.error(error(`No epoch marker found for "${ref}" or any of its ancestors`));
      process.exitCode = 1;
      return;
    }

    const { sha, epoch } = found;

    // Replay the CRDT only up to the Lamport tick recorded for that commit.
    await graph.materialize({ ceiling: epoch.tick });
    const statusResult = await computeStatus(graph);

    if (opts.json) {
      const payload = {
        ref,
        sha: sha.slice(0, 8),
        fullSha: sha,
        tick: epoch.tick,
        nearest: epoch.nearest ?? false,
        recordedAt: epoch.recordedAt,
        status: statusResult,
      };
      console.log(JSON.stringify(payload, null, 2));
    } else {
      console.log(formatAtStatus(ref, sha, epoch, statusResult));
    }
  } catch (err) {
    console.error(error(err.message));
    process.exitCode = 1;
  }
}

/**
 * Import nodes and edges from markdown frontmatter.
 *
 * @param {string} cwd - Working directory containing the graph
 * @param {string} pattern - Glob pattern for markdown files
 * @param {{ dryRun?: boolean, json?: boolean }} opts
 */
export async function importMarkdownCmd(cwd, pattern, opts = {}) {
  try {
    const graph = await loadGraph(cwd);
    const outcome = await importFromMarkdown(graph, cwd, pattern, { dryRun: opts.dryRun });

    console.log(opts.json ? JSON.stringify(outcome, null, 2) : formatImportResult(outcome));

    if (!outcome.valid) {
      process.exitCode = 1;
    }
  } catch (err) {
    console.error(error(err.message));
    process.exitCode = 1;
  }
}
/**
 * Export the graph to a file or stdout.
 *
 * @param {string} cwd - Working directory containing the graph
 * @param {{ file?: string, format?: string, prefix?: string, json?: boolean }} opts
 */
export async function exportCmd(cwd, opts = {}) {
  try {
    const graph = await loadGraph(cwd);
    const format = opts.format ?? 'yaml';

    if (opts.file) {
      // File mode: write to disk, then report what was written.
      const result = await exportToFile(graph, opts.file, { format, prefix: opts.prefix });
      console.log(opts.json ? JSON.stringify(result, null, 2) : formatExportResult(result));
      return;
    }

    // stdout mode: emit the serialized graph directly.
    const data = await exportGraph(graph, { prefix: opts.prefix });
    if (opts.json) {
      console.log(JSON.stringify(data, null, 2));
    } else {
      process.stdout.write(serializeExport(data, format));
    }
  } catch (err) {
    console.error(error(err.message));
    process.exitCode = 1;
  }
}

/**
 * Merge a remote repository's graph into the local graph.
 *
 * @param {string} cwd - Working directory containing the local graph
 * @param {{ from: string, repoName?: string, dryRun?: boolean, json?: boolean }} opts
 */
export async function mergeCmd(cwd, opts = {}) {
  if (!opts.from) {
    console.error(error('Usage: git mind merge --from <path> [--repo-name <name>]'));
    process.exitCode = 1;
    return;
  }

  try {
    const graph = await loadGraph(cwd);
    const result = await mergeFromRepo(graph, opts.from, {
      repoName: opts.repoName,
      dryRun: opts.dryRun,
    });

    if (opts.json) {
      console.log(JSON.stringify(result, null, 2));
    } else if (result.dryRun) {
      console.log(info(`Dry run: would merge ${result.nodes} node(s), ${result.edges} edge(s) from ${result.repoName}`));
    } else {
      console.log(success(`Merged ${result.nodes} node(s), ${result.edges} edge(s) from ${result.repoName}`));
    }
  } catch (err) {
    console.error(error(err.message));
    process.exitCode = 1;
  }
}
* @param {string} cwd @@ -349,6 +502,35 @@ export async function review(cwd, opts = {}) { process.exitCode = 1; return; } + + // Individual item by index + if (opts.index !== undefined) { + const pending = await getPendingSuggestions(graph); + if (pending.length === 0) { + console.error(error('No pending suggestions to review.')); + process.exitCode = 1; + return; + } + const idx = opts.index - 1; // 1-indexed to 0-indexed + if (idx < 0 || idx >= pending.length) { + console.error(error(`Index ${opts.index} out of range (1-${pending.length})`)); + process.exitCode = 1; + return; + } + const suggestion = pending[idx]; + const decision = opts.batch === 'accept' + ? await acceptSuggestion(graph, suggestion) + : await rejectSuggestion(graph, suggestion); + const result = { processed: 1, decisions: [decision] }; + + if (opts.json) { + console.log(JSON.stringify(result, null, 2)); + } else { + console.log(formatDecisionSummary(result)); + } + return; + } + const result = await batchDecision(graph, opts.batch); if (opts.json) { diff --git a/src/cli/format.js b/src/cli/format.js index db89f550..a3e3fe80 100644 --- a/src/cli/format.js +++ b/src/cli/format.js @@ -6,6 +6,23 @@ import chalk from 'chalk'; import figures from 'figures'; +/** + * Render a sorted key→count table (shared by formatStatus and formatAtStatus). + * @param {Record} entries - Key→count map + * @param {string[]} lines - Output array to push into + * @param {{ pct?: number }} [opts] - If pct is provided, show percentage based on total + */ +function renderCountTable(entries, lines, opts = {}) { + const sorted = Object.entries(entries).sort(([, a], [, b]) => b - a); + for (const [key, count] of sorted) { + let suffix = ''; + if (opts.pct !== undefined && opts.pct > 0) { + suffix = ` ${chalk.dim(`(${Math.round((count / opts.pct) * 100)}%)`)}`; + } + lines.push(` ${chalk.yellow(key.padEnd(14))} ${String(count).padStart(3)}${suffix}`); + } +} + /** * Format a success message. 
* @param {string} msg @@ -136,23 +153,12 @@ export function formatStatus(status) { // Nodes section lines.push(`${chalk.bold('Nodes:')} ${status.nodes.total}`); - const prefixes = Object.entries(status.nodes.byPrefix) - .sort(([, a], [, b]) => b - a); - for (const [prefix, count] of prefixes) { - const pct = status.nodes.total > 0 - ? Math.round((count / status.nodes.total) * 100) - : 0; - lines.push(` ${chalk.yellow(prefix.padEnd(14))} ${String(count).padStart(3)} ${chalk.dim(`(${pct}%)`)}`); - } + renderCountTable(status.nodes.byPrefix, lines, { pct: status.nodes.total }); lines.push(''); // Edges section lines.push(`${chalk.bold('Edges:')} ${status.edges.total}`); - const types = Object.entries(status.edges.byType) - .sort(([, a], [, b]) => b - a); - for (const [type, count] of types) { - lines.push(` ${chalk.yellow(type.padEnd(14))} ${String(count).padStart(3)}`); - } + renderCountTable(status.edges.byType, lines); lines.push(''); // Health section @@ -309,6 +315,50 @@ export function formatDecisionSummary(result) { return lines.join('\n'); } +/** + * Format an export result for terminal display. + * @param {{stats: {nodes: number, edges: number}, path?: string}} result + * @param {boolean} [toStdout=false] + * @returns {string} + */ +export function formatExportResult(result, toStdout = false) { + if (toStdout) { + return `${chalk.green(figures.tick)} Exported ${result.stats.nodes} node(s), ${result.stats.edges} edge(s)`; + } + return `${chalk.green(figures.tick)} Exported ${result.stats.nodes} node(s), ${result.stats.edges} edge(s) to ${chalk.cyan(result.path)}`; +} + +/** + * Format an `at` (time-travel) status for terminal display. 
/**
 * Format an `at` (time-travel) status for terminal display.
 *
 * @param {string} ref - The git ref that was resolved
 * @param {string} sha - Resolved commit SHA
 * @param {import('../epoch.js').EpochInfo} epoch - Epoch marker info
 * @param {import('../status.js').GraphStatus} status - Computed status at that tick
 * @returns {string}
 */
export function formatAtStatus(ref, sha, epoch, status) {
  const out = [];

  out.push(chalk.bold(`Graph at ${ref}`));
  // One summary line: abbreviated commit, Lamport tick, and whether this epoch
  // came from an ancestor walk rather than an exact match.
  const nearestNote = epoch.nearest ? chalk.dim(' (nearest epoch)') : '';
  out.push(`commit ${chalk.cyan(sha.slice(0, 8))} tick ${chalk.yellow(String(epoch.tick))}${nearestNote}`);
  out.push(chalk.dim('═'.repeat(32)));
  out.push('');

  out.push(`${chalk.bold('Nodes:')} ${status.nodes.total}`);
  renderCountTable(status.nodes.byPrefix, out);
  out.push('');

  out.push(`${chalk.bold('Edges:')} ${status.edges.total}`);
  renderCountTable(status.edges.byType, out);

  return out.join('\n');
}
/**
 * @module epoch
 * Epoch markers for time-travel — correlate git commits to CRDT Lamport ticks.
 *
 * Epoch nodes (`epoch:<sha12>`) are stored in the CRDT graph itself, so they
 * travel with push/pull/merge like any other graph data.
 */

import { execFileSync } from 'node:child_process';

/**
 * @typedef {object} EpochInfo
 * @property {number} tick - Lamport tick at the time of recording
 * @property {string} fullSha - Full commit SHA
 * @property {string} recordedAt - ISO timestamp
 * @property {boolean} [nearest] - True if this epoch was found via ancestor walk
 */

/**
 * Get the current maximum Lamport tick from the graph.
 *
 * @param {import('@git-stunts/git-warp').default} graph
 * @returns {Promise<number>}
 */
export async function getCurrentTick(graph) {
  const ticks = await graph.discoverTicks();
  return ticks.maxTick;
}

/**
 * Record an epoch marker correlating a git commit SHA to a Lamport tick.
 *
 * @param {import('@git-stunts/git-warp').default} graph
 * @param {string} commitSha - Full commit SHA (at least 12 characters)
 * @param {number} tick - Lamport tick to record
 * @returns {Promise<void>}
 */
export async function recordEpoch(graph, commitSha, tick) {
  // Epoch node IDs use a 12-character SHA prefix.
  const nodeId = `epoch:${commitSha.slice(0, 12)}`;

  const patch = await graph.createPatch();

  const alreadyExists = await graph.hasNode(nodeId);
  if (!alreadyExists) {
    patch.addNode(nodeId);
  }

  patch.setProperty(nodeId, 'tick', tick);
  patch.setProperty(nodeId, 'fullSha', commitSha);
  patch.setProperty(nodeId, 'recordedAt', new Date().toISOString());

  await patch.commit();
}

/**
 * Look up an epoch marker by commit SHA.
 *
 * @param {import('@git-stunts/git-warp').default} graph
 * @param {string} commitSha - Full commit SHA (at least 12 characters)
 * @returns {Promise<EpochInfo|null>}
 */
export async function lookupEpoch(graph, commitSha) {
  const nodeId = `epoch:${commitSha.slice(0, 12)}`;

  if (!(await graph.hasNode(nodeId))) return null;

  const props = await graph.getNodeProps(nodeId);
  if (!props) return null;

  return {
    tick: props.get('tick'),
    fullSha: props.get('fullSha'),
    recordedAt: props.get('recordedAt'),
  };
}

/**
 * Walk up to `maxWalk` ancestor commits looking for an epoch marker.
 * Returns the nearest ancestor's epoch, or null if none found.
 *
 * @param {import('@git-stunts/git-warp').default} graph
 * @param {string} cwd - Repository working directory
 * @param {string} targetSha - Starting commit SHA
 * @param {number} [maxWalk=100] - Maximum ancestors to check
 * @returns {Promise<EpochInfo|null>}
 */
export async function lookupNearestEpoch(graph, cwd, targetSha, maxWalk = 100) {
  let ancestors;
  try {
    const raw = execFileSync(
      'git', ['rev-list', `--max-count=${maxWalk}`, targetSha],
      { cwd, encoding: 'utf-8' },
    ).trim();
    ancestors = raw === '' ? [] : raw.split('\n');
  } catch {
    // Not a repo / unknown ref — treat as "no epoch found".
    return null;
  }

  // ancestors[0] is targetSha itself; the caller has already tried a direct lookup.
  for (const sha of ancestors.slice(1)) {
    const epoch = await lookupEpoch(graph, sha);
    if (epoch) {
      return { ...epoch, nearest: true };
    }
  }

  return null;
}

/**
 * Resolve a git ref to a commit SHA, then find its epoch marker.
 * Falls back to nearest ancestor epoch if no exact match.
 *
 * @param {import('@git-stunts/git-warp').default} graph
 * @param {string} cwd - Repository working directory
 * @param {string} ref - Git ref (branch, tag, SHA, HEAD~N, etc.)
 * @returns {Promise<{sha: string, epoch: EpochInfo}|null>}
 */
export async function getEpochForRef(graph, cwd, ref) {
  let sha;
  try {
    sha = execFileSync('git', ['rev-parse', ref], { cwd, encoding: 'utf-8' }).trim();
  } catch {
    return null;
  }

  // Exact match first, then the ancestor walk.
  const epoch = (await lookupEpoch(graph, sha)) ?? (await lookupNearestEpoch(graph, cwd, sha));
  return epoch ? { sha, epoch } : null;
}
/** Node prefixes excluded from export by default (system-generated). */
const EXCLUDED_PREFIXES = new Set(['decision', 'commit', 'epoch']);

/**
 * @typedef {object} ExportOptions
 * @property {string} [prefix] - Only include nodes matching this prefix
 * @property {string} [format='yaml'] - Output format: 'yaml' or 'json'
 */

/**
 * @typedef {object} ExportData
 * @property {number} version - Schema version (always 1)
 * @property {Array<{id: string, properties?: Record<string, unknown>}>} nodes
 * @property {Array<{source: string, target: string, type: string, confidence?: number, rationale?: string}>} edges
 */

/**
 * Export the graph as a v1 import-compatible data structure.
 *
 * @param {import('@git-stunts/git-warp').default} graph
 * @param {ExportOptions} [opts]
 * @returns {Promise<ExportData>}
 */
export async function exportGraph(graph, opts = {}) {
  const prefixFilter = opts.prefix ?? null;

  // A node survives filtering when it is neither system-generated nor
  // excluded by an explicit --prefix filter.
  const keep = (id) => {
    const prefix = extractPrefix(id);
    if (prefix && EXCLUDED_PREFIXES.has(prefix)) return false;
    return !(prefixFilter && prefix !== prefixFilter);
  };

  const nodeIds = (await graph.getNodes()).filter(keep);
  const nodeIdSet = new Set(nodeIds);

  const nodes = [];
  for (const id of nodeIds) {
    const props = await graph.getNodeProps(id);
    const entry = { id };
    if (props && props.size > 0) {
      entry.properties = Object.fromEntries(props);
    }
    nodes.push(entry);
  }

  // Keep only edges whose endpoints both survived the node filter.
  const edges = [];
  for (const edge of await graph.getEdges()) {
    if (!nodeIdSet.has(edge.from) || !nodeIdSet.has(edge.to)) continue;

    const entry = { source: edge.from, target: edge.to, type: edge.label };
    for (const [key, value] of Object.entries(edge.props ?? {})) {
      // Timestamps are system-managed and not round-trippable.
      if (!EXCLUDED_EDGE_PROPS.has(key) && value !== undefined && value !== null) {
        entry[key] = value;
      }
    }
    edges.push(entry);
  }

  return { version: 1, nodes, edges };
}

/**
 * Serialize export data to a string.
 *
 * @param {ExportData} data
 * @param {'yaml'|'json'} [format='yaml']
 * @returns {string}
 * @throws {Error} When the format is neither 'yaml' nor 'json'
 */
export function serializeExport(data, format = 'yaml') {
  switch (format) {
    case 'json':
      return JSON.stringify(data, null, 2);
    case 'yaml':
      return yaml.dump(data, { lineWidth: -1, noRefs: true, sortKeys: false });
    default:
      throw new Error(`Unsupported export format: "${format}". Expected "yaml" or "json".`);
  }
}

/**
 * Export the graph to a file.
 *
 * @param {import('@git-stunts/git-warp').default} graph
 * @param {string} filePath - Output file path
 * @param {ExportOptions} [opts]
 * @returns {Promise<{stats: {nodes: number, edges: number}, path: string}>}
 */
export async function exportToFile(graph, filePath, opts = {}) {
  const data = await exportGraph(graph, opts);
  // Infer the format from the file extension unless explicitly given.
  const inferred = filePath.endsWith('.json') ? 'json' : 'yaml';
  const content = serializeExport(data, opts.format ?? inferred);
  await writeFile(filePath, content, 'utf-8');
  return {
    stats: { nodes: data.nodes.length, edges: data.edges.length },
    path: filePath,
  };
}

/** Escape backslashes and pipe characters for markdown table cells. */
function escapeCell(str) {
  // Single pass: prefix every backslash or pipe with a backslash.
  return str.replace(/[\\|]/g, (c) => `\\${c}`);
}
+ * + * @param {Array<{source: string, target: string, type: string, confidence: number, rationale?: string}>} suggestions + * @returns {string} + */ +export function formatSuggestionsAsMarkdown(suggestions) { + if (!suggestions || suggestions.length === 0) { + return '> No new edge suggestions for this PR.'; + } + + const lines = [ + '| # | Source | Target | Type | Confidence | Rationale |', + '|---|--------|--------|------|------------|-----------|', + ]; + + for (let i = 0; i < suggestions.length; i++) { + const s = suggestions[i]; + const conf = `${(((s.confidence ?? 0)) * 100).toFixed(0)}%`; + const rationale = escapeCell(s.rationale ?? ''); + const source = escapeCell((s.source ?? '').replace(/`/g, '')); + const target = escapeCell((s.target ?? '').replace(/`/g, '')); + lines.push(`| ${i + 1} | \`${source}\` | \`${target}\` | ${escapeCell(s.type ?? '')} | ${conf} | ${rationale} |`); + } + + lines.push(''); + lines.push('
Commands'); + lines.push(''); + for (let i = 0; i < suggestions.length; i++) { + lines.push(`- \`/gitmind accept ${i + 1}\` — Accept suggestion ${i + 1}`); + lines.push(`- \`/gitmind reject ${i + 1}\` — Reject suggestion ${i + 1}`); + } + lines.push(`- \`/gitmind accept-all\` — Accept all suggestions`); + lines.push(''); + lines.push('
'); + lines.push(''); + lines.push('---'); + lines.push('*Posted by [git-mind](https://github.com/neuroglyph/git-mind)*'); + + return lines.join('\n'); +} + +/** + * Parse a /gitmind slash command from a comment body. + * + * @param {string} body - Comment body text + * @returns {{ command: string, index?: number } | null} + */ +export function parseReviewCommand(body) { + if (typeof body !== 'string') return null; + + const match = body.match(/\/gitmind\s+(accept-all|accept|reject)(?:\s+(\d+))?/); + if (!match) return null; + + const command = match[1]; + + if (command === 'accept-all') { + return { command: 'accept-all' }; + } + + const index = match[2] ? parseInt(match[2], 10) : undefined; + if (index === undefined || index < 1) return null; // accept/reject require a positive 1-indexed value + + return { command, index }; +} diff --git a/src/frontmatter.js b/src/frontmatter.js new file mode 100644 index 00000000..21e26a8b --- /dev/null +++ b/src/frontmatter.js @@ -0,0 +1,229 @@ +/** + * @module frontmatter + * Markdown frontmatter parsing and graph extraction. + * Converts markdown files with YAML frontmatter into graph nodes and edges. + */ + +import { readFile, readdir, stat } from 'node:fs/promises'; +import { join, relative, extname } from 'node:path'; +import yaml from 'js-yaml'; +import { EDGE_TYPES } from './validators.js'; +import { importData } from './import.js'; + +/** Edge type fields recognized in frontmatter. */ +const EDGE_TYPE_SET = new Set(EDGE_TYPES); + +/** + * Parse YAML frontmatter from markdown content. 
/**
 * Parse YAML frontmatter from markdown content.
 *
 * @param {string} content - Raw markdown file content
 * @returns {{ frontmatter: Record<string, unknown> | null, body: string }}
 */
export function parseFrontmatter(content) {
  // Normalize CRLF so the delimiter handling only has to deal with '\n'.
  const text = content.replace(/\r\n/g, '\n');

  if (!text.startsWith('---')) {
    return { frontmatter: null, body: text };
  }

  const headerEnd = text.indexOf('\n');
  if (headerEnd === -1) {
    return { frontmatter: null, body: text };
  }

  // Standalone closing delimiter: '\n---' followed by a newline or end-of-string.
  const close = /\n---(?:\n|$)/.exec(text.slice(headerEnd));
  if (!close) {
    return { frontmatter: null, body: text };
  }

  const yamlEnd = headerEnd + close.index;
  const body = text.slice(yamlEnd + close[0].length);

  try {
    const parsed = yaml.load(text.slice(headerEnd + 1, yamlEnd));
    // Only a plain mapping counts as frontmatter; scalars/arrays are rejected.
    const isMapping = Boolean(parsed) && typeof parsed === 'object' && !Array.isArray(parsed);
    return isMapping ? { frontmatter: parsed, body } : { frontmatter: null, body };
  } catch {
    return { frontmatter: null, body: text };
  }
}

/**
 * Extract graph node and edges from parsed frontmatter.
 *
 * @param {string} relativePath - File path relative to the base directory
 * @param {Record<string, unknown>} frontmatter - Parsed frontmatter
 * @returns {{ node: {id: string, properties?: Record<string, unknown>}, edges: Array<{source: string, target: string, type: string}> }}
 */
export function extractGraphData(relativePath, frontmatter) {
  // Node ID: an explicit string `id` field wins, otherwise derive
  // "doc:<path without extension>".
  const ext = extname(relativePath);
  const stem = ext ? relativePath.slice(0, -ext.length) : relativePath;
  const id = typeof frontmatter.id === 'string' ? frontmatter.id : `doc:${stem}`;

  const node = { id };
  if (typeof frontmatter.title === 'string') {
    node.properties = { title: frontmatter.title };
  }

  // Any frontmatter key named after an edge type yields edges; values may be
  // a single target string or an array of them.
  const edges = [];
  for (const type of EDGE_TYPE_SET) {
    const value = frontmatter[type];
    if (!value) continue;
    for (const target of Array.isArray(value) ? value : [value]) {
      if (typeof target === 'string') {
        edges.push({ source: id, target, type });
      }
    }
  }

  return { node, edges };
}

/**
 * Find markdown files matching a glob pattern.
 *
 * Supported patterns:
 * - `**\/*.md` — all .md files recursively from basePath
 * - `docs/**\/*.md` — all .md files recursively under docs/
 * - `*.md` — top-level .md files in basePath
 * - `docs/*.md` — top-level .md files in docs/
 *
 * Limitations:
 * - Exact file paths (e.g., "docs/README.md") are not matched directly
 * - Complex globs with character classes are not supported
 * - Returns empty results for non-existent start directories (no error thrown)
 *
 * @param {string} basePath - Root directory to search from
 * @param {string} pattern - Glob pattern (see supported patterns above)
 * @returns {Promise<string[]>} Sorted absolute paths of matching .md files
 */
export async function findMarkdownFiles(basePath, pattern) {
  const recursive = pattern.includes('**');
  const found = [];

  // Collect .md files in `dir`; descend into subdirectories only when recursing.
  const collect = async (dir, recurse) => {
    for (const entry of await readdir(dir, { withFileTypes: true })) {
      const fullPath = join(dir, entry.name);
      if (entry.isDirectory()) {
        if (recurse) await collect(fullPath, true);
      } else if (entry.isFile() && entry.name.endsWith('.md')) {
        found.push(fullPath);
      }
    }
  };

  // The fixed leading path segments (before any wildcard) select the start dir.
  let startDir = basePath;
  for (const segment of pattern.split('/')) {
    if (segment.includes('*')) break;
    startDir = join(startDir, segment);
  }

  try {
    if ((await stat(startDir)).isDirectory()) {
      await collect(startDir, recursive);
    }
  } catch (err) {
    // A missing start directory yields an empty result rather than an error.
    if (err.code !== 'ENOENT' && err.code !== 'ENOTDIR') throw err;
  }

  return found.sort();
}

/**
 * Import graph data from markdown frontmatter.
 *
 * @param {import('@git-stunts/git-warp').default} graph
 * @param {string} cwd - Working directory for resolving paths
 * @param {string} pattern - Glob pattern for markdown files
 * @param {{ dryRun?: boolean }} [opts]
 * @returns {Promise<import('./import.js').ImportResult>}
 */
export async function importFromMarkdown(graph, cwd, pattern, opts = {}) {
  const files = await findMarkdownFiles(cwd, pattern);

  const emptyResult = (warning) => ({
    valid: true,
    errors: [],
    warnings: [warning],
    stats: { nodes: 0, edges: 0 },
    dryRun: opts.dryRun ?? false,
  });

  if (files.length === 0) {
    return emptyResult('No markdown files found matching pattern');
  }

  const nodes = [];
  const edges = [];

  for (const filePath of files) {
    const { frontmatter } = parseFrontmatter(await readFile(filePath, 'utf-8'));
    if (!frontmatter) continue; // files without frontmatter are ignored

    const { node, edges: fileEdges } = extractGraphData(relative(cwd, filePath), frontmatter);
    nodes.push(node);
    edges.push(...fileEdges);
  }

  if (nodes.length === 0) {
    return emptyResult('No markdown files with frontmatter found');
  }

  // Edge targets that no file declares still need node entries.
  const knownIds = new Set(nodes.map((n) => n.id));
  for (const { target } of edges) {
    if (!knownIds.has(target)) {
      knownIds.add(target);
      nodes.push({ id: target });
    }
  }

  // Build v1 import data and run through the validated pipeline.
  return importData(graph, { version: 1, nodes, edges }, opts);
}
Import a YAML file into the graph. + * Import a validated v1 data object into the graph. + * This is the shared pipeline used by both YAML import and frontmatter import. * * @param {import('@git-stunts/git-warp').default} graph - * @param {string} filePath - Path to the YAML file + * @param {object} data - v1 import data ({ version, nodes, edges }) * @param {{ dryRun?: boolean }} [opts] * @returns {Promise} */ -export async function importFile(graph, filePath, opts = {}) { +export async function importData(graph, data, opts = {}) { const dryRun = opts.dryRun ?? false; - // Parse - const { data, parseError } = await parseImportFile(filePath); - if (parseError) { - return { valid: false, errors: [parseError], warnings: [], stats: { nodes: 0, edges: 0 }, dryRun }; - } - // Validate const { valid, errors, warnings } = await validateImportData(data, graph); if (!valid) { @@ -250,3 +245,21 @@ export async function importFile(graph, filePath, opts = {}) { const stats = await writeImport(graph, data); return { valid: true, errors: [], warnings, stats, dryRun: false }; } + +/** + * Import a YAML file into the graph. + * + * @param {import('@git-stunts/git-warp').default} graph + * @param {string} filePath - Path to the YAML file + * @param {{ dryRun?: boolean }} [opts] + * @returns {Promise} + */ +export async function importFile(graph, filePath, opts = {}) { + // Parse + const { data, parseError } = await parseImportFile(filePath); + if (parseError) { + return { valid: false, errors: [parseError], warnings: [], stats: { nodes: 0, edges: 0 }, dryRun: opts.dryRun ?? 
false }; + } + + return importData(graph, data, opts); +} diff --git a/src/index.js b/src/index.js index 9f17065d..d32b4a84 100644 --- a/src/index.js +++ b/src/index.js @@ -7,7 +7,9 @@ export { initGraph, loadGraph, saveGraph } from './graph.js'; export { createEdge, queryEdges, removeEdge, EDGE_TYPES } from './edges.js'; export { getNodes, hasNode, getNode, getNodesByPrefix } from './nodes.js'; export { computeStatus } from './status.js'; -export { importFile, parseImportFile, validateImportData } from './import.js'; +export { importFile, importData, parseImportFile, validateImportData } from './import.js'; +export { importFromMarkdown, parseFrontmatter } from './frontmatter.js'; +export { exportGraph, serializeExport, exportToFile } from './export.js'; export { validateNodeId, validateEdgeType, validateConfidence, validateEdge, extractPrefix, classifyPrefix, isLowConfidence, @@ -16,10 +18,20 @@ export { } from './validators.js'; export { defineView, declareView, renderView, listViews, resetViews } from './views.js'; export { parseDirectives, processCommit } from './hooks.js'; +export { + getCurrentTick, recordEpoch, lookupEpoch, + lookupNearestEpoch, getEpochForRef, +} from './epoch.js'; export { detectDanglingEdges, detectOrphanMilestones, detectOrphanNodes, detectLowConfidenceEdges, runDoctor, fixIssues, } from './doctor.js'; +export { + parseCrossRepoId, buildCrossRepoId, isCrossRepoId, + extractRepo, qualifyNodeId, CROSS_REPO_ID_REGEX, +} from './remote.js'; +export { mergeFromRepo, detectRepoIdentifier } from './merge.js'; +export { formatSuggestionsAsMarkdown, parseReviewCommand } from './format-pr.js'; export { extractFileContext, extractCommitContext, extractGraphContext, buildPrompt, extractContext, diff --git a/src/merge.js b/src/merge.js new file mode 100644 index 00000000..265acbfe --- /dev/null +++ b/src/merge.js @@ -0,0 +1,123 @@ +/** + * @module merge + * Multi-repo graph merge — import another repo's graph with cross-repo qualification. 
import { execSync } from 'node:child_process';

/**
 * Parse a Git remote URL into an "owner/name" repository identifier.
 *
 * Handles both scp-like SSH URLs ("git@github.com:owner/name.git") and
 * path-style URLs ("https://github.com/owner/name.git"). A trailing ".git"
 * suffix and trailing slash are stripped. Unlike the previous regexes, dots
 * inside the repository name (e.g. "vercel/next.js") are preserved rather
 * than causing the parse to fail.
 *
 * @param {string} url - Remote URL as reported by `git remote get-url`
 * @returns {string|null} "owner/name", or null if the URL is not recognized
 */
export function parseRemoteUrl(url) {
  // scp-like SSH syntax: user@host:owner/name(.git)
  if (url.includes('@')) {
    const sshMatch = url.match(/:([^/:]+)\/([^/]+?)(?:\.git)?\/?$/);
    if (sshMatch) return `${sshMatch[1]}/${sshMatch[2]}`;
  }

  // Path-style syntax (HTTPS, ssh://, git://): .../owner/name(.git)
  const pathMatch = url.match(/\/([^/]+)\/([^/]+?)(?:\.git)?\/?$/);
  if (pathMatch) return `${pathMatch[1]}/${pathMatch[2]}`;

  return null;
}

/**
 * Detect the repository identifier from a Git remote URL.
 * Parses the "origin" remote to extract owner/name.
 *
 * @param {string} repoPath - Path to the Git repository
 * @returns {string|null} Repository identifier (owner/name), or null if not detectable
 */
export function detectRepoIdentifier(repoPath) {
  try {
    const url = execSync('git remote get-url origin', {
      cwd: repoPath,
      encoding: 'utf-8',
      stdio: ['pipe', 'pipe', 'pipe'],
    }).trim();
    return parseRemoteUrl(url);
  } catch {
    // No repo, no git binary, or no "origin" remote.
    return null;
  }
}

/** Edge properties to preserve during merge. */
const MERGE_EDGE_PROPS = new Set(['confidence', 'rationale']);

/**
 * @typedef {object} MergeResult
 * @property {number} nodes - Number of nodes merged
 * @property {number} edges - Number of edges merged
 * @property {string} repoName - Repository identifier used for qualification
 * @property {boolean} dryRun - Whether this was a dry-run
 */

/**
 * Merge a remote repository's graph into the local graph.
 * All remote nodes are qualified with cross-repo IDs.
 *
 * @param {import('@git-stunts/git-warp').default} localGraph
 * @param {string} remoteRepoPath - Path to the remote Git repository
 * @param {{ repoName?: string, dryRun?: boolean }} [opts]
 * @returns {Promise<MergeResult>}
 * @throws {Error} When no repository identifier can be determined
 */
export async function mergeFromRepo(localGraph, remoteRepoPath, opts = {}) {
  const repoName = opts.repoName ?? detectRepoIdentifier(remoteRepoPath);
  if (!repoName) {
    throw new Error('Could not detect repository identifier. Use --repo-name to specify it.');
  }

  // Open the remote graph and snapshot its contents.
  const remoteGraph = await initGraph(remoteRepoPath);
  const remoteNodes = await remoteGraph.getNodes();
  const remoteEdges = await remoteGraph.getEdges();

  if (opts.dryRun ?? false) {
    return { nodes: remoteNodes.length, edges: remoteEdges.length, repoName, dryRun: true };
  }

  // Everything goes into a single atomic patch.
  const patch = await localGraph.createPatch();

  for (const nodeId of remoteNodes) {
    const qualified = qualifyNodeId(nodeId, repoName);
    patch.addNode(qualified);

    const props = await remoteGraph.getNodeProps(nodeId);
    if (props) {
      for (const [key, value] of props) {
        patch.setProperty(qualified, key, value);
      }
    }
  }

  for (const edge of remoteEdges) {
    const from = qualifyNodeId(edge.from, repoName);
    const to = qualifyNodeId(edge.to, repoName);
    patch.addEdge(from, to, edge.label);

    // Only a whitelist of edge properties survives the merge.
    for (const [key, value] of Object.entries(edge.props ?? {})) {
      if (MERGE_EDGE_PROPS.has(key) && value !== undefined && value !== null) {
        patch.setEdgeProperty(from, to, edge.label, key, value);
      }
    }
  }

  // Guard against committing an empty patch.
  if (remoteNodes.length > 0 || remoteEdges.length > 0) {
    await patch.commit();
  }

  return { nodes: remoteNodes.length, edges: remoteEdges.length, repoName, dryRun: false };
}
ID parsing and qualification. + * Cross-repo IDs follow the format: repo:owner/name:prefix:identifier + */ + +/** @type {RegExp} Regex for cross-repo node IDs. */ +export const CROSS_REPO_ID_REGEX = /^repo:([A-Za-z0-9._-]+\/[A-Za-z0-9._-]+):([a-z][a-z0-9-]*):([A-Za-z0-9._\/@-]+)$/; + +/** + * @typedef {object} CrossRepoId + * @property {string} repo - Repository identifier (owner/name) + * @property {string} prefix - Inner prefix (e.g., "crate", "task") + * @property {string} identifier - Node identifier within the prefix + * @property {string} local - Local node ID (prefix:identifier) + */ + +/** + * Parse a cross-repo node ID. + * + * @param {string} nodeId + * @returns {CrossRepoId|null} Parsed components, or null if not a cross-repo ID + */ +export function parseCrossRepoId(nodeId) { + if (typeof nodeId !== 'string') return null; + const match = CROSS_REPO_ID_REGEX.exec(nodeId); + if (!match) return null; + return { + repo: match[1], + prefix: match[2], + identifier: match[3], + local: `${match[2]}:${match[3]}`, + }; +} + +/** + * Build a cross-repo node ID from components. + * + * @param {string} repo - Repository identifier (owner/name) + * @param {string} localId - Local node ID (prefix:identifier) + * @returns {string} Cross-repo ID (repo:owner/name:prefix:identifier) + */ +export function buildCrossRepoId(repo, localId) { + const candidate = `repo:${repo}:${localId}`; + if (!isCrossRepoId(candidate)) { + throw new Error( + `buildCrossRepoId: invalid result "${candidate}". ` + + `localId "${localId}" must match "prefix:identifier" (e.g. "task:auth"). ` + + `See CROSS_REPO_ID_REGEX / isCrossRepoId for full format.` + ); + } + return candidate; +} + +/** + * Check whether a node ID is a cross-repo ID. + * + * @param {string} nodeId + * @returns {boolean} + */ +export function isCrossRepoId(nodeId) { + return typeof nodeId === 'string' && CROSS_REPO_ID_REGEX.test(nodeId); +} + +/** + * Extract the repository identifier from a cross-repo ID. 
+ * + * @param {string} nodeId + * @returns {string|null} Repository identifier (owner/name), or null if not cross-repo + */ +export function extractRepo(nodeId) { + const parsed = parseCrossRepoId(nodeId); + return parsed ? parsed.repo : null; +} + +/** + * Qualify a local node ID with a repository identifier. + * If the ID is already cross-repo, returns it unchanged. + * + * @param {string} nodeId - Local or cross-repo node ID + * @param {string} repo - Repository identifier (owner/name) + * @returns {string} Qualified cross-repo ID + */ +export function qualifyNodeId(nodeId, repo) { + if (isCrossRepoId(nodeId)) return nodeId; + if (typeof nodeId !== 'string' || !nodeId.includes(':')) { + throw new Error( + `qualifyNodeId: "${nodeId}" is not a valid node ID. ` + + `Node IDs must use the prefix:identifier format (e.g. "task:auth").` + ); + } + // Validate local ID has exactly one colon (prefix:identifier, no extra colons) + const parts = nodeId.split(':'); + if (parts.length !== 2 || !parts[0] || !parts[1]) { + throw new Error( + `qualifyNodeId: "${nodeId}" is not a valid node ID. ` + + `Node IDs must use the prefix:identifier format (e.g. "task:auth").` + ); + } + return buildCrossRepoId(repo, nodeId); +} diff --git a/src/validators.js b/src/validators.js index fadcf3d4..fdb69533 100644 --- a/src/validators.js +++ b/src/validators.js @@ -4,6 +4,8 @@ * Implements constraints from GRAPH_SCHEMA.md (BDK-001). 
*/ +import { CROSS_REPO_ID_REGEX } from './remote.js'; + // ── Constants ──────────────────────────────────────────────────────── /** @type {RegExp} Canonical regex for node IDs (prefix:identifier) */ @@ -22,7 +24,7 @@ export const CANONICAL_PREFIXES = [ ]; /** @type {string[]} System-generated prefixes (reserved, not user-writable) */ -export const SYSTEM_PREFIXES = ['commit']; +export const SYSTEM_PREFIXES = ['commit', 'repo', 'epoch']; /** @type {string[]} Valid edge types */ export const EDGE_TYPES = [ @@ -49,6 +51,12 @@ const SELF_EDGE_FORBIDDEN = ['blocks', 'depends-on']; */ export function extractPrefix(nodeId) { if (typeof nodeId !== 'string') return null; + // Cross-repo IDs: repo:owner/name:prefix:identifier → return inner prefix + if (nodeId.startsWith('repo:')) { + const match = CROSS_REPO_ID_REGEX.exec(nodeId); + if (match) return match[2]; // inner prefix + return 'repo'; + } const idx = nodeId.indexOf(':'); if (idx === -1) return null; return nodeId.slice(0, idx); @@ -67,7 +75,7 @@ export function validateNodeId(nodeId) { if (nodeId.length > NODE_ID_MAX_LENGTH) { return { valid: false, error: `Node ID exceeds max length of ${NODE_ID_MAX_LENGTH} characters (got ${nodeId.length})` }; } - if (!NODE_ID_REGEX.test(nodeId)) { + if (!NODE_ID_REGEX.test(nodeId) && !CROSS_REPO_ID_REGEX.test(nodeId)) { return { valid: false, error: `Invalid node ID: "${nodeId}". 
Must match prefix:identifier (lowercase prefix, valid identifier chars)` }; } return { valid: true }; diff --git a/test/epoch.test.js b/test/epoch.test.js new file mode 100644 index 00000000..66a3b194 --- /dev/null +++ b/test/epoch.test.js @@ -0,0 +1,213 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, rm, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { execSync } from 'node:child_process'; +import { initGraph } from '../src/graph.js'; +import { createEdge } from '../src/edges.js'; +import { getCurrentTick, recordEpoch, lookupEpoch, lookupNearestEpoch, getEpochForRef } from '../src/epoch.js'; +import { processCommit } from '../src/hooks.js'; +import { exportGraph } from '../src/export.js'; +import { detectOrphanNodes } from '../src/doctor.js'; +import { classifyPrefix } from '../src/validators.js'; + +describe('epoch', () => { + let tempDir; + let graph; + + beforeEach(async () => { + tempDir = await mkdtemp(join(tmpdir(), 'gitmind-test-')); + execSync('git init', { cwd: tempDir, stdio: 'ignore' }); + execSync('git config user.email "test@test.com"', { cwd: tempDir, stdio: 'ignore' }); + execSync('git config user.name "Test"', { cwd: tempDir, stdio: 'ignore' }); + graph = await initGraph(tempDir); + }); + + afterEach(async () => { + await rm(tempDir, { recursive: true, force: true }); + }); + + // ── getCurrentTick ───────────────────────────────────────── + + it('returns 0 for an empty graph', async () => { + const tick = await getCurrentTick(graph); + expect(tick).toBe(0); + }); + + it('returns positive tick after creating edges', async () => { + await createEdge(graph, { source: 'task:a', target: 'spec:b', type: 'implements' }); + const tick = await getCurrentTick(graph); + expect(tick).toBeGreaterThan(0); + }); + + // ── recordEpoch / lookupEpoch ────────────────────────────── + + it('records and looks up an epoch by SHA', async () => { + await 
recordEpoch(graph, 'abc123def456', 42); + const epoch = await lookupEpoch(graph, 'abc123def456'); + + expect(epoch).not.toBeNull(); + expect(epoch.tick).toBe(42); + expect(epoch.fullSha).toBe('abc123def456'); + expect(epoch.recordedAt).toBeTruthy(); + }); + + it('returns null for a missing epoch', async () => { + const epoch = await lookupEpoch(graph, 'nonexistent'); + expect(epoch).toBeNull(); + }); + + it('uses first 12 chars of SHA as node ID', async () => { + await recordEpoch(graph, 'abc123def456789', 10); + const nodes = await graph.getNodes(); + expect(nodes).toContain('epoch:abc123def456'); + }); + + // ── lookupNearestEpoch ───────────────────────────────────── + + it('finds ancestor epoch when direct lookup fails', async () => { + // Create a commit chain: c1 -> c2 -> c3 + await writeFile(join(tempDir, 'a.txt'), 'a'); + execSync('git add a.txt && git commit -m "c1"', { cwd: tempDir, stdio: 'ignore' }); + const c1 = execSync('git rev-parse HEAD', { cwd: tempDir, encoding: 'utf-8' }).trim(); + + await writeFile(join(tempDir, 'b.txt'), 'b'); + execSync('git add b.txt && git commit -m "c2"', { cwd: tempDir, stdio: 'ignore' }); + + await writeFile(join(tempDir, 'c.txt'), 'c'); + execSync('git add c.txt && git commit -m "c3"', { cwd: tempDir, stdio: 'ignore' }); + const c3 = execSync('git rev-parse HEAD', { cwd: tempDir, encoding: 'utf-8' }).trim(); + + // Record epoch only for c1 + await recordEpoch(graph, c1, 5); + + // Looking up from c3 should find c1's epoch as nearest + const epoch = await lookupNearestEpoch(graph, tempDir, c3); + expect(epoch).not.toBeNull(); + expect(epoch.tick).toBe(5); + expect(epoch.nearest).toBe(true); + }); + + it('returns null when no ancestors have epochs', async () => { + await writeFile(join(tempDir, 'a.txt'), 'a'); + execSync('git add a.txt && git commit -m "c1"', { cwd: tempDir, stdio: 'ignore' }); + const c1 = execSync('git rev-parse HEAD', { cwd: tempDir, encoding: 'utf-8' }).trim(); + + const epoch = await 
lookupNearestEpoch(graph, tempDir, c1); + expect(epoch).toBeNull(); + }); + + // ── getEpochForRef ───────────────────────────────────────── + + it('resolves ref and finds epoch', async () => { + await writeFile(join(tempDir, 'a.txt'), 'a'); + execSync('git add a.txt && git commit -m "c1"', { cwd: tempDir, stdio: 'ignore' }); + const sha = execSync('git rev-parse HEAD', { cwd: tempDir, encoding: 'utf-8' }).trim(); + + await recordEpoch(graph, sha, 7); + + const result = await getEpochForRef(graph, tempDir, 'HEAD'); + expect(result).not.toBeNull(); + expect(result.sha).toBe(sha); + expect(result.epoch.tick).toBe(7); + }); + + it('returns null for invalid ref', async () => { + const result = await getEpochForRef(graph, tempDir, 'nonexistent-ref'); + expect(result).toBeNull(); + }); + + it('falls back to nearest ancestor epoch', async () => { + await writeFile(join(tempDir, 'a.txt'), 'a'); + execSync('git add a.txt && git commit -m "c1"', { cwd: tempDir, stdio: 'ignore' }); + const c1 = execSync('git rev-parse HEAD', { cwd: tempDir, encoding: 'utf-8' }).trim(); + + await writeFile(join(tempDir, 'b.txt'), 'b'); + execSync('git add b.txt && git commit -m "c2"', { cwd: tempDir, stdio: 'ignore' }); + + // Record epoch only for c1 + await recordEpoch(graph, c1, 3); + + const result = await getEpochForRef(graph, tempDir, 'HEAD'); + expect(result).not.toBeNull(); + expect(result.epoch.tick).toBe(3); + expect(result.epoch.nearest).toBe(true); + }); + + // ── Integration: ceiling materialization ──────────────────── + + it('materializes graph at historical tick, hiding later edges', async () => { + // Create first batch of edges + await createEdge(graph, { source: 'task:a', target: 'spec:b', type: 'implements' }); + const tick1 = await getCurrentTick(graph); + + // Record an epoch at this point + await recordEpoch(graph, 'aaaa1111bbbb2222', tick1); + + // Create more edges + await createEdge(graph, { source: 'task:c', target: 'spec:d', type: 'documents' }); + + // Verify both 
edges exist now + let edges = await graph.getEdges(); + expect(edges.length).toBe(2); + + // Materialize at ceiling = tick1 (should only see the first edge) + await graph.materialize({ ceiling: tick1 }); + edges = await graph.getEdges(); + expect(edges.length).toBe(1); + expect(edges[0].from).toBe('task:a'); + expect(edges[0].to).toBe('spec:b'); + }); + + // ── Export filtering ──────────────────────────────────────── + + it('excludes epoch nodes from export', async () => { + await createEdge(graph, { source: 'task:a', target: 'spec:b', type: 'implements' }); + await recordEpoch(graph, 'abc123def456', 42); + + const data = await exportGraph(graph); + const ids = data.nodes.map(n => n.id); + expect(ids).not.toContain('epoch:abc123def456'); + expect(ids).toContain('task:a'); + }); + + // ── Doctor filtering ─────────────────────────────────────── + + it('excludes epoch nodes from orphan detection', () => { + const nodes = ['task:a', 'task:b', 'epoch:abc123def456']; + const edges = [{ from: 'task:a', to: 'task:b', label: 'blocks' }]; + const issues = detectOrphanNodes(nodes, edges); + + expect(issues).toHaveLength(0); + }); + + // ── Validators ────────────────────────────────────────────── + + it('classifies epoch prefix as system', () => { + expect(classifyPrefix('epoch')).toBe('system'); + }); + + // ── processCommit epoch recording ────────────────────────── + + it('processCommit records an epoch automatically', async () => { + await processCommit(graph, { + sha: 'fade0123cafe4567', + message: 'fix: update auth\n\nIMPLEMENTS: spec:auth', + }); + + const epoch = await lookupEpoch(graph, 'fade0123cafe4567'); + expect(epoch).not.toBeNull(); + expect(epoch.tick).toBeGreaterThanOrEqual(0); + expect(epoch.fullSha).toBe('fade0123cafe4567'); + }); + + it('processCommit records epoch even without directives', async () => { + await processCommit(graph, { + sha: 'dead0000beef1111', + message: 'chore: update deps', + }); + + const epoch = await lookupEpoch(graph, 
'dead0000beef1111'); + expect(epoch).not.toBeNull(); + }); +}); diff --git a/test/export.test.js b/test/export.test.js new file mode 100644 index 00000000..6751ed34 --- /dev/null +++ b/test/export.test.js @@ -0,0 +1,237 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, rm, readFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { execSync } from 'node:child_process'; +import { initGraph } from '../src/graph.js'; +import { createEdge } from '../src/edges.js'; +import { importFile } from '../src/import.js'; +import { exportGraph, serializeExport, exportToFile } from '../src/export.js'; + +describe('export', () => { + let tempDir; + let graph; + + beforeEach(async () => { + tempDir = await mkdtemp(join(tmpdir(), 'gitmind-test-')); + execSync('git init', { cwd: tempDir, stdio: 'ignore' }); + graph = await initGraph(tempDir); + }); + + afterEach(async () => { + await rm(tempDir, { recursive: true, force: true }); + }); + + // ── Empty graph ────────────────────────────────────────────── + + it('exports empty graph', async () => { + const data = await exportGraph(graph); + expect(data.version).toBe(1); + expect(data.nodes).toEqual([]); + expect(data.edges).toEqual([]); + }); + + // ── Basic export ───────────────────────────────────────────── + + it('exports nodes and edges', async () => { + await createEdge(graph, { + source: 'spec:auth', + target: 'file:src/auth.js', + type: 'documents', + confidence: 0.9, + rationale: 'Auth spec', + }); + + const data = await exportGraph(graph); + expect(data.version).toBe(1); + expect(data.nodes).toHaveLength(2); + expect(data.nodes.map(n => n.id).sort()).toEqual(['file:src/auth.js', 'spec:auth']); + expect(data.edges).toHaveLength(1); + expect(data.edges[0].source).toBe('spec:auth'); + expect(data.edges[0].target).toBe('file:src/auth.js'); + expect(data.edges[0].type).toBe('documents'); + }); + + // ── Edge properties 
────────────────────────────────────────── + + it('includes confidence and rationale, excludes timestamps', async () => { + await createEdge(graph, { + source: 'task:a', + target: 'spec:b', + type: 'implements', + confidence: 0.8, + rationale: 'Test rationale', + }); + + const data = await exportGraph(graph); + const edge = data.edges[0]; + expect(edge.confidence).toBe(0.8); + expect(edge.rationale).toBe('Test rationale'); + expect(edge.createdAt).toBeUndefined(); + expect(edge.importedAt).toBeUndefined(); + }); + + // ── System node exclusion ──────────────────────────────────── + + it('excludes decision: nodes', async () => { + // Create a regular edge + await createEdge(graph, { source: 'task:a', target: 'spec:b', type: 'implements' }); + + // Create a decision node (simulating review provenance) + const patch = await graph.createPatch(); + patch.addNode('decision:123'); + patch.setProperty('decision:123', 'action', 'accept'); + await patch.commit(); + + const data = await exportGraph(graph); + const ids = data.nodes.map(n => n.id); + expect(ids).not.toContain('decision:123'); + expect(ids).toContain('task:a'); + expect(ids).toContain('spec:b'); + }); + + it('excludes commit: nodes', async () => { + const patch = await graph.createPatch(); + patch.addNode('commit:abc123'); + patch.addNode('task:a'); + await patch.commit(); + + const data = await exportGraph(graph); + const ids = data.nodes.map(n => n.id); + expect(ids).not.toContain('commit:abc123'); + expect(ids).toContain('task:a'); + }); + + // ── Prefix filtering ──────────────────────────────────────── + + it('filters by prefix', async () => { + await createEdge(graph, { source: 'task:a', target: 'task:b', type: 'blocks' }); + await createEdge(graph, { source: 'spec:auth', target: 'task:a', type: 'relates-to' }); + + const data = await exportGraph(graph, { prefix: 'task' }); + expect(data.nodes).toHaveLength(2); + expect(data.nodes.every(n => n.id.startsWith('task:'))).toBe(true); + // Only the edge between 
two task: nodes should be included + expect(data.edges).toHaveLength(1); + expect(data.edges[0].source).toBe('task:a'); + expect(data.edges[0].target).toBe('task:b'); + }); + + // ── Node properties ────────────────────────────────────────── + + it('exports node properties', async () => { + const patch = await graph.createPatch(); + patch.addNode('task:auth'); + patch.setProperty('task:auth', 'status', 'active'); + patch.setProperty('task:auth', 'priority', 'high'); + await patch.commit(); + + const data = await exportGraph(graph); + const node = data.nodes.find(n => n.id === 'task:auth'); + expect(node.properties).toEqual({ status: 'active', priority: 'high' }); + }); + + it('omits properties key when node has none', async () => { + await createEdge(graph, { source: 'task:a', target: 'spec:b', type: 'implements' }); + + const data = await exportGraph(graph); + // Nodes created implicitly by createEdge have no user properties + // (they may have system properties from edge creation, but node-level props are empty) + for (const node of data.nodes) { + if (node.properties) { + expect(Object.keys(node.properties).length).toBeGreaterThan(0); + } + } + }); + + // ── Serialization ──────────────────────────────────────────── + + it('serializes to YAML', async () => { + await createEdge(graph, { source: 'task:a', target: 'spec:b', type: 'implements' }); + const data = await exportGraph(graph); + const yamlStr = serializeExport(data, 'yaml'); + expect(yamlStr).toContain('version: 1'); + expect(yamlStr).toContain('task:a'); + expect(yamlStr).toContain('implements'); + }); + + it('serializes to JSON', async () => { + await createEdge(graph, { source: 'task:a', target: 'spec:b', type: 'implements' }); + const data = await exportGraph(graph); + const jsonStr = serializeExport(data, 'json'); + const parsed = JSON.parse(jsonStr); + expect(parsed.version).toBe(1); + expect(parsed.nodes.length).toBe(2); + }); + + // ── File export ────────────────────────────────────────────── + + 
it('exports to file', async () => { + await createEdge(graph, { source: 'task:a', target: 'spec:b', type: 'implements' }); + const outPath = join(tempDir, 'export.yaml'); + + const result = await exportToFile(graph, outPath); + expect(result.stats.nodes).toBe(2); + expect(result.stats.edges).toBe(1); + expect(result.path).toBe(outPath); + + const content = await readFile(outPath, 'utf-8'); + expect(content).toContain('version: 1'); + }); + + it('infers JSON format from file extension', async () => { + await createEdge(graph, { source: 'task:a', target: 'spec:b', type: 'implements' }); + const outPath = join(tempDir, 'export.json'); + + await exportToFile(graph, outPath); + const content = await readFile(outPath, 'utf-8'); + const parsed = JSON.parse(content); + expect(parsed.version).toBe(1); + }); + + // ── Round-trip ─────────────────────────────────────────────── + + it('round-trips: export → import → compare', async () => { + // Build a graph + await createEdge(graph, { + source: 'file:src/auth.js', + target: 'spec:auth', + type: 'implements', + confidence: 0.9, + rationale: 'Main auth module', + }); + + const patch = await graph.createPatch(); + patch.addNode('task:auth'); + patch.setProperty('task:auth', 'status', 'active'); + await patch.commit(); + + // Export + const exportPath = join(tempDir, 'roundtrip.yaml'); + await exportToFile(graph, exportPath); + + // Import into a fresh graph + const freshDir = await mkdtemp(join(tmpdir(), 'gitmind-rt-')); + try { + execSync('git init', { cwd: freshDir, stdio: 'ignore' }); + const freshGraph = await initGraph(freshDir); + + const importResult = await importFile(freshGraph, exportPath); + expect(importResult.valid).toBe(true); + expect(importResult.errors).toEqual([]); + + // Compare + const freshNodes = (await freshGraph.getNodes()).sort(); + const origExport = await exportGraph(graph); + const origNodeIds = origExport.nodes.map(n => n.id).sort(); + expect(freshNodes).toEqual(origNodeIds); + + const freshEdges = 
await freshGraph.getEdges(); + expect(freshEdges).toHaveLength(1); + expect(freshEdges[0].props.confidence).toBe(0.9); + expect(freshEdges[0].props.rationale).toBe('Main auth module'); + } finally { + await rm(freshDir, { recursive: true, force: true }); + } + }); +}); diff --git a/test/format-pr.test.js b/test/format-pr.test.js new file mode 100644 index 00000000..9ee40438 --- /dev/null +++ b/test/format-pr.test.js @@ -0,0 +1,184 @@ +import { describe, it, expect } from 'vitest'; +import { formatSuggestionsAsMarkdown, parseReviewCommand } from '../src/format-pr.js'; + +describe('format-pr', () => { + // ── formatSuggestionsAsMarkdown ───────────────────────────── + + describe('formatSuggestionsAsMarkdown', () => { + it('returns empty message for no suggestions', () => { + const result = formatSuggestionsAsMarkdown([]); + expect(result).toContain('No new edge suggestions'); + }); + + it('returns empty message for null suggestions', () => { + const result = formatSuggestionsAsMarkdown(null); + expect(result).toContain('No new edge suggestions'); + }); + + it('formats a single suggestion', () => { + const suggestions = [{ + source: 'file:auth.js', + target: 'spec:auth', + type: 'implements', + confidence: 0.8, + rationale: 'Auth module', + }]; + + const result = formatSuggestionsAsMarkdown(suggestions); + expect(result).toContain('| 1 |'); + expect(result).toContain('`file:auth.js`'); + expect(result).toContain('`spec:auth`'); + expect(result).toContain('implements'); + expect(result).toContain('80%'); + expect(result).toContain('Auth module'); + }); + + it('formats multiple suggestions with commands', () => { + const suggestions = [ + { source: 'task:a', target: 'spec:b', type: 'implements', confidence: 0.9 }, + { source: 'task:c', target: 'spec:d', type: 'relates-to', confidence: 0.7 }, + ]; + + const result = formatSuggestionsAsMarkdown(suggestions); + expect(result).toContain('| 1 |'); + expect(result).toContain('| 2 |'); + expect(result).toContain('/gitmind 
accept 1'); + expect(result).toContain('/gitmind reject 2'); + expect(result).toContain('/gitmind accept-all'); + }); + + it('escapes pipe characters in rationale', () => { + const suggestions = [{ + source: 'task:a', + target: 'spec:b', + type: 'implements', + confidence: 0.8, + rationale: 'reason A | reason B', + }]; + + const result = formatSuggestionsAsMarkdown(suggestions); + // Pipe should be escaped so it doesn't break the table + expect(result).not.toMatch(/\| reason A \| reason B \|/); + expect(result).toContain('reason A'); + expect(result).toContain('reason B'); + }); + + it('escapes backslashes in rationale', () => { + const suggestions = [{ + source: 'task:a', + target: 'spec:b', + type: 'implements', + confidence: 0.8, + rationale: 'path\\to\\file', + }]; + + const result = formatSuggestionsAsMarkdown(suggestions); + // Backslashes should be escaped so markdown doesn't treat them as escape chars + expect(result).toContain('path\\\\to\\\\file'); + }); + + it('escapes backslash-pipe sequence in rationale', () => { + const suggestions = [{ + source: 'task:a', + target: 'spec:b', + type: 'implements', + confidence: 0.8, + rationale: 'test\\|end', // literal backslash followed by pipe + }]; + + const result = formatSuggestionsAsMarkdown(suggestions); + const dataRow = result.split('\n').find(l => l.startsWith('| 1')); + // A 7-column table row should have exactly 8 pipe delimiters. + // Without backslash escaping, \| becomes \\| which is a raw pipe = 9 delimiters. 
+ expect((dataRow.match(/\|/g) || []).length).toBe(8); + }); + + it('handles missing rationale', () => { + const suggestions = [{ + source: 'task:a', + target: 'spec:b', + type: 'implements', + confidence: 1.0, + }]; + + const result = formatSuggestionsAsMarkdown(suggestions); + expect(result).toContain('100%'); + // Should not contain "undefined" + expect(result).not.toContain('undefined'); + }); + + it('handles suggestion with missing type gracefully', () => { + const suggestions = [{ + source: 'task:a', + target: 'spec:b', + confidence: 0.8, + }]; + + const result = formatSuggestionsAsMarkdown(suggestions); + expect(result).not.toContain('undefined'); + expect(result).toContain('| 1 |'); + }); + }); + + // ── parseReviewCommand ────────────────────────────────────── + + describe('parseReviewCommand', () => { + it('parses accept command', () => { + expect(parseReviewCommand('/gitmind accept 1')).toEqual({ command: 'accept', index: 1 }); + }); + + it('parses reject command', () => { + expect(parseReviewCommand('/gitmind reject 3')).toEqual({ command: 'reject', index: 3 }); + }); + + it('parses accept-all command', () => { + expect(parseReviewCommand('/gitmind accept-all')).toEqual({ command: 'accept-all' }); + }); + + it('returns null for non-matching text', () => { + expect(parseReviewCommand('just a normal comment')).toBeNull(); + }); + + it('returns null for accept without index', () => { + expect(parseReviewCommand('/gitmind accept')).toBeNull(); + }); + + it('returns null for non-string input', () => { + expect(parseReviewCommand(null)).toBeNull(); + }); + + it('parses command embedded in larger comment', () => { + const body = 'LGTM!\n\n/gitmind accept 2\n\nGreat work!'; + expect(parseReviewCommand(body)).toEqual({ command: 'accept', index: 2 }); + }); + + it('returns null for accept with index 0 (1-indexed)', () => { + expect(parseReviewCommand('/gitmind accept 0')).toBeNull(); + }); + + it('returns null for reject with index 0 (1-indexed)', () => { + 
expect(parseReviewCommand('/gitmind reject 0')).toBeNull(); + }); + }); + + // ── backtick escaping ────────────────────────────────────── + + describe('backtick escaping in suggestions', () => { + it('strips backtick characters from source and target', () => { + const suggestions = [{ + source: 'file:`auth`.js', + target: 'spec:`auth`', + type: 'implements', + confidence: 0.8, + }]; + + const result = formatSuggestionsAsMarkdown(suggestions); + // Backticks should be stripped to prevent breaking code spans + expect(result).toContain('`file:auth.js`'); + expect(result).toContain('`spec:auth`'); + // Verify table structure is intact (6 columns = 7 pipe delimiters) + const dataRow = result.split('\n').find(l => l.startsWith('| 1')); + expect((dataRow.match(/\|/g) || []).length).toBe(7); + }); + }); +}); diff --git a/test/frontmatter.test.js b/test/frontmatter.test.js new file mode 100644 index 00000000..4e02d847 --- /dev/null +++ b/test/frontmatter.test.js @@ -0,0 +1,277 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, rm, writeFile, mkdir, chmod } from 'node:fs/promises'; +import { join, dirname } from 'node:path'; +import { tmpdir } from 'node:os'; +import { execSync } from 'node:child_process'; +import { initGraph } from '../src/graph.js'; +import { parseFrontmatter, extractGraphData, findMarkdownFiles, importFromMarkdown } from '../src/frontmatter.js'; +import { exportGraph } from '../src/export.js'; + +describe('frontmatter', () => { + let tempDir; + let graph; + + beforeEach(async () => { + tempDir = await mkdtemp(join(tmpdir(), 'gitmind-test-')); + execSync('git init', { cwd: tempDir, stdio: 'ignore' }); + graph = await initGraph(tempDir); + }); + + afterEach(async () => { + await rm(tempDir, { recursive: true, force: true }); + }); + + /** + * Helper: write a markdown file and return its path. 
+ */ + async function writeMd(relativePath, content) { + const fullPath = join(tempDir, relativePath); + const dir = dirname(fullPath); + await mkdir(dir, { recursive: true }); + await writeFile(fullPath, content, 'utf-8'); + return fullPath; + } + + // ── parseFrontmatter ──────────────────────────────────────── + + describe('parseFrontmatter', () => { + it('parses valid frontmatter', () => { + const content = `--- +title: "Hello" +id: doc:hello +--- +# Hello World`; + const { frontmatter, body } = parseFrontmatter(content); + expect(frontmatter).toEqual({ title: 'Hello', id: 'doc:hello' }); + expect(body).toContain('# Hello World'); + }); + + it('returns null for missing frontmatter', () => { + const { frontmatter, body } = parseFrontmatter('# Just a heading\nSome content'); + expect(frontmatter).toBeNull(); + expect(body).toContain('# Just a heading'); + }); + + it('returns null for unclosed frontmatter', () => { + const content = `--- +title: "Open" +# No closing delimiter`; + const { frontmatter } = parseFrontmatter(content); + expect(frontmatter).toBeNull(); + }); + + it('handles CRLF line endings', () => { + const content = '---\r\ntitle: "Hello"\r\nid: doc:hello\r\n---\r\n# Hello World'; + const { frontmatter, body } = parseFrontmatter(content); + expect(frontmatter).toEqual({ title: 'Hello', id: 'doc:hello' }); + expect(body).toContain('# Hello World'); + }); + + it('returns null for invalid YAML in frontmatter', () => { + const content = `--- +: invalid: yaml: {{ +--- +Body`; + const { frontmatter } = parseFrontmatter(content); + expect(frontmatter).toBeNull(); + }); + }); + + // ── extractGraphData ──────────────────────────────────────── + + describe('extractGraphData', () => { + it('uses explicit id from frontmatter', () => { + const { node } = extractGraphData('docs/arch.md', { id: 'doc:architecture' }); + expect(node.id).toBe('doc:architecture'); + }); + + it('auto-generates id from path', () => { + const { node } = 
extractGraphData('docs/getting-started.md', { title: 'Guide' }); + expect(node.id).toBe('doc:docs/getting-started'); + }); + + it('extracts title as node property', () => { + const { node } = extractGraphData('readme.md', { title: 'README' }); + expect(node.properties).toEqual({ title: 'README' }); + }); + + it('extracts implements edge', () => { + const { edges } = extractGraphData('auth.md', { + id: 'doc:auth', + implements: 'spec:auth', + }); + expect(edges).toHaveLength(1); + expect(edges[0]).toEqual({ source: 'doc:auth', target: 'spec:auth', type: 'implements' }); + }); + + it('extracts array of relates-to edges', () => { + const { edges } = extractGraphData('arch.md', { + id: 'doc:arch', + 'relates-to': ['spec:a', 'spec:b'], + }); + expect(edges).toHaveLength(2); + expect(edges[0].target).toBe('spec:a'); + expect(edges[1].target).toBe('spec:b'); + }); + + it('strips extension from filename, not directory', () => { + const { node } = extractGraphData('notes.md/index.md', { title: 'Notes' }); + expect(node.id).toBe('doc:notes.md/index'); + }); + + it('extracts multiple edge types', () => { + const { edges } = extractGraphData('design.md', { + id: 'doc:design', + implements: 'spec:auth', + 'depends-on': 'doc:getting-started', + documents: 'module:auth', + }); + expect(edges).toHaveLength(3); + const types = edges.map(e => e.type).sort(); + expect(types).toEqual(['depends-on', 'documents', 'implements']); + }); + }); + + // ── findMarkdownFiles ─────────────────────────────────────── + + describe('findMarkdownFiles', () => { + it('finds .md files recursively', async () => { + await writeMd('docs/a.md', '# A'); + await writeMd('docs/sub/b.md', '# B'); + await writeFile(join(tempDir, 'docs/c.txt'), 'not markdown'); + + const files = await findMarkdownFiles(tempDir, 'docs/**/*.md'); + expect(files).toHaveLength(2); + expect(files[0]).toContain('a.md'); + expect(files[1]).toContain('b.md'); + }); + + it('returns empty for non-existent directory', async () => { + 
const files = await findMarkdownFiles(tempDir, 'nonexistent/**/*.md'); + expect(files).toEqual([]); + }); + + it('throws on permission errors (not ENOENT)', async () => { + if (process.getuid?.() === 0) return; // root ignores chmod + // Create a directory with no read permissions + const restrictedDir = join(tempDir, 'restricted'); + await mkdir(restrictedDir); + await chmod(restrictedDir, 0o000); + + try { + await expect(findMarkdownFiles(tempDir, 'restricted/**/*.md')) + .rejects.toThrow(); + } finally { + // Restore permissions for cleanup + await chmod(restrictedDir, 0o755); + } + }); + }); + + // ── importFromMarkdown ────────────────────────────────────── + + describe('importFromMarkdown', () => { + it('imports nodes from frontmatter', async () => { + await writeMd('docs/auth.md', `--- +id: doc:auth +title: "Auth Guide" +--- +# Authentication`); + + const result = await importFromMarkdown(graph, tempDir, 'docs/**/*.md'); + expect(result.valid).toBe(true); + expect(result.stats.nodes).toBe(1); + + const nodes = await graph.getNodes(); + expect(nodes).toContain('doc:auth'); + }); + + it('imports edges from frontmatter', async () => { + // Create the target node first + const patch = await graph.createPatch(); + patch.addNode('spec:auth'); + await patch.commit(); + + await writeMd('docs/auth.md', `--- +id: doc:auth +implements: spec:auth +--- +# Auth`); + + const result = await importFromMarkdown(graph, tempDir, 'docs/**/*.md'); + expect(result.valid).toBe(true); + expect(result.stats.edges).toBe(1); + + const edges = await graph.getEdges(); + expect(edges).toHaveLength(1); + expect(edges[0].from).toBe('doc:auth'); + expect(edges[0].to).toBe('spec:auth'); + }); + + it('supports dry-run mode', async () => { + await writeMd('docs/test.md', `--- +id: doc:test +title: "Test" +--- +# Test`); + + const result = await importFromMarkdown(graph, tempDir, 'docs/**/*.md', { dryRun: true }); + expect(result.valid).toBe(true); + expect(result.dryRun).toBe(true); + 
expect(result.stats.nodes).toBe(1); + + // Graph should be unchanged + const nodes = await graph.getNodes(); + expect(nodes).toHaveLength(0); + }); + + it('handles no matching files', async () => { + const result = await importFromMarkdown(graph, tempDir, 'docs/**/*.md'); + expect(result.valid).toBe(true); + expect(result.warnings[0]).toMatch(/No markdown files found/); + }); + + it('skips files without frontmatter', async () => { + await writeMd('docs/plain.md', '# Just a heading\nNo frontmatter here.'); + await writeMd('docs/with-fm.md', `--- +id: doc:with-fm +title: "Has FM" +--- +# Content`); + + const result = await importFromMarkdown(graph, tempDir, 'docs/**/*.md'); + expect(result.valid).toBe(true); + expect(result.stats.nodes).toBe(1); + }); + + it('is idempotent on re-import', async () => { + await writeMd('docs/auth.md', `--- +id: doc:auth +title: "Auth Guide" +--- +# Authentication`); + + await importFromMarkdown(graph, tempDir, 'docs/**/*.md'); + await importFromMarkdown(graph, tempDir, 'docs/**/*.md'); + + const nodes = await graph.getNodes(); + expect(nodes.filter(n => n === 'doc:auth')).toHaveLength(1); + }); + + it('round-trips with export', async () => { + await writeMd('docs/auth.md', `--- +id: doc:auth +title: "Auth Guide" +implements: spec:auth +--- +# Authentication`); + + await importFromMarkdown(graph, tempDir, 'docs/**/*.md'); + const exported = await exportGraph(graph); + + expect(exported.nodes.map(n => n.id).sort()).toEqual(['doc:auth', 'spec:auth']); + expect(exported.edges).toHaveLength(1); + expect(exported.edges[0].type).toBe('implements'); + }); + }); +}); diff --git a/test/merge.test.js b/test/merge.test.js new file mode 100644 index 00000000..b960842e --- /dev/null +++ b/test/merge.test.js @@ -0,0 +1,192 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, rm } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { execSync } from 
'node:child_process'; +import { initGraph } from '../src/graph.js'; +import { createEdge, queryEdges } from '../src/edges.js'; +import { detectRepoIdentifier, mergeFromRepo } from '../src/merge.js'; + +describe('merge', () => { + let localDir; + let remoteDir; + let localGraph; + let remoteGraph; + + beforeEach(async () => { + localDir = await mkdtemp(join(tmpdir(), 'gitmind-local-')); + remoteDir = await mkdtemp(join(tmpdir(), 'gitmind-remote-')); + execSync('git init', { cwd: localDir, stdio: 'ignore' }); + execSync('git init', { cwd: remoteDir, stdio: 'ignore' }); + localGraph = await initGraph(localDir); + remoteGraph = await initGraph(remoteDir); + }); + + afterEach(async () => { + await rm(localDir, { recursive: true, force: true }); + await rm(remoteDir, { recursive: true, force: true }); + }); + + // ── detectRepoIdentifier ──────────────────────────────────── + + describe('detectRepoIdentifier', () => { + it('parses HTTPS URL', () => { + execSync('git remote add origin https://github.com/neuroglyph/echo.git', { cwd: remoteDir, stdio: 'ignore' }); + expect(detectRepoIdentifier(remoteDir)).toBe('neuroglyph/echo'); + }); + + it('parses SSH URL', () => { + execSync('git remote add origin git@github.com:neuroglyph/echo.git', { cwd: remoteDir, stdio: 'ignore' }); + expect(detectRepoIdentifier(remoteDir)).toBe('neuroglyph/echo'); + }); + + it('returns null when no remote exists', () => { + expect(detectRepoIdentifier(remoteDir)).toBeNull(); + }); + + it('handles URLs without .git suffix', () => { + execSync('git remote add origin https://github.com/owner/name', { cwd: remoteDir, stdio: 'ignore' }); + expect(detectRepoIdentifier(remoteDir)).toBe('owner/name'); + }); + }); + + // ── mergeFromRepo ─────────────────────────────────────────── + + describe('mergeFromRepo', () => { + it('merges and qualifies remote nodes', async () => { + // Populate remote graph + await createEdge(remoteGraph, { + source: 'spec:auth', + target: 'module:auth', + type: 'documents', + 
confidence: 0.9, + rationale: 'Auth spec docs', + }); + + const result = await mergeFromRepo(localGraph, remoteDir, { repoName: 'other/repo' }); + expect(result.nodes).toBe(2); + expect(result.edges).toBe(1); + expect(result.repoName).toBe('other/repo'); + expect(result.dryRun).toBe(false); + + // Check local graph has qualified nodes + const nodes = await localGraph.getNodes(); + expect(nodes).toContain('repo:other/repo:spec:auth'); + expect(nodes).toContain('repo:other/repo:module:auth'); + + // Check qualified edges + const edges = await queryEdges(localGraph); + expect(edges).toHaveLength(1); + expect(edges[0].from).toBe('repo:other/repo:spec:auth'); + expect(edges[0].to).toBe('repo:other/repo:module:auth'); + expect(edges[0].label).toBe('documents'); + }); + + it('preserves local data (additive only)', async () => { + // Pre-populate local graph + await createEdge(localGraph, { + source: 'task:a', + target: 'spec:b', + type: 'implements', + }); + + // Populate remote + await createEdge(remoteGraph, { + source: 'crate:core', + target: 'spec:api', + type: 'implements', + }); + + await mergeFromRepo(localGraph, remoteDir, { repoName: 'other/repo' }); + + const nodes = await localGraph.getNodes(); + expect(nodes).toContain('task:a'); + expect(nodes).toContain('spec:b'); + expect(nodes).toContain('repo:other/repo:crate:core'); + + const edges = await queryEdges(localGraph); + expect(edges).toHaveLength(2); // local + remote + }); + + it('preserves edge properties (confidence + rationale)', async () => { + await createEdge(remoteGraph, { + source: 'task:a', + target: 'spec:b', + type: 'implements', + confidence: 0.7, + rationale: 'Test rationale', + }); + + await mergeFromRepo(localGraph, remoteDir, { repoName: 'other/repo' }); + + const edges = await queryEdges(localGraph); + expect(edges[0].props.confidence).toBe(0.7); + expect(edges[0].props.rationale).toBe('Test rationale'); + }); + + it('handles dry-run mode', async () => { + await createEdge(remoteGraph, { + 
source: 'task:a', + target: 'spec:b', + type: 'implements', + }); + + const result = await mergeFromRepo(localGraph, remoteDir, { + repoName: 'other/repo', + dryRun: true, + }); + + expect(result.dryRun).toBe(true); + expect(result.nodes).toBe(2); + expect(result.edges).toBe(1); + + // Local graph should be unchanged + const nodes = await localGraph.getNodes(); + expect(nodes).toHaveLength(0); + }); + + it('handles empty remote graph', async () => { + const result = await mergeFromRepo(localGraph, remoteDir, { repoName: 'other/repo' }); + expect(result.nodes).toBe(0); + expect(result.edges).toBe(0); + }); + + it('is idempotent on re-merge', async () => { + await createEdge(remoteGraph, { + source: 'task:a', + target: 'spec:b', + type: 'implements', + }); + + await mergeFromRepo(localGraph, remoteDir, { repoName: 'other/repo' }); + await mergeFromRepo(localGraph, remoteDir, { repoName: 'other/repo' }); + + const nodes = await localGraph.getNodes(); + // Should not duplicate + const qualifiedNodes = nodes.filter(n => n.startsWith('repo:')); + expect(qualifiedNodes).toHaveLength(2); + + const edges = await queryEdges(localGraph); + expect(edges).toHaveLength(1); + }); + + it('throws when repo name cannot be detected', async () => { + await expect(mergeFromRepo(localGraph, remoteDir)) + .rejects.toThrow(/Could not detect repository identifier/); + }); + + it('copies node properties', async () => { + const patch = await remoteGraph.createPatch(); + patch.addNode('task:a'); + patch.setProperty('task:a', 'status', 'active'); + patch.setProperty('task:a', 'priority', 'high'); + await patch.commit(); + + await mergeFromRepo(localGraph, remoteDir, { repoName: 'other/repo' }); + + const props = await localGraph.getNodeProps('repo:other/repo:task:a'); + expect(props.get('status')).toBe('active'); + expect(props.get('priority')).toBe('high'); + }); + }); +}); diff --git a/test/remote.test.js b/test/remote.test.js new file mode 100644 index 00000000..2dd80921 --- /dev/null +++ 
b/test/remote.test.js @@ -0,0 +1,181 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, rm } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { execSync } from 'node:child_process'; +import { initGraph } from '../src/graph.js'; +import { createEdge, queryEdges } from '../src/edges.js'; +import { + parseCrossRepoId, buildCrossRepoId, isCrossRepoId, + extractRepo, qualifyNodeId, CROSS_REPO_ID_REGEX, +} from '../src/remote.js'; +import { + validateNodeId, extractPrefix, classifyPrefix, +} from '../src/validators.js'; + +describe('remote', () => { + // ── parseCrossRepoId ──────────────────────────────────────── + + describe('parseCrossRepoId', () => { + it('parses a valid cross-repo ID', () => { + const result = parseCrossRepoId('repo:neuroglyph/echo:crate:echo-core'); + expect(result).toEqual({ + repo: 'neuroglyph/echo', + prefix: 'crate', + identifier: 'echo-core', + local: 'crate:echo-core', + }); + }); + + it('parses cross-repo ID with dots in repo name', () => { + const result = parseCrossRepoId('repo:my.org/my-repo:task:BDK-001'); + expect(result).toEqual({ + repo: 'my.org/my-repo', + prefix: 'task', + identifier: 'BDK-001', + local: 'task:BDK-001', + }); + }); + + it('returns null for standard node ID', () => { + expect(parseCrossRepoId('task:a')).toBeNull(); + }); + + it('returns null for non-string input', () => { + expect(parseCrossRepoId(null)).toBeNull(); + expect(parseCrossRepoId(42)).toBeNull(); + }); + + it('returns null for malformed cross-repo ID', () => { + expect(parseCrossRepoId('repo:noslash:task:a')).toBeNull(); + }); + }); + + // ── buildCrossRepoId ──────────────────────────────────────── + + describe('buildCrossRepoId', () => { + it('builds a cross-repo ID', () => { + expect(buildCrossRepoId('neuroglyph/echo', 'crate:echo-core')) + .toBe('repo:neuroglyph/echo:crate:echo-core'); + }); + + it('throws on invalid localId', () => { + expect(() => 
buildCrossRepoId('owner/name', 'nocolon')).toThrow(/prefix:identifier/); + }); + }); + + // ── isCrossRepoId ────────────────────────────────────────── + + describe('isCrossRepoId', () => { + it('returns true for cross-repo IDs', () => { + expect(isCrossRepoId('repo:owner/name:task:a')).toBe(true); + }); + + it('returns false for standard IDs', () => { + expect(isCrossRepoId('task:a')).toBe(false); + }); + + it('returns false for non-strings', () => { + expect(isCrossRepoId(undefined)).toBe(false); + }); + }); + + // ── extractRepo ───────────────────────────────────────────── + + describe('extractRepo', () => { + it('extracts repo from cross-repo ID', () => { + expect(extractRepo('repo:neuroglyph/echo:crate:echo-core')).toBe('neuroglyph/echo'); + }); + + it('returns null for standard ID', () => { + expect(extractRepo('task:a')).toBeNull(); + }); + }); + + // ── qualifyNodeId ─────────────────────────────────────────── + + describe('qualifyNodeId', () => { + it('qualifies a local ID', () => { + expect(qualifyNodeId('crate:echo-core', 'neuroglyph/echo')) + .toBe('repo:neuroglyph/echo:crate:echo-core'); + }); + + it('returns cross-repo ID unchanged', () => { + const id = 'repo:neuroglyph/echo:crate:echo-core'; + expect(qualifyNodeId(id, 'other/repo')).toBe(id); + }); + + it('throws a clear error for non-prefixed local IDs', () => { + expect(() => qualifyNodeId('readme', 'owner/name')) + .toThrow(/not a valid node ID.*prefix:identifier/); + }); + + it('throws a clear error for multi-colon local IDs', () => { + expect(() => qualifyNodeId('a:b:c', 'owner/name')) + .toThrow(/not a valid node ID/); + }); + }); + + // ── Validator integration ─────────────────────────────────── + + describe('validator integration', () => { + it('validateNodeId accepts cross-repo IDs', () => { + const result = validateNodeId('repo:neuroglyph/echo:crate:echo-core'); + expect(result.valid).toBe(true); + }); + + it('validateNodeId still rejects invalid IDs', () => { + const result = 
validateNodeId('repo bad spaces'); + expect(result.valid).toBe(false); + }); + + it('extractPrefix returns inner prefix for cross-repo IDs', () => { + expect(extractPrefix('repo:neuroglyph/echo:crate:echo-core')).toBe('crate'); + }); + + it('classifyPrefix recognizes repo as system', () => { + expect(classifyPrefix('repo')).toBe('system'); + }); + }); + + // ── Graph integration ─────────────────────────────────────── + + describe('graph integration', () => { + let tempDir; + let graph; + + beforeEach(async () => { + tempDir = await mkdtemp(join(tmpdir(), 'gitmind-test-')); + execSync('git init', { cwd: tempDir, stdio: 'ignore' }); + graph = await initGraph(tempDir); + }); + + afterEach(async () => { + await rm(tempDir, { recursive: true, force: true }); + }); + + it('creates edge with cross-repo endpoint', async () => { + await createEdge(graph, { + source: 'file:src/auth.js', + target: 'repo:neuroglyph/echo:spec:auth', + type: 'implements', + }); + + const edges = await queryEdges(graph); + expect(edges).toHaveLength(1); + expect(edges[0].to).toBe('repo:neuroglyph/echo:spec:auth'); + }); + + it('queries edges with cross-repo nodes', async () => { + await createEdge(graph, { + source: 'repo:neuroglyph/echo:crate:echo-core', + target: 'module:auth', + type: 'depends-on', + }); + + const edges = await queryEdges(graph, { source: 'repo:neuroglyph/echo:crate:echo-core' }); + expect(edges).toHaveLength(1); + expect(edges[0].label).toBe('depends-on'); + }); + }); +}); diff --git a/test/validators.test.js b/test/validators.test.js index 19d1b6ee..596ec094 100644 --- a/test/validators.test.js +++ b/test/validators.test.js @@ -269,8 +269,9 @@ describe('constants', () => { expect(CANONICAL_PREFIXES).not.toContain('commit'); }); - it('SYSTEM_PREFIXES contains commit', () => { - expect(SYSTEM_PREFIXES).toEqual(['commit']); + it('SYSTEM_PREFIXES contains commit and repo', () => { + expect(SYSTEM_PREFIXES).toContain('commit'); + expect(SYSTEM_PREFIXES).toContain('repo'); }); 
it('ALL_PREFIXES is the union of canonical and system', () => {