diff --git a/.claude/settings.local.json b/.claude/settings.local.json
index f63c641..b7f95bc 100644
--- a/.claude/settings.local.json
+++ b/.claude/settings.local.json
@@ -157,7 +157,14 @@
"Bash(cargo tree *)",
"Bash(git restore *)",
"Bash(awk -F'\\\\t' '{print $4}')",
- "Bash(awk -F'\\\\t' '{print $1, $2}')"
+ "Bash(awk -F'\\\\t' '{print $1, $2}')",
+ "Bash(gh workflow *)",
+ "Bash(perl -i -pe 's/\\(ci_yaml: \\(None|Some\\\\\\(ci\\\\\\)\\),\\)/$1\\\\n external_namespaces: &[],\\\\n ignore_patterns: &[],/g' rivet-core/src/doc_check.rs)",
+ "Bash(xargs -I{} gh api {} --jq '.jobs[] | \"\\\\\\(.conclusion // \"running\"\\): \\\\\\(.name\\)\"')",
+ "Bash(perl -0777 -i -pe 's/\\(TraceabilityRule \\\\{[^}]*severity: [^,}]+,\\)\\(\\\\s*\\\\}\\)/$1\\\\n alternate_backlinks: vec![],$2/g' rivet-core/src/coverage.rs rivet-core/src/export.rs rivet-core/src/proofs.rs rivet-core/src/lifecycle.rs rivet-core/src/validate.rs)",
+ "Bash(perl -0777 -i -pe 's/\\(TraceabilityRule \\\\{[^}]*severity: [^,}]+,\\)\\(\\\\s*\\\\}\\)/$1\\\\n alternate_backlinks: vec![],$2/g' rivet-core/tests/proptest_operations.rs)",
+ "Bash(node scripts/diff-to-markdown.mjs --diff /tmp/malformed.json --pr 1 --run 1 --repo x/y)",
+ "Bash(node -e \"require\\('typescript'\\).transpileModule\\(require\\('fs'\\).readFileSync\\('rivet-delta.spec.ts','utf8'\\), { compilerOptions: { target: 'es2022', module: 'nodenext' } }\\)\")"
]
}
}
diff --git a/.github/workflows/rivet-delta.yml b/.github/workflows/rivet-delta.yml
new file mode 100644
index 0000000..894de18
--- /dev/null
+++ b/.github/workflows/rivet-delta.yml
@@ -0,0 +1,137 @@
+name: Rivet Delta
+
+# Runs on every PR that touches artifacts, schemas, or `rivet.yaml`.
+# Produces a graphical diff of the Rivet artifact graph between the PR's
+# merge base and the PR head, posts a markdown summary as a PR comment,
+# and uploads the full HTML dashboard + diff JSON as workflow artifacts
+# for deep inspection. The comment is updated in place on subsequent
+# pushes via a hidden marker (an HTML comment embedded in the comment body).
+#
+# This is informational — the workflow never blocks a merge. If the diff
+# can't be computed (parse errors, missing base, etc.) it posts a
+# warning comment instead of failing.
+
+on:
+ pull_request:
+ paths:
+ - "artifacts/**"
+ - "schemas/**"
+ - "rivet.yaml"
+ - "rivet-core/**"
+ - "rivet-cli/**"
+ - ".github/workflows/rivet-delta.yml"
+ - "scripts/diff-to-markdown.mjs"
+
+permissions:
+ contents: read
+ pull-requests: write
+
+concurrency:
+ group: rivet-delta-${{ github.event.pull_request.number }}
+ cancel-in-progress: true
+
+jobs:
+ delta:
+ name: Artifact delta
+ runs-on: ubuntu-latest
+ # All user-derived fields are captured into env vars at job scope so
+ # no `run:` step interpolates untrusted input into a shell context.
+ # base.sha is a 40-char SHA (safe) but enving everything is the
+ # GitHub-recommended pattern for workflows that accept PR input.
+ env:
+ PR_NUMBER: ${{ github.event.pull_request.number }}
+ BASE_SHA: ${{ github.event.pull_request.base.sha }}
+ RUN_ID: ${{ github.run_id }}
+ REPO: ${{ github.repository }}
+ steps:
+ - name: Checkout head
+ uses: actions/checkout@v6
+ with:
+ path: head
+ fetch-depth: 0
+
+ - name: Checkout base
+ uses: actions/checkout@v6
+ with:
+ path: base
+ ref: ${{ github.event.pull_request.base.sha }}
+ fetch-depth: 1
+
+ - uses: dtolnay/rust-toolchain@stable
+ - uses: Swatinem/rust-cache@v2
+ with:
+ workspaces: head
+
+ - name: Build rivet (release)
+ working-directory: head
+ run: cargo build --release -p rivet-cli
+
+ - name: Run diff (base vs head)
+ working-directory: head
+ continue-on-error: true
+ run: |
+ set -euo pipefail
+ mkdir -p delta-out
+ ./target/release/rivet diff \
+ --base "$GITHUB_WORKSPACE/base" \
+ --head "$GITHUB_WORKSPACE/head" \
+ --format json > delta-out/diff.json 2> delta-out/diff.stderr
+
+ - name: Run impact (since base)
+ working-directory: head
+ continue-on-error: true
+ run: |
+ set -euo pipefail
+ ./target/release/rivet impact \
+ --since "$BASE_SHA" \
+ --depth 5 \
+ --format json > delta-out/impact.json 2> delta-out/impact.stderr || \
+ echo '{"impacts": [], "error": "impact analysis failed"}' > delta-out/impact.json
+
+ - name: Export HTML dashboard for head
+ working-directory: head
+ continue-on-error: true
+ run: |
+ set -euo pipefail
+ ./target/release/rivet export \
+ --format html \
+ --output delta-out/html \
+ --version-label "pr-$PR_NUMBER" \
+ --offline || echo "export failed" > delta-out/export.err
+
+ - name: Generate markdown summary
+ id: summary
+ working-directory: head
+ run: |
+ set -euo pipefail
+ node scripts/diff-to-markdown.mjs \
+ --diff delta-out/diff.json \
+ --impact delta-out/impact.json \
+ --pr "$PR_NUMBER" \
+ --run "$RUN_ID" \
+ --repo "$REPO" \
+ > delta-out/comment.md
+ echo "comment_file=head/delta-out/comment.md" >> "$GITHUB_OUTPUT"
+
+ - name: Upload delta artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: rivet-delta-pr-${{ github.event.pull_request.number }}
+ path: head/delta-out/
+ retention-days: 14
+
+ - name: Find previous delta comment
+ id: find_comment
+ uses: peter-evans/find-comment@v3
+ with:
+ issue-number: ${{ github.event.pull_request.number }}
+ comment-author: github-actions[bot]
+ body-includes: "rivet-delta" # must match the hidden marker emitted by scripts/diff-to-markdown.mjs
+
+ - name: Post or update delta comment
+ uses: peter-evans/create-or-update-comment@v4
+ with:
+ issue-number: ${{ github.event.pull_request.number }}
+ comment-id: ${{ steps.find_comment.outputs.comment-id }}
+ edit-mode: replace
+ body-path: ${{ steps.summary.outputs.comment_file }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4e06ca7..3613887 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,124 @@
+## [0.4.2] — 2026-04-23
+
+
+
+This release closes 18 silent-accept findings discovered through dogfooding
+plus a customer bug-hunt pass. Theme: every place where invalid input used
+to silently succeed now surfaces a typed error or warning. Most are tiny
+behavioural changes; the cumulative effect is a much louder pipeline.
+
+### Correctness fixes (silent-accept antipattern)
+
+- **Required-link cardinality silently passed on flow-style YAML** —
+ `links: [{type: X, target: Y}]` parsed without error but the cardinality
+ counter saw zero, so a "required" link could be entirely absent and
+ `rivet validate` still returned PASS. Same hole for the named-field form
+ `targets: [SEC-AS-001]` derived from a schema's `link-fields[].name`. Both
+ shapes now produce identical `Vec` and the cardinality counter sees
+ them. (issue #3)
+- **Schema link-fields referencing undeclared link types** were emitted as
+ `Warning` from `rivet validate` (overall result still PASS) and silently
+ tolerated at schema load. Now `Error` with one diagnostic per
+ `(artifact, link-type)` pair, plus a new `Schema::validate_consistency()`
+ for fail-fast load-time checks. (issue #1)
+- **`{{group:TYPE:FIELD}}` two-arg form** discarded the second arg, treating
+ the type name as the field — every artifact bucketed into `"unset"`.
+- **`{{query (...) :limit 10}}`** colon-prefixed options were silently
+ dropped because the parser only recognised `key=value`. Now rejected with
+ a hint pointing to the correct syntax. New `fields=id,title,asil` option
+ customises columns.
+- **`{{coverage:typo-rule}} / {{matrix:UnknownType:Y}} / {{diagnostics:warnings}}`**
+ all rendered blank or all-results when given typo'd arguments. Each now
+ errors with a list of valid values.
+- **Standalone `{{artifact|links|table:…}}` on its own line** wrapped in
+  `<p>…</p>` producing invalid HTML nesting. Block-level embeds now emit
+  directly.
+- **`#[serde(deny_unknown_fields)]`** added to every schema-author struct
+ (`SchemaFile`, `SchemaMetadata`, `ArtifactTypeDef`, `FieldDef`,
+ `LinkFieldDef`, `LinkTypeDef`, `TraceabilityRule`, `ConditionalRule`,
+ `MistakeGuide`, `AlternateBacklink`) plus the artifact-level `Link` and
+ `Provenance` structs. Typo'd YAML keys now error at load time instead of
+ being silently dropped. New `LinkFieldDef.description` and
+ `TraceabilityRule.alternate_backlinks` to surface fields the bundled
+ schemas were already using.
+- **YAML CST parser** now handles inline `# comments` on mapping lines —
+ the LSP previously emitted `expected mapping key, found Some(Comment)`
+ on every CI workflow file. (issue #6b)
+- **`rivet docs check`** now honors `rivet.yaml` `docs:` paths instead of
+ only scanning the top-level `docs/` directory; projects with
+ `crates/*/docs` or `rivet/docs` layouts no longer get silently skipped.
+
+### LSP
+
+- **LSP resolves workspace schemas** — was reading from the launching
+ process's CWD. User-extended schema files referenced via
+ `rivet.yaml: schemas:` now load correctly. (issue #6a)
+
+### Dashboard / UI
+
+- **Artifact detail page** lists the documents that `[[ID]]`-reference it
+ (reverse index — closes the loop on the existing forward `/doc-linkage`
+ view).
+- **Mermaid + AADL diagrams** on artifact detail and `schema/show` pages
+ now wrap in `.svg-viewer` so they get the same zoom / fullscreen / popout
+ toolbar as graph and doc-linkage views. Parity test in
+ `diagram-viewer.spec.ts` pins the invariant.
+- **Document headings** carry stable `id="…"` slugs so in-page TOC links
+ and `#anchor` URLs navigate. (B1)
+- **Variants in the dashboard** are now documented in `getting-started.md`
+ and `what-is-rivet.md`. The auto-discovery convention, sidebar entry,
+ header dropdown and `/variants` overview are spelled out.
+
+### Documentation invariants
+
+- **External-namespace exemption** for `ArtifactIdValidity`. Three layers
+ to escape the `[A-Z]+-NNN`-pattern check when the prose legitimately
+ references external IDs (Jira, Polarion, hazard catalogs):
+ - `rivet.yaml: docs-check.external-namespaces: [GNV, GNR, HZO, UC]`
+ - `rivet.yaml: docs-check.ignore-patterns: []`
+  - HTML-comment directives: `<!-- rivet-docs-check: ignore GNV-396 -->`
+    (per-ID) or `<!-- rivet-docs-check: ignore-line -->` (whole line).
+- **AGENTS.md template** now ships an `ignore SC-1 REQ-001 FEAT-042`
+ directive so a fresh `rivet init && rivet docs check` doesn't fail on
+ its own example IDs. (issue #2)
+- **`AUDIT:` marker syntax** documented for the `ArtifactCounts`
+ invariant.
+- **`conditional-rules:` worked example** in `getting-started.md`.
+- **Managed-section HTML-comment marker contract** documented for
+ `rivet init --agents`. Content outside the markers is preserved across
+ regeneration.
+
+### CLI
+
+- **`rivet stamp` batch flags**: `--type PATTERN` (glob or exact type),
+ `--changed-since REF` (git-aware), `--missing-provenance`. No more
+ `xargs` loops to stamp a batch of artifacts. (issue #4)
+- **`rivet init --agents --force-regen`** now requires `--yes` to confirm
+ the destructive overwrite. The flag was previously one accidental
+ trigger away from destroying a hand-written AGENTS.md.
+- **`rivet embed artifact:X / links:X / table:T:F`** error message now
+ explains why the embed only renders inside markdown documents instead
+ of the cryptic "handled inline" string.
+
+### Looking ahead — Safety-Critical Rust roadmap
+
+The next planned release will start a workspace-wide clippy lint
+escalation aligned with the Safety-Critical Rust Consortium guidelines:
+`unwrap_used`, `expect_used`, `indexing_slicing`,
+`wildcard_enum_match_arm`, `as_conversions`, `arithmetic_side_effects`,
+and `print_stdout` / `print_stderr` outside the CLI binary. Each lint
+will be enabled at `warn` first with per-site `allow` annotations
+carrying a `// SAFETY-REVIEW:` rationale, then escalated to `deny` once
+the backlog is drained. A later release will raise the `rivet-core`
+coverage gate from 40% → 70% and flip mutation testing to a hard gate.
+
+The eight commits in this release already implement the SCRC pattern
+"no silent acceptance of malformed input" empirically — the lint
+escalation makes the same discipline mechanical.
+
## [0.4.1] — 2026-04-22
### Correctness fixes (HIGH)
diff --git a/Cargo.lock b/Cargo.lock
index 3c15192..a84e880 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2705,6 +2705,7 @@ dependencies = [
"lsp-types",
"notify",
"petgraph 0.7.1",
+ "regex",
"rivet-core",
"rmcp",
"rowan 0.16.2",
diff --git a/Cargo.toml b/Cargo.toml
index ac3b4fa..9db7269 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -7,7 +7,7 @@ members = [
]
[workspace.package]
-version = "0.4.1"
+version = "0.4.2"
authors = ["PulseEngine "]
edition = "2024"
license = "Apache-2.0"
diff --git a/docs/getting-started.md b/docs/getting-started.md
index 9235e11..9a443ea 100644
--- a/docs/getting-started.md
+++ b/docs/getting-started.md
@@ -332,6 +332,32 @@ incoming link of the specified type from one of the listed `from-types`.
Severity levels: `error` (validation fails), `warning` (reported but passes),
`info` (informational).
+### Conditional rules
+
+`conditional-rules:` apply a `then:` requirement only when a `when:`
+condition matches. Use this when a check should fire for a subset of
+artifacts (e.g. only approved status, only a specific type).
+
+```yaml
+artifact-types:
+ - name: design-decision
+ fields:
+ - { name: approver, type: string }
+ conditional-rules:
+ - name: valid-doc-requires-approver
+ description: Approved decisions must name an approver
+ when:
+ equals:
+ field: status
+ value: approved
+ then:
+ required-fields: [approver]
+ severity: error
+```
+
+Conditions: `equals` (exact value), `field-present` (any non-empty),
+`field-absent`. Requirements: `required-fields`, `required-links`.
+
---
## Link Types
@@ -809,6 +835,36 @@ rivet variant solve --model fm.yaml --variant v.yaml --binding bindings.yaml
rivet validate --model fm.yaml --variant v.yaml --binding bindings.yaml
```
+### Variants in the dashboard
+
+`rivet serve` auto-discovers variant configuration when these files exist
+(under `artifacts/` or any `source.path` from `rivet.yaml`):
+
+- `feature-model.yaml` or `feature_model.yaml`
+- `bindings.yaml` or `feature-bindings.yaml`
+- `variants/*.yaml` — one file per variant
+
+When a feature model is found, the dashboard exposes:
+
+1. **Sidebar "Variants" entry** with a count badge (declared variants).
+2. **Variant selector** in the header — picking one scopes every artifact
+ list, the link graph, and the coverage matrix to artifacts bound to
+ the variant's active features.
+3. **`/variants` overview page** — every declared variant with PASS/FAIL
+ constraint-solver status and the bound artifact set.
+
+If no feature model is present, the dashboard renders normally and
+`/variants` shows a friendly hint on how to create one.
+
+#### Filtering artifacts by variant
+
+In the dashboard via the header dropdown. On the CLI:
+
+```bash
+rivet list --variant eu-adas-c --format json
+rivet coverage --variant eu-adas-c
+```
+
---
## Zola Export
@@ -889,6 +945,75 @@ rivet init --hooks # installs commit-msg + pre-commit hooks
---
+## Documentation gates (`rivet docs check`)
+
+`rivet docs check` runs invariants over the project's markdown to catch
+drift between docs and the code/artifact reality. Eight invariants
+(see `rivet-core/src/doc_check.rs` for the full list), three of which
+have user-visible escape hatches.
+
+### `AUDIT:` marker — accept a numeric claim without a generated source
+
+`ArtifactCounts` flags prose like "42 requirements" unless either
+(a) a `{{stats:...}}` embed produces the number, or (b) an audit
+marker on the same line attests to a manual review:
+
+```markdown
+We currently track 42 requirements <!-- AUDIT: 2026-04-23 -->.
+```
+
+Use sparingly — the marker is a manual override, not an embed
+substitute. Audit dates are not parsed; the marker only suppresses the
+violation. Best practice: refresh the date when you re-verify.
+
+### `` skip directives
+
+Surgical exemptions for `ArtifactIdValidity` (the "artifact not found
+in store" check). Use these when a doc legitimately references
+external IDs that aren't in the local store:
+
+```markdown
+This requirement traces to GNV-396 <!-- rivet-docs-check: ignore GNV-396 -->.
+
+Or skip every ID on a single line:
+External refs: GNV-396, FOO-20819 <!-- rivet-docs-check: ignore-line -->
+```
+
+For project-wide exemption, prefer `rivet.yaml: docs-check.external-namespaces`:
+
+```yaml
+docs-check:
+ external-namespaces: [GNV, GNR, HZO, UC] # any prefix here is exempt
+ ignore-patterns: ["^TEMP-\\d+$"] # free-form regex escape hatch
+```
+
+### `<!-- rivet-docs-check: aspirational -->`
+
+Marks a doc as aspirational — `SubcommandReferences`, `EmbedTokenReferences`,
+and `ArtifactIdValidity` are skipped for that file. Used in `docs/design/*`
+where future-tense planning should not break the gate.
+
+### Re-running `rivet init --agents`
+
+The generated AGENTS.md/CLAUDE.md content is wrapped between
+managed-section start and end markers (HTML comments).
+Content **outside** those markers is preserved across regeneration —
+the file is never overwritten end-to-end as long as the markers exist.
+
+If you're upgrading a hand-written AGENTS.md to the managed format,
+use `rivet init --agents --migrate` once: it wraps the existing content
+below a fresh managed section without losing anything.
+
+`--force-regen` exists for unrecoverable cases where the markers got
+deleted, but it is **destructive** — it overwrites the entire file. To
+guard against accidental triggering, it now requires `--yes`:
+
+```bash
+rivet init --agents --force-regen --yes # destroys current AGENTS.md
+```
+
+---
+
## Next Steps
- Read the [schema reference](schemas.md) for full details on all built-in schemas
diff --git a/docs/what-is-rivet.md b/docs/what-is-rivet.md
index 26a3bf6..b017f49 100644
--- a/docs/what-is-rivet.md
+++ b/docs/what-is-rivet.md
@@ -175,6 +175,10 @@ reviews*.
`reachable-to` predicates in the evaluator.
- **Human reviews.** Whether the feature-model constraints encode the
real product-line rules.
+- **Dashboard.** When a feature model is in the project, `rivet serve`
+ auto-discovers it and renders a header dropdown plus a `/variants`
+ overview page. Selecting a variant scopes every artifact list, the
+ link graph, and the coverage matrix to that variant's active features.
### 4.6 LLM-assisted code review and authoring
diff --git a/npm/package.json b/npm/package.json
index 8d07ad5..9315a15 100644
--- a/npm/package.json
+++ b/npm/package.json
@@ -1,6 +1,6 @@
{
"name": "@pulseengine/rivet",
- "version": "0.4.1",
+ "version": "0.4.2",
"description": "Rivet — SDLC traceability, validation, and MCP server for safety-critical systems (ISO 26262, DO-178C, ASPICE, STPA)",
"main": "index.js",
"bin": {
diff --git a/rivet-cli/Cargo.toml b/rivet-cli/Cargo.toml
index 4ba71fb..006c34e 100644
--- a/rivet-cli/Cargo.toml
+++ b/rivet-cli/Cargo.toml
@@ -24,6 +24,7 @@ env_logger = { workspace = true }
log = { workspace = true }
serde_yaml = { workspace = true }
serde_json = { workspace = true }
+regex = { workspace = true }
axum = { workspace = true }
tokio = { workspace = true }
tower-http = { workspace = true }
diff --git a/rivet-cli/src/main.rs b/rivet-cli/src/main.rs
index 4ff5690..1895e0f 100644
--- a/rivet-cli/src/main.rs
+++ b/rivet-cli/src/main.rs
@@ -191,10 +191,16 @@ enum Command {
/// With --agents: overwrite existing AGENTS.md/CLAUDE.md even if
/// they have no rivet-managed markers. DESTRUCTIVE — replaces the
- /// whole file. Prefer --migrate when possible.
- #[arg(long, requires = "agents")]
+ /// whole file. Prefer --migrate when possible. Requires --yes to
+ /// confirm; otherwise refused with an error.
+ #[arg(long, requires = "agents", requires = "yes")]
force_regen: bool,
+ /// Confirm a destructive operation (currently required by
+ /// `--force-regen`). Without this flag, `--force-regen` is refused.
+ #[arg(long)]
+ yes: bool,
+
/// Install git hooks (commit-msg, pre-commit) that call rivet for validation
#[arg(long)]
hooks: bool,
@@ -726,7 +732,8 @@ enum Command {
/// Stamp artifact(s) with AI provenance metadata
Stamp {
- /// Artifact ID to stamp (or "all" for all artifacts in a file)
+ /// Artifact ID to stamp, "all" for every artifact, or a glob
+ /// pattern (e.g. `SEC-*`, `REQ-??`) matched against artifact IDs.
id: String,
/// Who created it: "human", "ai", or "ai-assisted"
#[arg(long, default_value = "ai-assisted")]
@@ -740,6 +747,19 @@ enum Command {
/// Human reviewer
#[arg(long)]
reviewed_by: Option,
+ /// Restrict stamping to artifacts whose `type` matches PATTERN.
+ /// Glob form (`SEC-*`) matches IDs by prefix; otherwise treated as
+ /// an exact artifact-type name (`requirement`, `threat-scenario`).
+ #[arg(long, value_name = "PATTERN")]
+ r#type: Option,
+ /// Stamp only artifacts whose source YAML changed since this git ref
+ /// (e.g. `--changed-since HEAD~1` or `--changed-since main`).
+ #[arg(long, value_name = "REF")]
+ changed_since: Option,
+ /// Stamp only artifacts that don't already have a `provenance:` block.
+ /// Combine with `id "all"` for "stamp every unstamped artifact".
+ #[arg(long)]
+ missing_provenance: bool,
},
/// Start the language server (LSP over stdio)
@@ -963,6 +983,7 @@ fn run(cli: Cli) -> Result {
agents,
migrate,
force_regen,
+ yes: _yes,
hooks,
} = &cli.command
{
@@ -1254,6 +1275,9 @@ fn run(cli: Cli) -> Result {
model,
session_id,
reviewed_by,
+ r#type,
+ changed_since,
+ missing_provenance,
} => cmd_stamp(
&cli,
id,
@@ -1261,6 +1285,9 @@ fn run(cli: Cli) -> Result {
model.as_deref(),
session_id.as_deref(),
reviewed_by.as_deref(),
+ r#type.as_deref(),
+ changed_since.as_deref(),
+ *missing_provenance,
),
}
}
@@ -2860,6 +2887,8 @@ fn cmd_init_agents(cli: &Cli, migrate: bool, force_regen: bool) -> Result
let agents_managed = format!(
r#"{sentinel}
+
+
# AGENTS.md — Rivet Project Instructions
> This section was generated by `rivet init --agents`. Re-run the command
@@ -6010,8 +6039,33 @@ fn cmd_docs_check(cli: &Cli, format: &str, fix: bool) -> Result {
let project_root = cli.project.canonicalize().unwrap_or_else(|_| cli.project.clone());
+ // Read project config so the docs scan honors any `docs:` paths from
+ // `rivet.yaml` (e.g. `rivet/docs`, `crates/*/docs`) — otherwise the gate
+ // silently misses every markdown file outside the top-level `docs/`.
+ // Missing or unreadable config degrades to the default `docs/` scan.
+ let project_config = rivet_core::load_project_config(&project_root.join("rivet.yaml")).ok();
+ let extra_doc_dirs: Vec = project_config
+ .as_ref()
+ .map(|c| c.docs.iter().map(std::path::PathBuf::from).collect())
+ .unwrap_or_default();
+ let external_namespaces: Vec = project_config
+ .as_ref()
+ .and_then(|c| c.docs_check.as_ref())
+ .map(|d| d.external_namespaces.clone())
+ .unwrap_or_default();
+ let ignore_patterns: Vec = project_config
+ .as_ref()
+ .and_then(|c| c.docs_check.as_ref())
+ .map(|d| {
+ d.ignore_patterns
+ .iter()
+ .filter_map(|p| regex::Regex::new(p).ok())
+ .collect()
+ })
+ .unwrap_or_default();
+
// 1. Collect docs.
- let docs = collect_docs(&project_root)
+ let docs = collect_docs(&project_root, &extra_doc_dirs)
.with_context(|| format!("scanning docs under {}", project_root.display()))?;
// 2. Build known-subcommand set from clap metadata (keeps check in sync
@@ -6058,6 +6112,8 @@ fn cmd_docs_check(cli: &Cli, format: &str, fix: bool) -> Result {
workspace_version,
store,
ci_yaml: ci_yaml_owned.as_deref(),
+ external_namespaces: &external_namespaces,
+ ignore_patterns: &ignore_patterns,
};
let invariants = default_invariants();
@@ -8465,6 +8521,70 @@ fn cmd_remove(cli: &Cli, id: &str, force: bool) -> Result {
}
/// Stamp an artifact (or all artifacts in its file) with AI provenance metadata.
+/// Minimal glob match supporting `*` (any sequence) and `?` (one char).
+/// Sufficient for `--type SEC-*` style stamping filters; we deliberately
+/// avoid pulling in the `glob` crate for this small need.
+fn glob_matches(pattern: &str, value: &str) -> bool {
+ fn rec(p: &[u8], v: &[u8]) -> bool {
+ match (p.first(), v.first()) {
+ (None, None) => true,
+ (Some(b'*'), _) => {
+ if rec(&p[1..], v) {
+ return true;
+ }
+ if v.is_empty() {
+ return false;
+ }
+ rec(p, &v[1..])
+ }
+ (Some(b'?'), Some(_)) => rec(&p[1..], &v[1..]),
+ (Some(pc), Some(vc)) if pc == vc => rec(&p[1..], &v[1..]),
+ _ => false,
+ }
+ }
+ rec(pattern.as_bytes(), value.as_bytes())
+}
+
+/// Return the relative paths of files changed since `git_ref` (committed
+/// changes plus uncommitted modifications). Empty vec on git error so
+/// `--changed-since` degrades to "no matches" rather than panicking.
+fn files_changed_since(project: &std::path::Path, git_ref: &str) -> Result> {
+ use std::process::Command as Cmd;
+ let mut paths = Vec::new();
+ // Committed changes since ref.
+ if let Ok(out) = Cmd::new("git")
+ .args(["diff", "--name-only", git_ref, "--"])
+ .current_dir(project)
+ .output()
+ {
+ if out.status.success() {
+ for line in String::from_utf8_lossy(&out.stdout).lines() {
+ if !line.is_empty() {
+ paths.push(line.to_string());
+ }
+ }
+ }
+ }
+ // Uncommitted modifications.
+ if let Ok(out) = Cmd::new("git")
+ .args(["status", "--porcelain"])
+ .current_dir(project)
+ .output()
+ {
+ if out.status.success() {
+ for line in String::from_utf8_lossy(&out.stdout).lines() {
+ if let Some(path) = line.get(3..) {
+ paths.push(path.to_string());
+ }
+ }
+ }
+ }
+ paths.sort();
+ paths.dedup();
+ Ok(paths)
+}
+
+#[allow(clippy::too_many_arguments)]
fn cmd_stamp(
cli: &Cli,
id: &str,
@@ -8472,6 +8592,9 @@ fn cmd_stamp(
model: Option<&str>,
session_id: Option<&str>,
reviewed_by: Option<&str>,
+ type_filter: Option<&str>,
+ changed_since: Option<&str>,
+ missing_provenance: bool,
) -> Result {
use rivet_core::mutate;
@@ -8510,24 +8633,75 @@ fn cmd_stamp(
let y = if m <= 2 { y + 1 } else { y };
let timestamp = format!("{y:04}-{m:02}-{d:02}T{hours:02}:{minutes:02}:{seconds:02}Z");
- // Collect artifact IDs to stamp
- let ids: Vec = if id == "all" {
- // Stamp every local artifact (skip externals with ':' prefix)
+ // Collect artifact IDs to stamp.
+ // Filter pipeline (applied in order, each shrinks the candidate set):
+ // 1. id == "all" → every local artifact;
+ // id is glob (contains '*' or '?') → match by id;
+ // id is plain → exact ID match.
+ // 2. --type pattern: glob on id (`SEC-*`) or exact artifact-type name.
+ // 3. --changed-since : artifact's source file changed in git.
+ // 4. --missing-provenance: artifact has no provenance: block.
+ let mut ids: Vec = if id == "all" || id.contains('*') || id.contains('?') {
+ let pattern = if id == "all" { "*" } else { id };
store
.iter()
.filter(|a| !a.id.contains(':'))
+ .filter(|a| glob_matches(pattern, &a.id))
.map(|a| a.id.clone())
.collect()
} else {
- // Single artifact
if !store.contains(id) {
anyhow::bail!("artifact '{id}' does not exist");
}
vec![id.to_string()]
};
+ if let Some(pat) = type_filter {
+ ids.retain(|aid| {
+ let Some(art) = store.get(aid) else {
+ return false;
+ };
+ // Glob form on ID, otherwise exact artifact-type name.
+ if pat.contains('*') || pat.contains('?') {
+ glob_matches(pat, &art.id)
+ } else {
+ art.artifact_type == pat
+ }
+ });
+ }
+
+ if let Some(git_ref) = changed_since {
+ let changed = files_changed_since(&cli.project, git_ref)?;
+ ids.retain(|aid| {
+ let Some(art) = store.get(aid) else {
+ return false;
+ };
+ let Some(sf) = art.source_file.as_ref() else {
+ return false;
+ };
+ let sf_str = sf.to_string_lossy();
+ changed
+ .iter()
+ .any(|c| sf_str.ends_with(c.as_str()) || c.ends_with(sf_str.as_ref()))
+ });
+ }
+
+ if missing_provenance {
+ ids.retain(|aid| {
+ store
+ .get(aid)
+ .and_then(|a| a.fields.get("provenance"))
+ .is_none()
+ });
+ }
+
if ids.is_empty() {
- anyhow::bail!("no artifacts found to stamp");
+ anyhow::bail!(
+ "no artifacts found to stamp (after filters: type={:?}, changed_since={:?}, missing_provenance={})",
+ type_filter,
+ changed_since,
+ missing_provenance
+ );
}
let mut stamped = 0;
@@ -9086,7 +9260,23 @@ fn cmd_lsp(cli: &Cli) -> Result {
// Initialize salsa database for incremental computation
let mut db = RivetDatabase::new();
let config_path = project_dir.join("rivet.yaml");
- let schemas_dir = resolve_schemas_dir(cli);
+ // Resolve schemas relative to the workspace `project_dir` (from
+ // root_uri), NOT `cli.project`, which is whatever directory the LSP
+ // process happened to be launched from. Without this override an LSP
+ // started from a different cwd would never see the workspace's
+ // user-defined schemas (e.g. schemas/ulinc.yaml), reporting
+ // "unknown artifact type" for every artifact that uses them.
+ let schemas_dir = if let Some(explicit) = &cli.schemas {
+ explicit.clone()
+ } else {
+ let workspace_schemas = project_dir.join("schemas");
+ if workspace_schemas.exists() {
+ workspace_schemas
+ } else {
+ resolve_schemas_dir(cli)
+ }
+ };
+ eprintln!("rivet lsp: schemas dir: {}", schemas_dir.display());
let config_opt = if config_path.exists() {
match rivet_core::load_project_config(&config_path) {
@@ -10281,6 +10471,41 @@ fn print_diagnostics(diagnostics: &[validate::Diagnostic]) {
// ── LSP unit tests ─────────────────────────────────────────────────────
+#[cfg(test)]
+mod stamp_glob_tests {
+ use super::glob_matches;
+
+ #[test]
+ fn glob_star_matches_prefix() {
+ assert!(glob_matches("SEC-*", "SEC-AS-001"));
+ assert!(glob_matches("SEC-*", "SEC-1"));
+ }
+
+ #[test]
+ fn glob_star_does_not_match_other_prefix() {
+ assert!(!glob_matches("SEC-*", "REQ-001"));
+ assert!(!glob_matches("SEC-*", "SECURITY-001"));
+ }
+
+ #[test]
+ fn glob_question_matches_single_char() {
+ assert!(glob_matches("REQ-???", "REQ-001"));
+ assert!(!glob_matches("REQ-???", "REQ-1234"));
+ }
+
+ #[test]
+ fn glob_exact_no_wildcards() {
+ assert!(glob_matches("REQ-001", "REQ-001"));
+ assert!(!glob_matches("REQ-001", "REQ-002"));
+ }
+
+ #[test]
+ fn glob_star_only_matches_anything() {
+ assert!(glob_matches("*", "REQ-001"));
+ assert!(glob_matches("*", ""));
+ }
+}
+
#[cfg(test)]
mod lsp_tests {
use super::*;
diff --git a/rivet-cli/src/render/artifacts.rs b/rivet-cli/src/render/artifacts.rs
index 77d00ab..342ccfe 100644
--- a/rivet-cli/src/render/artifacts.rs
+++ b/rivet-cli/src/render/artifacts.rs
@@ -423,10 +423,21 @@ pub(crate) fn render_artifact_detail(ctx: &RenderContext, id: &str) -> RenderRes
}
html.push_str("");
- // Diagram field — render mermaid or AADL diagram if present
+ // Diagram field — render mermaid or AADL diagram if present.
+ // Wraps in .svg-viewer so the toolbar (zoom-fit / fullscreen / popout)
+ // applies uniformly to artifact diagrams, graph views, and doc-linkage —
+ // same visual language regardless of where the diagram is shown.
if let Some(serde_yaml::Value::String(diagram)) = artifact.fields.get("diagram") {
+ html.push_str("</div>"); // .svg-viewer
+ html.push_str("</div>"); // .card
}
// ── Example YAML ─────────────────────────────────────────────────────
diff --git a/rivet-cli/src/serve/variant.rs b/rivet-cli/src/serve/variant.rs
index b9e26ab..528d108 100644
--- a/rivet-cli/src/serve/variant.rs
+++ b/rivet-cli/src/serve/variant.rs
@@ -277,6 +277,7 @@ mod tests {
commits: None,
externals: None,
baselines: None,
+ docs_check: None,
}
}
diff --git a/rivet-cli/tests/init_integration.rs b/rivet-cli/tests/init_integration.rs
index 77e556e..d5d92c9 100644
--- a/rivet-cli/tests/init_integration.rs
+++ b/rivet-cli/tests/init_integration.rs
@@ -192,10 +192,10 @@ fn agents_md_force_regen_overwrites_no_markers() {
let seed = "# OLD HAND AUTHORED CONTENT\n\nOLD_SENTINEL_DELETE_ME\n";
std::fs::write(&agents, seed).unwrap();
- let out = run_init_agents(&dir, &["--force-regen"]);
+ let out = run_init_agents(&dir, &["--force-regen", "--yes"]);
assert!(
out.status.success(),
- "--force-regen must succeed. stderr: {}",
+ "--force-regen --yes must succeed. stderr: {}",
String::from_utf8_lossy(&out.stderr)
);
let stderr = String::from_utf8_lossy(&out.stderr);
diff --git a/rivet-core/src/coverage.rs b/rivet-core/src/coverage.rs
index 60cb6b9..1e7a8ac 100644
--- a/rivet-core/src/coverage.rs
+++ b/rivet-core/src/coverage.rs
@@ -180,6 +180,7 @@ mod tests {
target_types: vec![],
from_types: vec!["design-decision".into()],
severity: Severity::Warning,
+ alternate_backlinks: vec![],
},
TraceabilityRule {
name: "dd-justification".into(),
@@ -190,6 +191,7 @@ mod tests {
target_types: vec!["requirement".into()],
from_types: vec![],
severity: Severity::Error,
+ alternate_backlinks: vec![],
},
];
Schema::merge(&[file])
@@ -314,6 +316,7 @@ mod tests {
target_types: vec![], // match any — makes the self-link trap reachable
from_types: vec![],
severity: Severity::Error,
+ alternate_backlinks: vec![],
}];
let schema = Schema::merge(&[file]);
@@ -356,6 +359,7 @@ mod tests {
target_types: vec![],
from_types: vec![], // match any
severity: Severity::Warning,
+ alternate_backlinks: vec![],
}];
let schema = Schema::merge(&[file]);
diff --git a/rivet-core/src/doc_check.rs b/rivet-core/src/doc_check.rs
index c80c2c9..2420946 100644
--- a/rivet-core/src/doc_check.rs
+++ b/rivet-core/src/doc_check.rs
@@ -123,6 +123,13 @@ pub struct DocCheckContext<'a> {
pub store: Option<&'a Store>,
/// Contents of `.github/workflows/ci.yml` if present.
pub ci_yaml: Option<&'a str>,
+ /// External-namespace prefixes (e.g. "GNV", "JIRA") that exempt
+ /// matching IDs from the ArtifactIdValidity invariant. Sourced from
+ /// `rivet.yaml: docs-check.external-namespaces`.
+ pub external_namespaces: &'a [String],
+ /// Pre-compiled regex patterns from `docs-check.ignore-patterns`.
+ /// Any ID match that satisfies one of these is skipped.
+ pub ignore_patterns: &'a [regex::Regex],
}
/// One invariant.
@@ -136,8 +143,16 @@ pub trait DocInvariant {
// ────────────────────────────────────────────────────────────────────────
/// Collect candidate doc files: `README.md`, `CHANGELOG.md`, `AGENTS.md`,
-/// and every `*.md` under `docs/`.
-pub fn collect_docs(project_root: &Path) -> std::io::Result> {
+/// `CLAUDE.md` at the project root, every `*.md` under `docs/`, and every
+/// `*.md` under the `extra_dirs` passed by the caller (typically the
+/// project's `rivet.yaml` `docs:` list — e.g. `rivet/docs`, `crates/*/docs`).
+/// Paths in `extra_dirs` may be absolute or relative to `project_root`.
+///
+/// De-dupes by relative path so overlapping roots don't add a doc twice.
+pub fn collect_docs(
+ project_root: &Path,
+ extra_dirs: &[PathBuf],
+) -> std::io::Result> {
let mut out = Vec::new();
for top in ["README.md", "CHANGELOG.md", "AGENTS.md", "CLAUDE.md"] {
@@ -148,11 +163,34 @@ pub fn collect_docs(project_root: &Path) -> std::io::Result> {
}
}
- let docs_dir = project_root.join("docs");
- if docs_dir.is_dir() {
- walk_md(&docs_dir, project_root, &mut out)?;
+ let mut walked: std::collections::BTreeSet<PathBuf> =
+ std::collections::BTreeSet::new();
+ let mut walk_once = |dir: PathBuf, out: &mut Vec| -> std::io::Result<()> {
+ if !dir.is_dir() {
+ return Ok(());
+ }
+ let canonical = dir.canonicalize().unwrap_or_else(|_| dir.clone());
+ if !walked.insert(canonical) {
+ return Ok(());
+ }
+ walk_md(&dir, project_root, out)
+ };
+
+ walk_once(project_root.join("docs"), &mut out)?;
+ for extra in extra_dirs {
+ let resolved = if extra.is_absolute() {
+ extra.clone()
+ } else {
+ project_root.join(extra)
+ };
+ walk_once(resolved, &mut out)?;
}
+ // Final de-dupe by rel_path in case a doc was reachable via both the
+ // default `docs/` and a configured extra that points at the same tree.
+ out.sort_by(|a, b| a.rel_path.cmp(&b.rel_path));
+ out.dedup_by(|a, b| a.rel_path == b.rel_path);
+
Ok(out)
}
@@ -515,7 +553,13 @@ impl DocInvariant for VersionConsistency {
// and unrelated semver (e.g. dependency numbers).
let raw = m.as_str();
let is_v_prefixed = raw.starts_with('v');
- let ctx_start = m.start().saturating_sub(32);
+ // Walk backwards to a char boundary to avoid panicking
+ // when the 32-byte window lands inside a multibyte
+ // character (e.g. an em-dash in a heading).
+ let mut ctx_start = m.start().saturating_sub(32);
+ while ctx_start > 0 && !doc.content.is_char_boundary(ctx_start) {
+ ctx_start -= 1;
+ }
let window = &doc.content[ctx_start..m.end()];
let is_version_context = window.to_ascii_lowercase().contains("version");
if !(is_v_prefixed || is_version_context) {
@@ -838,6 +882,12 @@ impl DocInvariant for ArtifactIdValidity {
// Collect IDs that live in the YAML front-matter block at the
// top of the file — those are *document* IDs, not artifact IDs.
let frontmatter_ids = collect_frontmatter_ids(&doc.content);
+ // Per-line skip set sourced from HTML-comment directives:
+ //
+ //
+ // The first form skips the named ID anywhere in the doc; the
+ // second skips every ID on the same line as the directive.
+ let (ignored_ids, ignored_lines) = collect_skip_directives(&doc.content);
let mut seen: BTreeMap<String, usize> = BTreeMap::new();
for cap in re.captures_iter(&doc.content) {
let m = cap.get(0).unwrap();
@@ -854,8 +904,28 @@ impl DocInvariant for ArtifactIdValidity {
if store.contains(&id) {
continue;
}
- // De-dupe per-file to avoid noisy output.
+ // External-namespace exemption (rivet.yaml docs-check.external-namespaces).
+ let prefix = id.split('-').next().unwrap_or("");
+ if ctx
+ .external_namespaces
+ .iter()
+ .any(|n| n.eq_ignore_ascii_case(prefix))
+ {
+ continue;
+ }
+ // Free-form regex exemption.
+ if ctx.ignore_patterns.iter().any(|re| re.is_match(&id)) {
+ continue;
+ }
+ // HTML-comment directives.
+ if ignored_ids.contains(&id) {
+ continue;
+ }
let line = line_for_offset(&doc.content, m.start());
+ if ignored_lines.contains(&line) {
+ continue;
+ }
+ // De-dupe per-file to avoid noisy output.
if seen.insert(id.clone(), line).is_some() {
continue;
}
@@ -873,6 +943,32 @@ impl DocInvariant for ArtifactIdValidity {
}
}
+/// Parse `` directives in a doc body.
+/// Returns (set-of-ignored-IDs, set-of-ignored-line-numbers).
+fn collect_skip_directives(content: &str) -> (BTreeSet<String>, BTreeSet<usize>) {
+ let mut ids = BTreeSet::new();
+ let mut lines = BTreeSet::new();
+ let re = regex::Regex::new(
+ r"",
+ )
+ .unwrap();
+ for cap in re.captures_iter(content) {
+ let m = cap.get(0).unwrap();
+ let line = line_for_offset(content, m.start());
+ let directive = cap.get(1).map(|x| x.as_str().trim()).unwrap_or("");
+ if let Some(rest) = directive.strip_prefix("ignore-line") {
+ // Optional `ignore-line N` to skip a specific other line; default = same line.
+ let target = rest.trim().parse::<usize>().ok().unwrap_or(line);
+ lines.insert(target);
+ } else if let Some(rest) = directive.strip_prefix("ignore ") {
+ for token in rest.split([' ', ',']).filter(|s| !s.is_empty()) {
+ ids.insert(token.to_string());
+ }
+ }
+ }
+ (ids, lines)
+}
+
/// True for IDs that look like artifact IDs but refer to external
/// standards, encodings, or advisories.
fn is_non_artifact_id(id: &str) -> bool {
@@ -996,6 +1092,8 @@ mod tests {
workspace_version: version,
store: None,
ci_yaml: None,
+ external_namespaces: &[],
+ ignore_patterns: &[],
}
}
@@ -1210,6 +1308,8 @@ jobs:
workspace_version: "0.4.0",
store: None,
ci_yaml: Some(ci),
+ external_namespaces: &[],
+ ignore_patterns: &[],
};
let v = SoftGateHonesty.check(&ctx);
assert_eq!(v.len(), 1);
@@ -1238,6 +1338,8 @@ jobs:
workspace_version: "0.4.0",
store: None,
ci_yaml: Some(ci),
+ external_namespaces: &[],
+ ignore_patterns: &[],
};
let v = SoftGateHonesty.check(&ctx);
assert!(v.is_empty(), "got: {v:?}");
@@ -1287,12 +1389,80 @@ jobs:
workspace_version: "0.4.0",
store: Some(&store),
ci_yaml: None,
+ external_namespaces: &[],
+ ignore_patterns: &[],
};
let v = ArtifactIdValidity.check(&ctx);
assert_eq!(v.len(), 1);
assert_eq!(v[0].claim, "REQ-999");
}
+ #[test]
+ fn artifact_id_validity_honors_external_namespaces_config() {
+ // rivet.yaml docs-check.external-namespaces: [GNV, GNR, HZO, UC]
+ // exempts those Jira/Polarion/hazard IDs from "artifact not found".
+ let store = Store::new();
+ let docs = vec![doc(
+ "docs/stakereqs.md",
+ "Traces to GNV-396, GNR-968, HZO-189, and UC-1.",
+ )];
+ let subs = BTreeSet::new();
+ let embeds = BTreeSet::new();
+ let exempted = vec![
+ "GNV".to_string(),
+ "GNR".to_string(),
+ "HZO".to_string(),
+ "UC".to_string(),
+ ];
+ let ctx = DocCheckContext {
+ project_root: Path::new("."),
+ docs: &docs,
+ known_subcommands: &subs,
+ known_embeds: &embeds,
+ workspace_version: "0.4.0",
+ store: Some(&store),
+ ci_yaml: None,
+ external_namespaces: &exempted,
+ ignore_patterns: &[],
+ };
+ let v = ArtifactIdValidity.check(&ctx);
+ assert!(v.is_empty(), "external IDs should be exempted: {v:?}");
+ }
+
+ #[test]
+ fn artifact_id_validity_honors_html_comment_skip_directive() {
+ // exempts a specific ID.
+ // exempts every ID on the
+ // same line as the directive.
+ let store = Store::new();
+ let docs = vec![doc(
+ "docs/x.md",
+ "Mention REQ-WAT-1. \n\
+ Other line: REQ-WAT-2 \n\
+ Still flagged: REQ-WAT-3.",
+ )];
+ let subs = BTreeSet::new();
+ let embeds = BTreeSet::new();
+ let ctx = DocCheckContext {
+ project_root: Path::new("."),
+ docs: &docs,
+ known_subcommands: &subs,
+ known_embeds: &embeds,
+ workspace_version: "0.4.0",
+ store: Some(&store),
+ ci_yaml: None,
+ external_namespaces: &[],
+ ignore_patterns: &[],
+ };
+ let v = ArtifactIdValidity.check(&ctx);
+ let claims: Vec<&str> = v.iter().map(|x| x.claim.as_str()).collect();
+ assert_eq!(
+ claims,
+ vec!["REQ-WAT-3"],
+ "only the un-skipped ID should be flagged: {v:?}",
+ );
+ }
+
#[test]
fn artifact_id_validity_ignores_external_id_schemes() {
let store = Store::new();
@@ -1310,6 +1480,8 @@ jobs:
workspace_version: "0.4.0",
store: Some(&store),
ci_yaml: None,
+ external_namespaces: &[],
+ ignore_patterns: &[],
};
let v = ArtifactIdValidity.check(&ctx);
assert!(v.is_empty(), "got: {v:?}");
@@ -1335,6 +1507,8 @@ jobs:
workspace_version: "0.4.0",
store: Some(&store),
ci_yaml: None,
+ external_namespaces: &[],
+ ignore_patterns: &[],
};
let v = ArtifactIdValidity.check(&ctx);
assert_eq!(v.len(), 1);
@@ -1358,6 +1532,8 @@ jobs:
workspace_version: "0.4.0",
store: Some(&store),
ci_yaml: None,
+ external_namespaces: &[],
+ ignore_patterns: &[],
};
let v = ArtifactIdValidity.check(&ctx);
assert!(v.is_empty(), "got: {v:?}");
diff --git a/rivet-core/src/document.rs b/rivet-core/src/document.rs
index af35fc9..5b1f843 100644
--- a/rivet-core/src/document.rs
+++ b/rivet-core/src/document.rs
@@ -487,15 +487,22 @@ pub fn render_to_html(
html.push_str("\n");
in_blockquote = false;
}
- let text = &trimmed[level as usize + 1..];
+ let raw_text = &trimmed[level as usize + 1..];
let text = resolve_inline(
- text,
+ raw_text,
&artifact_exists,
&artifact_info,
&document_exists,
&embed_resolver,
);
- html.push_str(&format!("{text}\n"));
+ // Slugify the heading text so in-page TOC links and external
+ // anchors (#section-name) actually navigate. Strip embedded
+ // HTML the inline resolver injected so anchors stay stable
+ // across embed-content changes.
+ let slug = slugify_heading(raw_text);
+ html.push_str(&format!(
+ "{text}\n"
+ ));
continue;
}
@@ -664,6 +671,47 @@ pub fn render_to_html(
continue;
}
+ // Standalone block-level embed — a line that is *entirely* a single
+ // `{{...}}` token emits block HTML (
,
, etc.) and must
+ // NOT be wrapped in
, which produces invalid nesting. Falls back
+ // to inline handling if the line also contains other text.
+ if trimmed.starts_with("{{")
+ && trimmed.ends_with("}}")
+ && trimmed.matches("{{").count() == 1
+ && trimmed.matches("}}").count() == 1
+ {
+ if in_paragraph {
+ html.push_str("
\n");
+ in_table = false;
+ table_header_done = false;
+ }
+ if in_blockquote {
+ html.push_str("\n");
+ in_blockquote = false;
+ }
+ html.push_str(&resolve_inline(
+ trimmed,
+ &artifact_exists,
+ &artifact_info,
+ &document_exists,
+ &embed_resolver,
+ ));
+ html.push('\n');
+ continue;
+ }
+
// Regular text → paragraph
if in_list {
html.push_str("\n");
@@ -967,6 +1015,43 @@ pub fn html_escape(s: &str) -> String {
.replace('"', """)
}
+/// Convert a markdown heading text to a stable URL slug.
+///
+/// Used as the `id=` attribute on `` elements so in-page TOC links
+/// (`[Section](#section-name)`) and external `#anchor` URLs actually
+/// navigate. Strips embedded HTML (the inline resolver may have
+/// substituted artifact cards), lowercases, replaces non-alphanumerics
+/// with hyphens, and collapses runs.
+pub fn slugify_heading(raw: &str) -> String {
+ // Strip any HTML tags the inline resolver may have left in the text.
+ let mut without_tags = String::with_capacity(raw.len());
+ let mut in_tag = false;
+ for ch in raw.chars() {
+ match (ch, in_tag) {
+ ('<', _) => in_tag = true,
+ ('>', true) => in_tag = false,
+ (_, false) => without_tags.push(ch),
+ _ => {}
+ }
+ }
+ let lowered = without_tags.to_lowercase();
+ let mut slug = String::with_capacity(lowered.len());
+ let mut prev_dash = true; // suppress leading hyphens
+ for ch in lowered.chars() {
+ if ch.is_ascii_alphanumeric() {
+ slug.push(ch);
+ prev_dash = false;
+ } else if !prev_dash {
+ slug.push('-');
+ prev_dash = true;
+ }
+ }
+ while slug.ends_with('-') {
+ slug.pop();
+ }
+ slug
+}
+
// ---------------------------------------------------------------------------
// Rich embed rendering helpers
// ---------------------------------------------------------------------------
@@ -1667,9 +1752,10 @@ See frontmatter.
fn render_html_headings() {
let doc = parse_document(SAMPLE_DOC, None).unwrap();
let html = render_to_html(&doc, |_| true, |_| None, |_| false, noop_embed);
- assert!(html.contains("
"));
- assert!(html.contains("
"));
- assert!(html.contains("
"));
+ // Headings now carry id="..." for TOC anchors — match the open tag.
+ assert!(html.contains("
"), "got: {html}");
+ assert!(html.contains("
"), "got: {html}");
+ assert!(
+ html.contains("
"),
+ "got: {html}",
+ );
+ }
+
+ #[test]
+ fn slugify_heading_handles_edge_cases() {
+ use super::slugify_heading;
+ assert_eq!(slugify_heading("Top Level"), "top-level");
+ assert_eq!(slugify_heading(" spaces "), "spaces");
+ assert_eq!(slugify_heading("Multi --- dashes"), "multi-dashes");
+ assert_eq!(slugify_heading("with `code` span"), "with-code-span");
+ assert_eq!(slugify_heading("Section 1.2.3"), "section-1-2-3");
+ assert_eq!(slugify_heading(""), "");
+ }
+
+ // rivet: verifies REQ-033
+ #[test]
+ fn standalone_embed_line_not_wrapped_in_paragraph() {
+ // Regression guard: when `{{table:...}}`, `{{artifact:ID}}`, or
+ // `{{links:ID}}` appears on its own line, the resolved block-level
+ // HTML must emit directly — NOT inside a
, which produces
+ // invalid nesting like
...
.
+ let content =
+ "---\nid: DOC-BE\ntitle: Blocks\n---\n{{artifact:REQ-001}}\n\n{{links:REQ-001}}\n";
+ let doc = parse_document(content, None).unwrap();
+ let html = render_to_html(&doc, |_| true, rich_info_fn, |_| false, noop_embed);
+ assert!(
+ !html.contains("
: {html}"
+ );
+ assert!(
+ !html.contains("
: {html}"
+ );
+ }
+
// rivet: verifies REQ-033
#[test]
fn unknown_modifier_falls_back_to_default() {
diff --git a/rivet-core/src/embed.rs b/rivet-core/src/embed.rs
index bf4af77..ec7eba9 100644
--- a/rivet-core/src/embed.rs
+++ b/rivet-core/src/embed.rs
@@ -265,6 +265,16 @@ impl EmbedRequest {
for token in tail.split_whitespace() {
if let Some((key, val)) = token.split_once('=') {
options.insert(key.to_string(), val.to_string());
+ } else {
+ // Reject colon-prefixed syntax and other non-`key=value`
+ // tokens so they don't get silently dropped (SC-EMBED-3).
+ return Err(EmbedError {
+ kind: EmbedErrorKind::MalformedSyntax(format!(
+ "unrecognized option `{token}` — use `key=value` form \
+ (e.g. `limit=10`, not `:limit 10`)"
+ )),
+ raw_text: input.to_string(),
+ });
}
}
return Ok(EmbedRequest {
@@ -334,17 +344,27 @@ impl EmbedRequest {
pub fn resolve_embed(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> Result {
match request.name.as_str() {
"stats" => Ok(render_stats(request, ctx)),
- "coverage" => Ok(render_coverage(request, ctx)),
- "diagnostics" => Ok(render_diagnostics(request, ctx)),
- "matrix" => Ok(render_matrix(request, ctx)),
+ "coverage" => render_coverage(request, ctx),
+ "diagnostics" => render_diagnostics(request, ctx),
+ "matrix" => render_matrix(request, ctx),
"query" => render_query(request, ctx),
"group" => render_group(request, ctx),
- // Legacy embeds (artifact, links, table) are still handled by
- // resolve_inline in document.rs — they should never reach here.
+ // Legacy embeds (artifact, links, table) are rendered by
+ // `resolve_inline` while a markdown document is being processed —
+ // not by this top-level resolver. So `rivet embed table:foo:bar`
+ // can't produce a card on its own; it only renders correctly when
+ // the token appears inside a doc that runs through the markdown
+ // pipeline (rivet serve, rivet export --format html, embeds in
+ // rendered prose). Inform the caller plainly so they don't waste
+ // time chasing the empty result.
"artifact" | "links" | "table" => Err(EmbedError {
- kind: EmbedErrorKind::MalformedSyntax(
- "artifact/links/table embeds are handled inline".into(),
- ),
+ kind: EmbedErrorKind::MalformedSyntax(format!(
+ "{} embed renders inside markdown documents (rivet serve / \
+ rivet export --format html). The CLI `rivet embed` command \
+ can't render it standalone — embed it in a doc and view \
+ the rendered output instead.",
+ request.name
+ )),
raw_text: format!("{request:?}"),
}),
other => Err(EmbedError {
@@ -544,10 +564,36 @@ fn severity_rank(s: crate::schema::Severity) -> u8 {
// ── Coverage renderer ───────────────────────────────────────────────────
/// Render `{{coverage}}` or `{{coverage:RULE_NAME}}`.
-fn render_coverage(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> String {
+fn render_coverage(
+ request: &EmbedRequest,
+ ctx: &EmbedContext<'_>,
+) -> Result<String, EmbedError> {
let report = coverage::compute_coverage(ctx.store, ctx.schema, ctx.graph);
let filter_rule = request.args.first().map(|s| s.as_str());
+ // If the user named a specific rule, verify it exists in the report
+ // before silently returning an empty table. A typo'd rule name used
+ // to render as "no coverage rules defined" — indistinguishable from
+ // a project that genuinely has no rules.
+ if let Some(name) = filter_rule {
+ let exists = report.entries.iter().any(|e| e.rule_name == name);
+ if !exists {
+ let known: Vec<&str> =
+ report.entries.iter().map(|e| e.rule_name.as_str()).collect();
+ let hint = if known.is_empty() {
+ "no traceability rules are defined in the loaded schemas".to_string()
+ } else {
+ format!("known rules: {}", known.join(", "))
+ };
+ return Err(EmbedError {
+ kind: EmbedErrorKind::MalformedSyntax(format!(
+ "coverage rule '{name}' not found — {hint}"
+ )),
+ raw_text: format!("{request:?}"),
+ });
+ }
+ }
+
let entries: Vec<_> = report
.entries
.iter()
@@ -555,7 +601,7 @@ fn render_coverage(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> String {
.collect();
if entries.is_empty() {
- return "
No coverage rules defined.
\n".to_string();
+ return Ok("
No coverage rules defined.
\n".to_string());
}
let mut html = String::from(
@@ -610,7 +656,7 @@ fn render_coverage(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> String {
}
html.push_str("\n");
- html
+ Ok(html)
}
// ── Diagnostics renderer ────────────────────────────────────────────────
@@ -618,27 +664,51 @@ fn render_coverage(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> String {
/// Render `{{diagnostics}}` or `{{diagnostics:SEVERITY}}`.
///
/// Without args: all diagnostics. With severity arg: filtered by severity.
-fn render_diagnostics(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> String {
+/// Unknown severity strings are rejected (regression guard for v0.4.1
+/// silent-accept where `{{diagnostics:warnings}}` returned everything).
+fn render_diagnostics(
+ request: &EmbedRequest,
+ ctx: &EmbedContext<'_>,
+) -> Result<String, EmbedError> {
use crate::schema::Severity;
let filter_severity = request.args.first().map(|s| s.as_str());
+ if let Some(sev) = filter_severity {
+ if !matches!(sev, "error" | "warning" | "info") {
+ return Err(EmbedError {
+ kind: EmbedErrorKind::MalformedSyntax(format!(
+ "diagnostics severity '{sev}' is not recognised — \
+ use 'error', 'warning', or 'info'"
+ )),
+ raw_text: format!("{request:?}"),
+ });
+ }
+ }
let filtered: Vec<_> = ctx
.diagnostics
.iter()
.filter(|d| match filter_severity {
+ // The early validation pass at the top of render_diagnostics
+ // rejects unknown severities, so we know `filter_severity` is
+ // either None or one of the three supported strings here.
Some("error") => d.severity == Severity::Error,
Some("warning") => d.severity == Severity::Warning,
Some("info") => d.severity == Severity::Info,
- _ => true,
+ None => true,
+ // Defensive: any other value would have been rejected upstream.
+ // If this arm fires, there's a contract bug — fail loudly.
+ Some(other) => unreachable!(
+ "render_diagnostics severity filter '{other}' should have been rejected upstream",
+ ),
})
.collect();
if filtered.is_empty() {
let scope = filter_severity.unwrap_or("any");
- return format!(
+ return Ok(format!(
"
No diagnostics ({scope} severity).
\n"
- );
+ ));
}
let mut html = String::from(
@@ -702,7 +772,7 @@ fn render_diagnostics(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> String
);
html.push_str("\n");
- html
+ Ok(html)
}
// ── Matrix renderer ─────────────────────────────────────────────────────
@@ -711,10 +781,45 @@ fn render_diagnostics(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> String
///
/// Without args: renders one matrix per traceability rule in the schema.
/// With args: renders a specific matrix for the given source→target types.
-fn render_matrix(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> String {
+/// Unknown artifact-type names are rejected so a typo no longer renders
+/// a silent blank table.
+fn render_matrix(
+ request: &EmbedRequest,
+ ctx: &EmbedContext<'_>,
+) -> Result<String, EmbedError> {
let from_type = request.args.first().map(|s| s.as_str());
let to_type = request.args.get(1).map(|s| s.as_str());
+ // Validate explicit type names against the loaded schema before
+ // rendering anything — silent acceptance of an unknown type used to
+ // render an empty matrix indistinguishable from "rule applies but
+ // nothing covered yet". The user couldn't tell their typo from a
+ // genuine coverage gap.
+ for (label, maybe_name) in [("from", from_type), ("to", to_type)] {
+ if let Some(name) = maybe_name {
+ if !ctx.schema.artifact_types.contains_key(name) {
+ let mut known: Vec<&str> = ctx
+ .schema
+ .artifact_types
+ .keys()
+ .map(String::as_str)
+ .collect();
+ known.sort();
+ let hint = if known.is_empty() {
+ "no artifact types are loaded".to_string()
+ } else {
+ format!("known: {}", known.join(", "))
+ };
+ return Err(EmbedError {
+ kind: EmbedErrorKind::MalformedSyntax(format!(
+ "matrix {label}-type '{name}' is not a known artifact type — {hint}"
+ )),
+ raw_text: format!("{request:?}"),
+ });
+ }
+ }
+ }
+
let mut html = String::from("
\n");
- html
+ Ok(html)
}
/// Render a single traceability matrix as an HTML table.
@@ -919,6 +1024,22 @@ fn render_query(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> Result = request
+ .options
+ .get("fields")
+ .map(|s| {
+ s.split(',')
+ .map(|p| p.trim().to_string())
+ .filter(|p| !p.is_empty())
+ .collect()
+ })
+ .filter(|v: &Vec<String>| !v.is_empty())
+ .unwrap_or_else(|| DEFAULT_FIELDS.iter().map(|s| s.to_string()).collect());
+
let mut matches: Vec<&crate::model::Artifact> = Vec::new();
let mut total = 0usize;
for artifact in ctx.store.iter() {
@@ -937,21 +1058,29 @@ fn render_query(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> Result
\
-
ID
Type
Title
Status
\
-
\n",
- );
- for a in &matches {
- let _ = writeln!(
+ html.push_str("
");
+ for f in &fields {
+ let _ = write!(
html,
- "
{id}
{typ}
{title}
{status}
",
- id = document::html_escape(&a.id),
- typ = document::html_escape(&a.artifact_type),
- title = document::html_escape(&a.title),
- status = document::html_escape(a.status.as_deref().unwrap_or("-")),
+ "
{}
",
+ document::html_escape(&column_heading(f))
);
}
+ html.push_str("\n");
+ for a in &matches {
+ html.push_str("
");
+ for f in &fields {
+ let raw = read_artifact_field(a, f);
+ let cell = if raw.is_empty() { "-".to_string() } else { raw };
+ let wrapped = if f == "id" {
+ format!("{}", document::html_escape(&cell))
+ } else {
+ document::html_escape(&cell)
+ };
+ let _ = write!(html, "
{wrapped}
");
+ }
+ html.push_str("
\n");
+ }
html.push_str("
\n");
if total > matches.len() {
@@ -980,13 +1109,14 @@ fn render_query(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> Result) -> Result {
- let Some(field) = request.args.first() else {
+ let Some(first) = request.args.first() else {
return Err(EmbedError {
kind: EmbedErrorKind::MalformedSyntax(
"group embed requires a field name: {{group:status}}".into(),
@@ -994,16 +1124,28 @@ fn render_group(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> Result (Some(first), second),
+ _ => (None, first),
+ };
+
let mut counts: BTreeMap<String, usize> = BTreeMap::new();
for a in ctx.store.iter() {
+ if let Some(t) = type_filter
+ && a.artifact_type != t
+ {
+ continue;
+ }
let raw = read_artifact_field(a, field);
// Treat empty/missing as "unset" so the totals always add up.
let bucket = if raw.is_empty() {
@@ -1044,6 +1186,22 @@ fn render_group(request: &EmbedRequest, ctx: &EmbedContext<'_>) -> Result String {
+ match name {
+ "id" => "ID".to_string(),
+ _ => {
+ let mut chars = name.chars();
+ match chars.next() {
+ Some(c) => c.to_ascii_uppercase().to_string() + chars.as_str(),
+ None => String::new(),
+ }
+ }
+ }
+}
+
/// Read a single string value for an artifact field by name.
///
/// Handles the well-known top-level fields (id, type, title, status,
@@ -1369,6 +1527,66 @@ mod tests {
assert!(result.is_ok(), "matrix should be a known embed type");
}
+ #[test]
+ fn matrix_embed_rejects_unknown_from_type() {
+ // Regression: {{matrix:UnknownType:OtherType}} used to render a
+ // blank table (silent accept). Now must error with a hint listing
+ // known types.
+ let ctx = EmbedContext::empty();
+ let req = EmbedRequest::parse("matrix:does-not-exist:other").unwrap();
+ let err = resolve_embed(&req, &ctx).unwrap_err();
+ let msg = match &err.kind {
+ EmbedErrorKind::MalformedSyntax(m) => m.clone(),
+ other => panic!("expected MalformedSyntax, got {other:?}"),
+ };
+ assert!(
+ msg.contains("does-not-exist"),
+ "error must name the unknown type: {msg}"
+ );
+ assert!(
+ msg.contains("from-type"),
+ "error must clarify which arg was wrong: {msg}"
+ );
+ }
+
+ #[test]
+ fn diagnostics_embed_rejects_unknown_severity() {
+ // Regression: {{diagnostics:warnings}} (typo) used to silently
+ // return ALL diagnostics because the severity match fell to the
+ // `_ => true` arm.
+ let ctx = EmbedContext::empty();
+ let req = EmbedRequest::parse("diagnostics:warnings").unwrap();
+ let err = resolve_embed(&req, &ctx).unwrap_err();
+ match &err.kind {
+ EmbedErrorKind::MalformedSyntax(m) => {
+ assert!(
+ m.contains("warnings") && m.contains("warning"),
+ "error must name the bad input and the correct value: {m}"
+ );
+ }
+ other => panic!("expected MalformedSyntax, got {other:?}"),
+ }
+ }
+
+ #[test]
+ fn coverage_embed_rejects_unknown_filter_rule() {
+ // Regression: {{coverage:typo-rule}} used to render "no coverage
+ // rules defined" — indistinguishable from a project that has no
+ // rules. Now errors with a list of known rule names.
+ let ctx = EmbedContext::empty();
+ let req = EmbedRequest::parse("coverage:does-not-exist").unwrap();
+ let err = resolve_embed(&req, &ctx).unwrap_err();
+ match &err.kind {
+ EmbedErrorKind::MalformedSyntax(m) => {
+ assert!(
+ m.contains("does-not-exist"),
+ "error must name the unknown rule: {m}"
+ );
+ }
+ other => panic!("expected MalformedSyntax, got {other:?}"),
+ }
+ }
+
#[test]
fn matrix_with_types_parses() {
let req = EmbedRequest::parse("matrix:requirement:feature").unwrap();
@@ -1618,6 +1836,56 @@ mod tests {
assert!(matches!(err.kind, EmbedErrorKind::MalformedSyntax(_)));
}
+ #[test]
+ fn query_embed_fields_option_customizes_columns() {
+ // `fields=id,title,asil` should produce the three columns in order.
+ let mut a = plain("REQ-1", "requirement", Some("Auth"), &[]);
+ a.fields
+ .insert("asil".into(), serde_yaml::Value::String("ASIL-B".into()));
+ let store = make_store(vec![a]);
+ let schema = Schema::merge(&[]);
+ let graph = LinkGraph::build(&store, &schema);
+ let html = run_embed(
+ "query:(= type \"requirement\") fields=id,title,asil",
+ &store,
+ &schema,
+ &graph,
+ )
+ .unwrap();
+ assert!(html.contains("
ID
"), "expected ID column: {html}");
+ assert!(
+ html.contains("
"),
+ "expected Asil column: {html}"
+ );
+ assert!(html.contains("ASIL-B"), "custom field value missing: {html}");
+ // Default Status column must be absent when `fields=` is overridden.
+ assert!(
+ !html.contains("
Status
"),
+ "Status column should be suppressed when fields= is set: {html}"
+ );
+ }
+
+ #[test]
+ fn query_embed_rejects_colon_prefixed_option_syntax() {
+ // Regression guard: `:limit 10` used to be silently dropped because
+ // the parser only recognized `key=value` tokens. Now it is rejected
+ // with a hint steering the user to the correct syntax.
+ let err = EmbedRequest::parse("query:(= type \"requirement\") :limit 10")
+ .unwrap_err();
+ let msg = match &err.kind {
+ EmbedErrorKind::MalformedSyntax(m) => m.clone(),
+ other => panic!("expected MalformedSyntax, got {other:?}"),
+ };
+ assert!(
+ msg.contains("key=value"),
+ "error should explain the correct syntax, got: {msg}"
+ );
+ }
+
// ── stats:type:NAME granular form ───────────────────────────────
#[test]
@@ -1737,6 +2005,42 @@ mod tests {
assert!(html.contains("unset"), "got: {html}");
}
+ #[test]
+ fn group_embed_two_arg_scopes_by_type() {
+ // Two-arg form: {{group:TYPE:FIELD}} — scope to artifacts of TYPE,
+ // group those by FIELD. Regression guard for the silent-accept bug
+ // where the second arg was discarded and every artifact fell into
+ // bucket "unset" because FIELD was read as the literal type name.
+ let mut req_a = plain("REQ-1", "requirement", None, &[]);
+ req_a.fields.insert(
+ "asil".into(),
+ serde_yaml::Value::String("ASIL-B".into()),
+ );
+ let mut req_b = plain("REQ-2", "requirement", None, &[]);
+ req_b.fields.insert(
+ "asil".into(),
+ serde_yaml::Value::String("ASIL-D".into()),
+ );
+ // Non-requirement artifact — should be excluded by type filter.
+ let mut test_a = plain("TEST-1", "test", None, &[]);
+ test_a.fields.insert(
+ "asil".into(),
+ serde_yaml::Value::String("ASIL-B".into()),
+ );
+ let store = make_store(vec![req_a, req_b, test_a]);
+ let schema = Schema::merge(&[]);
+ let graph = LinkGraph::build(&store, &schema);
+ let html =
+ run_embed("group:requirement:asil", &store, &schema, &graph).unwrap();
+ assert!(html.contains("ASIL-B"), "got: {html}");
+ assert!(html.contains("ASIL-D"), "got: {html}");
+ // Total must be 2 (only the two requirements), not 3.
+ assert!(
+ html.contains("2"),
+ "type filter did not exclude non-requirement artifact — got: {html}"
+ );
+ }
+
#[test]
fn group_embed_rejects_empty_field() {
let store = Store::new();
diff --git a/rivet-core/src/export.rs b/rivet-core/src/export.rs
index b07e64e..35dba01 100644
--- a/rivet-core/src/export.rs
+++ b/rivet-core/src/export.rs
@@ -2985,6 +2985,7 @@ mod tests {
target_types: vec![],
from_types: vec!["design-decision".into()],
severity: Severity::Warning,
+ alternate_backlinks: vec![],
}];
Schema::merge(&[file])
}
diff --git a/rivet-core/src/lifecycle.rs b/rivet-core/src/lifecycle.rs
index d9298d6..192eaf3 100644
--- a/rivet-core/src/lifecycle.rs
+++ b/rivet-core/src/lifecycle.rs
@@ -166,6 +166,7 @@ mod tests {
target_types: vec![],
from_types: vec!["feature".into()],
severity: Severity::Warning,
+ alternate_backlinks: vec![],
}]);
let artifacts = vec![make_artifact(
"REQ-001",
@@ -191,6 +192,7 @@ mod tests {
target_types: vec![],
from_types: vec!["feature".into()],
severity: Severity::Warning,
+ alternate_backlinks: vec![],
}]);
let artifacts = vec![
make_artifact("REQ-001", "requirement", Some("implemented"), vec![]),
@@ -223,6 +225,7 @@ mod tests {
target_types: vec![],
from_types: vec!["feature".into()],
severity: Severity::Warning,
+ alternate_backlinks: vec![],
}]);
let artifacts = vec![make_artifact(
"REQ-001",
@@ -247,6 +250,7 @@ mod tests {
target_types: vec![],
from_types: vec!["feature".into(), "design-decision".into()],
severity: Severity::Warning,
+ alternate_backlinks: vec![],
}]);
let artifacts = vec![
make_artifact("REQ-001", "requirement", Some("implemented"), vec![]),
diff --git a/rivet-core/src/model.rs b/rivet-core/src/model.rs
index 488f1dc..7e1decf 100644
--- a/rivet-core/src/model.rs
+++ b/rivet-core/src/model.rs
@@ -11,6 +11,7 @@ pub const TRACED_STATUSES: &[&str] = &["implemented", "done", "approved", "accep
/// A typed, directional link from one artifact to another.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+#[serde(deny_unknown_fields)]
pub struct Link {
/// Semantic type of this link (e.g., "leads-to-loss", "verifies").
pub link_type: String,
@@ -24,6 +25,7 @@ pub struct Link {
/// workflow, along with optional details about the model, session, and
/// human reviewer.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+#[serde(deny_unknown_fields)]
pub struct Provenance {
/// Origin of the artifact: "human", "ai", or "ai-assisted".
#[serde(rename = "created-by")]
@@ -266,6 +268,31 @@ pub struct ProjectConfig {
/// Order matters: earlier baselines are cumulatively included in later ones.
#[serde(default)]
pub baselines: Option<Vec<String>>,
+ /// Optional `rivet docs check` configuration — exemptions, ignore lists, etc.
+ #[serde(default, rename = "docs-check")]
+ pub docs_check: Option<DocsCheckConfig>,
+}
+
+/// Configuration block for `rivet docs check`. Mapped from `rivet.yaml`'s
+/// top-level `docs-check:` key. Used to exempt legitimate external IDs
+/// (Jira tickets, Polarion requirements, hazard catalogs, etc.) from the
+/// `ArtifactIdValidity` invariant so that StakeholderRequirements docs
+/// can reference upstream IDs without breaking the gate.
+#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+pub struct DocsCheckConfig {
+ /// External-namespace prefixes that are valid IDs even though no
+ /// matching artifact exists in the local store. Example:
+ /// `external-namespaces: [GNV, GNR, HZO, UC, FOO]` exempts every
+ /// `GNV-396`, `GNR-968`, `HZO-189`, `UC-1`, `FOO-20819` from the
+ /// "artifact not found" violation. Match is on the prefix up to
+ /// the first `-`.
+ #[serde(default, rename = "external-namespaces")]
+ pub external_namespaces: Vec<String>,
+ /// Free-form regex patterns to skip — escape hatch when the
+ /// namespace mechanism isn't enough. Patterns are applied to the
+ /// matched ID text and skip the violation when any one matches.
+ #[serde(default, rename = "ignore-patterns")]
+ pub ignore_patterns: Vec<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
diff --git a/rivet-core/src/proofs.rs b/rivet-core/src/proofs.rs
index bfe78a6..9179224 100644
--- a/rivet-core/src/proofs.rs
+++ b/rivet-core/src/proofs.rs
@@ -101,6 +101,7 @@ mod proofs {
target_types: vec![],
from_types: vec![],
severity: Severity::Warning,
+ alternate_backlinks: vec![],
}],
conditional_rules: vec![],
}])
diff --git a/rivet-core/src/schema.rs b/rivet-core/src/schema.rs
index e5ff5d0..3e071da 100644
--- a/rivet-core/src/schema.rs
+++ b/rivet-core/src/schema.rs
@@ -13,6 +13,7 @@ use crate::error::Error;
/// Top-level structure of a schema YAML file.
#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
pub struct SchemaFile {
pub schema: SchemaMetadata,
#[serde(default, rename = "base-fields")]
@@ -28,6 +29,7 @@ pub struct SchemaFile {
}
#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
pub struct SchemaMetadata {
pub name: String,
pub version: String,
@@ -46,6 +48,7 @@ pub struct SchemaMetadata {
// ── Artifact type definition ─────────────────────────────────────────────
#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
pub struct ArtifactTypeDef {
pub name: String,
pub description: String,
@@ -92,6 +95,7 @@ pub struct ArtifactTypeDef {
/// A common mistake entry with problem description and fix command.
#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
pub struct MistakeGuide {
pub problem: String,
#[serde(default, rename = "fix-command")]
@@ -99,6 +103,7 @@ pub struct MistakeGuide {
}
#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
pub struct FieldDef {
pub name: String,
#[serde(rename = "type")]
@@ -112,6 +117,7 @@ pub struct FieldDef {
}
#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
pub struct LinkFieldDef {
pub name: String,
#[serde(rename = "link-type")]
@@ -122,6 +128,9 @@ pub struct LinkFieldDef {
pub required: bool,
#[serde(default)]
pub cardinality: Cardinality,
+ /// Free-form description shown in schema docs and AI hints.
+ #[serde(default)]
+ pub description: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq, Eq)]
@@ -137,6 +146,7 @@ pub enum Cardinality {
// ── Link type definition ─────────────────────────────────────────────────
#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
pub struct LinkTypeDef {
pub name: String,
#[serde(default)]
@@ -151,6 +161,7 @@ pub struct LinkTypeDef {
// ── Traceability rule ────────────────────────────────────────────────────
#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
pub struct TraceabilityRule {
pub name: String,
pub description: String,
@@ -166,6 +177,22 @@ pub struct TraceabilityRule {
pub from_types: Vec<String>,
#[serde(default)]
pub severity: Severity,
+ /// Alternative backlink shapes that satisfy this rule. Each entry
+ /// is a `(link-type, from-types)` pair — used by safety-case schemas
+ /// to express "supported-by OR decomposed-by OR has-sub-goal" without
+ /// duplicating the whole rule.
+ #[serde(default, rename = "alternate-backlinks")]
+ pub alternate_backlinks: Vec<AlternateBacklink>,
+}
+
+/// One alternative backlink shape inside a TraceabilityRule.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
+pub struct AlternateBacklink {
+ #[serde(rename = "link-type")]
+ pub link_type: String,
+ #[serde(default, rename = "from-types")]
+ pub from_types: Vec<String>,
}
#[derive(Debug, Clone, Copy, Serialize, Deserialize, Default, PartialEq, Eq)]
@@ -189,6 +216,7 @@ fn default_severity() -> Severity {
/// rule to fire. This enables compound rules like "AI-generated artifacts with
/// active status must have a reviewer".
#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
pub struct ConditionalRule {
pub name: String,
#[serde(default)]
@@ -642,7 +670,20 @@ impl Schema {
for file in files {
for at in &file.artifact_types {
- artifact_types.insert(at.name.clone(), at.clone());
+ let mut at = at.clone();
+ // Populate shorthand_links from link_fields so the YAML
+ // parser recognises named-field forms like `targets: [X]`
+ // as equivalent to `links: [{type: threatens, target: X}]`.
+ // Without this, cardinality validation silently skips the
+ // named-field form and "required" links appear absent.
+ for lf in &at.link_fields {
+ if lf.name != "links" {
+ at.shorthand_links
+ .entry(lf.name.clone())
+ .or_insert_with(|| lf.link_type.clone());
+ }
+ }
+ artifact_types.insert(at.name.clone(), at);
}
for lt in &file.link_types {
if let Some(inv) = &lt.inverse {
@@ -664,6 +705,62 @@ impl Schema {
}
}
+ /// Return schema-internal consistency issues as human-readable messages.
+ /// Callers should surface these as errors — a schema with dangling
+ /// link-field references silently breaks cardinality enforcement for
+ /// every artifact that uses the undeclared link type, so the schema
+ /// should not reach production without review.
+ ///
+ /// Checks:
+ /// - Every `link-field.link_type` is declared in `link-types:`.
+ /// - Every `link-field.target_types` names a known artifact type.
+ /// - Every traceability rule's `from_types` and target types exist.
+ pub fn validate_consistency(&self) -> Vec<String> {
+ let mut issues = Vec::new();
+ let type_names: std::collections::HashSet<&str> =
+ self.artifact_types.keys().map(String::as_str).collect();
+ let link_names: std::collections::HashSet<&str> =
+ self.link_types.keys().map(String::as_str).collect();
+
+ for at in self.artifact_types.values() {
+ for lf in &at.link_fields {
+ if !link_names.contains(lf.link_type.as_str()) {
+ issues.push(format!(
+ "type '{}': link-field '{}' references unknown link type '{}'",
+ at.name, lf.name, lf.link_type
+ ));
+ }
+ for tt in &lf.target_types {
+ if !type_names.contains(tt.as_str()) {
+ issues.push(format!(
+ "type '{}': link-field '{}' target type '{}' is not a known artifact type",
+ at.name, lf.name, tt
+ ));
+ }
+ }
+ }
+ }
+ for rule in &self.traceability_rules {
+ for from in &rule.from_types {
+ if !type_names.contains(from.as_str()) {
+ issues.push(format!(
+ "rule '{}': from-type '{}' is not a known artifact type",
+ rule.name, from
+ ));
+ }
+ }
+ for target in &rule.target_types {
+ if !type_names.contains(target.as_str()) {
+ issues.push(format!(
+ "rule '{}': target-type '{}' is not a known artifact type",
+ rule.name, target
+ ));
+ }
+ }
+ }
+ issues
+ }
+
/// Look up an artifact type definition by name.
#[inline]
pub fn artifact_type(&self, name: &str) -> Option<&ArtifactTypeDef> {
diff --git a/rivet-core/src/sexpr_eval.rs b/rivet-core/src/sexpr_eval.rs
index 4cc391b..a352dc6 100644
--- a/rivet-core/src/sexpr_eval.rs
+++ b/rivet-core/src/sexpr_eval.rs
@@ -285,6 +285,17 @@ pub fn check(expr: &Expr, ctx: &EvalContext) -> bool {
// ── Field resolution ────────────────────────────────────────────────────
+/// Resolve a field accessor to a string value for s-expression comparisons.
+///
+/// Missing fields intentionally resolve to the empty string so filters like
+/// `(= asil "ASIL-D")` naturally exclude artifacts without an `asil` field
+/// rather than erroring out. This is filter semantics, not silent-accept:
+/// the caller wants "show artifacts whose asil = ASIL-D", and an artifact
+/// without `asil` correctly does NOT match. Reject-on-missing would make
+/// every cross-type query unusable.
+///
+/// Typos in field names should be caught by the schema layer
+/// (`deny_unknown_fields`) at YAML load time, not by the query evaluator.
fn resolve_str(acc: &Accessor, artifact: &Artifact) -> String {
match acc {
Accessor::Field(name) => match name.as_str() {
@@ -770,7 +781,28 @@ fn lower_list(node: &crate::sexpr::SyntaxNode, errors: &mut Vec) ->
return None;
}
let lt = extract_value(&args[0])?;
- let op_str = extract_symbol(&args[1]).unwrap_or_default();
+ // Reject empty/whitespace operators with a clear message instead
+ // of falling through the `_` arm with an "invalid operator ''"
+ // string that confuses users who supplied a non-symbol literal.
+ let Some(op_str) = extract_symbol(&args[1]) else {
+ errors.push(LowerError {
+ offset,
+ message: "'links-count' second argument must be one of \
+ the comparison operators >, <, >=, <=, =, != \
+ (got a non-symbol literal)"
+ .into(),
+ });
+ return None;
+ };
+ if op_str.trim().is_empty() {
+ errors.push(LowerError {
+ offset,
+ message: "'links-count' second argument is empty — \
+ expected one of >, <, >=, <=, =, !="
+ .into(),
+ });
+ return None;
+ }
let op = match op_str.as_str() {
">" => CompOp::Gt,
"<" => CompOp::Lt,
@@ -781,7 +813,10 @@ fn lower_list(node: &crate::sexpr::SyntaxNode, errors: &mut Vec) ->
_ => {
errors.push(LowerError {
offset,
- message: format!("invalid operator '{op_str}' in links-count"),
+ message: format!(
+ "'links-count' invalid operator '{op_str}' — \
+ expected one of >, <, >=, <=, =, !="
+ ),
});
return None;
}
diff --git a/rivet-core/src/validate.rs b/rivet-core/src/validate.rs
index 4896f1f..9e375bd 100644
--- a/rivet-core/src/validate.rs
+++ b/rivet-core/src/validate.rs
@@ -420,19 +420,34 @@ pub fn validate_structural(store: &Store, schema: &Schema, graph: &LinkGraph) ->
}
}
- // 8. Check unknown link types (not defined in schema)
+ // 8. Check unknown link types (not defined in schema).
+ // Elevated from Warning to Error: an undeclared link-type means the
+ // schema's cardinality and target-type guarantees silently don't apply
+ // to those links — the same severity as a broken required-link link,
+ // not a soft advisory. Pin to one diagnostic per (artifact, link-type)
+ // pair so a typo doesn't drown the report.
+ use std::collections::BTreeSet;
+ let known_link_types: BTreeSet<&str> = schema
+ .link_types
+ .keys()
+ .map(String::as_str)
+ .collect();
for artifact in store.iter() {
+ let mut seen: BTreeSet<&str> = BTreeSet::new();
for link in &artifact.links {
- if !schema.link_types.contains_key(&link.link_type) {
+ if !known_link_types.contains(link.link_type.as_str())
+ && seen.insert(link.link_type.as_str())
+ {
diagnostics.push(Diagnostic {
source_file: None,
line: None,
column: None,
- severity: Severity::Warning,
+ severity: Severity::Error,
artifact_id: Some(artifact.id.clone()),
rule: "unknown-link-type".to_string(),
message: format!(
- "link type '{}' is not defined in the schema",
+ "link type '{}' is not defined in the schema \
+ — declare it in link-types: or remove the link",
link.link_type
),
});
@@ -504,8 +519,8 @@ mod tests {
use crate::links::LinkGraph;
use crate::model::{Artifact, Link};
use crate::schema::{
- ArtifactTypeDef, Condition, ConditionalRule, FieldDef, Requirement, Severity,
- TraceabilityRule,
+ ArtifactTypeDef, Cardinality, Condition, ConditionalRule, FieldDef, LinkFieldDef,
+ Requirement, Severity, TraceabilityRule,
};
use crate::test_helpers::{minimal_artifact, minimal_schema};
use std::collections::BTreeMap;
@@ -684,6 +699,91 @@ mod tests {
assert_eq!(diags[0].severity, Severity::Warning);
}
+ // rivet: verifies REQ-004
+ #[test]
+ fn unknown_link_type_is_error_not_warning() {
+ // Regression guard: v0.4.1 emitted Warning for links whose type
+ // wasn't declared in the schema, so validation stayed PASS even
+ // though the cardinality and target-type guarantees silently
+ // didn't apply. Now promoted to Error — one per unique
+ // (artifact, link_type) pair to avoid noise.
+ use crate::store::Store;
+
+ let schema_file = minimal_schema("test");
+ let schema = Schema::merge(&[schema_file]);
+
+ let mut art = minimal_artifact("A-1", "test");
+ art.links = vec![
+ Link {
+ link_type: "undeclared-type".to_string(),
+ target: "B-1".to_string(),
+ },
+ Link {
+ link_type: "undeclared-type".to_string(),
+ target: "B-2".to_string(),
+ },
+ ];
+ let mut store = Store::new();
+ store.insert(art);
+ let graph = LinkGraph::build(&store, &schema);
+
+ let diags = crate::validate::validate(&store, &schema, &graph);
+ let unknown: Vec<_> = diags
+ .iter()
+ .filter(|d| d.rule == "unknown-link-type")
+ .collect();
+ assert_eq!(
+ unknown.len(),
+ 1,
+ "must emit exactly one diagnostic per (artifact, link-type) pair: {unknown:?}",
+ );
+ assert_eq!(
+ unknown[0].severity,
+ Severity::Error,
+ "unknown link type must be Error, got {:?}",
+ unknown[0].severity
+ );
+ }
+
+ // rivet: verifies REQ-010
+ #[test]
+ fn schema_consistency_flags_dangling_link_field_refs() {
+ // Regression guard: a schema with link-field.link_type pointing to
+ // an undeclared link type must be flagged at schema-check time,
+ // not silently tolerated until artifacts start being validated.
+ let mut file = minimal_schema("test");
+ file.artifact_types = vec![ArtifactTypeDef {
+ name: "test".to_string(),
+ description: "Test type".to_string(),
+ fields: vec![],
+ link_fields: vec![LinkFieldDef {
+ name: "satisfies".to_string(),
+ link_type: "nonexistent-link-type".to_string(),
+ required: false,
+ cardinality: Cardinality::ZeroOrMany,
+ target_types: vec!["another-missing-type".to_string()],
+ description: None,
+ }],
+ aspice_process: None,
+ common_mistakes: vec![],
+ example: None,
+ yaml_section: None,
+ yaml_sections: vec![],
+ yaml_section_suffix: None,
+ shorthand_links: std::collections::BTreeMap::new(),
+ }];
+ let schema = Schema::merge(&[file]);
+ let issues = schema.validate_consistency();
+ assert!(
+ issues.iter().any(|i| i.contains("nonexistent-link-type")),
+ "must flag undeclared link type: got {issues:?}",
+ );
+ assert!(
+ issues.iter().any(|i| i.contains("another-missing-type")),
+ "must flag unknown target type: got {issues:?}",
+ );
+ }
+
// rivet: verifies REQ-004
#[test]
fn required_links_passes_when_link_present() {
@@ -914,6 +1014,7 @@ then:
target_types: vec!["requirement".into()],
from_types: vec![],
severity: Severity::Error,
+ alternate_backlinks: vec![],
}];
Schema::merge(&[file])
}
@@ -1338,6 +1439,7 @@ then:
target_types: vec![], // empty — the ambiguous case
from_types: vec![],
severity: Severity::Error,
+ alternate_backlinks: vec![],
}];
let schema = Schema::merge(&[file]);
@@ -1420,6 +1522,7 @@ then:
target_types: vec![],
from_types: vec![], // empty — the ambiguous case
severity: Severity::Error,
+ alternate_backlinks: vec![],
}];
let schema = Schema::merge(&[file]);
diff --git a/rivet-core/src/yaml_cst.rs b/rivet-core/src/yaml_cst.rs
index af7e634..b459a9d 100644
--- a/rivet-core/src/yaml_cst.rs
+++ b/rivet-core/src/yaml_cst.rs
@@ -561,6 +561,15 @@ impl<'src> Parser<'src> {
) => {
self.parse_mapping_entry(indent);
}
+ Some(SyntaxKind::Comment) => {
+ // Indent-aligned comment line (e.g. ` # explanation`).
+ // Eat the comment and any trailing newline; do not flag
+ // as an error or interrupt the mapping.
+ self.bump();
+ if self.at(SyntaxKind::Newline) {
+ self.bump();
+ }
+ }
Some(SyntaxKind::Dash) if indent == min_indent => {
// Sequence at same indent — we're done with this mapping
break;
@@ -1014,6 +1023,25 @@ mod tests {
parse_and_check("key: value\n");
}
+ /// Regression: a comment-only line inside a mapping must not produce
+ /// "expected mapping key, found Some(Comment)". The LSP YAML parser
+ /// surfaced a false-positive diagnostic on every CI workflow file
+ /// (.github/workflows/*.yml) where line-leading comments are common.
+ #[test]
+ fn mapping_with_comment_only_line() {
+ parse_and_check("key1: value1\n# leading comment line\nkey2: value2\n");
+ }
+
+ #[test]
+ fn mapping_with_indented_comment_line() {
+ parse_and_check("parent:\n child1: 1\n # mid comment\n child2: 2\n");
+ }
+
+ #[test]
+ fn mapping_with_inline_trailing_comment_on_value() {
+ parse_and_check("key: value # trailing\n");
+ }
+
#[test]
fn nested_mapping() {
parse_and_check("parent:\n child: value\n other: stuff\n");
diff --git a/rivet-core/src/yaml_hir.rs b/rivet-core/src/yaml_hir.rs
index 26b51f8..ca8e3d0 100644
--- a/rivet-core/src/yaml_hir.rs
+++ b/rivet-core/src/yaml_hir.rs
@@ -280,7 +280,38 @@ pub fn extract_schema_driven(
}
}
}
- // Unknown keys are silently skipped (comments, metadata, etc.)
+ else {
+ // Unknown top-level keys: most are legitimate (project metadata
+ // like `name:`, `version:`, free-form fields). But a key whose
+ // *singular* form matches a known section (e.g. user wrote
+ // `control-action:` instead of `control-actions:`) is almost
+ // certainly a typo of a schema-defined section — surface it as
+ // a Warning so misspellings stop being silently dropped.
+ // We deliberately keep this advisory (not Error) so genuine
+ // metadata keys don't break existing files.
+ let known_sections: Vec<&str> = section_map.keys().copied().collect();
+ let candidate_singular = key_text.strip_suffix('s').unwrap_or(&key_text);
+ let candidate_plural = format!("{key_text}s");
+ let suspected_typo = known_sections.iter().any(|s| {
+ let s_singular = s.strip_suffix('s').unwrap_or(s);
+ s_singular == candidate_singular
+ || *s == candidate_plural
+ || (key_text.len() > 4 && s.contains(candidate_singular))
+ });
+ if suspected_typo {
+ let span = Span::from_text_range(key_node.text_range());
+ result.diagnostics.push(ParseDiagnostic {
+ span,
+ message: format!(
+ "unknown top-level key '{key_text}' looks like a typo of a \
+ known schema section — known: {}",
+ known_sections.join(", ")
+ ),
+ severity: Severity::Warning,
+ });
+ }
+ // Otherwise: silently skip (legitimate metadata).
+ }
}
// Set source_file on all artifacts and detect duplicates
@@ -912,8 +943,14 @@ fn extract_links(value_node: &SyntaxNode) -> Vec {
let mut links = Vec::new();
// Links is a Sequence of Mappings: each with "type" + "target".
+ // If the CST didn't produce a block Sequence (e.g. the user wrote
+ // flow-style `links: [{type: X, target: Y}]`, which the CST parser
+ // records as FlowSequence without nested FlowMapping nodes), fall back
+ // to a serde_yaml reparse of the value text — this guarantees flow
+ // and block styles produce identical links and prevents silent
+ // under-counting by the cardinality validator.
let Some(seq) = child_of_kind(value_node, SyntaxKind::Sequence) else {
- return links;
+ return extract_links_via_serde(value_node);
};
for item in seq.children() {
@@ -963,6 +1000,37 @@ fn extract_links(value_node: &SyntaxNode) -> Vec {
links
}
+/// Fallback parser for `links:` values the CST didn't recognise as a block
+/// `Sequence` — most importantly flow-style `[{type: X, target: Y}, ...]`.
+/// Re-parses the value text via serde_yaml and converts `type` + `target`
+/// into `Link`s. Unknown shapes and parse errors silently produce no
+/// links (matching the permissive behaviour of the primary path).
+fn extract_links_via_serde(value_node: &SyntaxNode) -> Vec<Link> {
+ #[derive(serde::Deserialize)]
+ struct RawLink {
+ #[serde(rename = "type")]
+ link_type: Option<String>,
+ target: Option<String>,
+ }
+ let text = value_node.text().to_string();
+ let trimmed = text.trim();
+ if trimmed.is_empty() {
+ return Vec::new();
+ }
+ let Ok(raws) = serde_yaml::from_str::<Vec<RawLink>>(trimmed) else {
+ return Vec::new();
+ };
+ raws.into_iter()
+ .filter_map(|r| match (r.link_type, r.target) {
+ (Some(t), Some(tgt)) if !t.is_empty() && !tgt.is_empty() => Some(Link {
+ link_type: t,
+ target: tgt,
+ }),
+ _ => None,
+ })
+ .collect()
+}
+
// ── Provenance extraction ─────────────────────────────────────────────
/// Extract a `Provenance` struct from a `provenance:` mapping value node.
@@ -1030,6 +1098,24 @@ fn extract_string_list(value_node: &SyntaxNode) -> Vec {
}
}
}
+ // If the CST produced no scalar tokens but the source clearly
+ // wasn't an empty `[]`, fall back to serde_yaml so unusual flow
+ // shapes (anchors, refs, quoted commas) don't silently drop to
+ // an empty list. Same defensive pattern as extract_links.
+ if items.is_empty() {
+ let text = flow.text().to_string();
+ let trimmed = text.trim();
+ let is_empty_brackets = trimmed == "[]"
+ || trimmed
+ .strip_prefix('[')
+ .and_then(|s| s.strip_suffix(']'))
+ .is_some_and(|inner| inner.trim().is_empty());
+ if !is_empty_brackets {
+ if let Ok(parsed) = serde_yaml::from_str::<Vec<String>>(trimmed) {
+ items = parsed;
+ }
+ }
+ }
return items;
}
@@ -1468,6 +1554,47 @@ artifacts:
assert_eq!(links[1].target, "B-2");
}
+ /// 3b. Flow-style link syntax produces identical links to block-style.
+ /// Regression guard: v0.4.1 parsed flow-style but the cardinality counter
+ /// saw zero links, so required-link validation silently passed even when
+ /// the required link was missing (the flow-style version was accepted).
+ #[test]
+ fn links_extraction_flow_style_matches_block_style() {
+ let flow = "\
+artifacts:
+ - id: A-1
+ type: req
+ title: Flow style
+ links: [{ type: satisfies, target: B-1 }, { type: derives-from, target: B-2 }]
+";
+ let block = "\
+artifacts:
+ - id: A-1
+ type: req
+ title: Block style
+ links:
+ - type: satisfies
+ target: B-1
+ - type: derives-from
+ target: B-2
+";
+ let flow_hir = extract_generic_artifacts(flow);
+ let block_hir = extract_generic_artifacts(block);
+ assert_eq!(flow_hir.artifacts.len(), 1, "flow: no artifact parsed");
+ assert_eq!(block_hir.artifacts.len(), 1, "block: no artifact parsed");
+ let flow_links = &flow_hir.artifacts[0].artifact.links;
+ let block_links = &block_hir.artifacts[0].artifact.links;
+ assert_eq!(
+ flow_links, block_links,
+ "flow-style and block-style must yield identical links — got flow={flow_links:?} block={block_links:?}",
+ );
+ assert_eq!(
+ flow_links.len(),
+ 2,
+ "flow-style links under-counted: got {flow_links:?}"
+ );
+ }
+
/// 4. Custom fields stored as serde_yaml::Value correctly.
#[test]
fn custom_fields_typed_correctly() {
diff --git a/rivet-core/tests/proptest_operations.rs b/rivet-core/tests/proptest_operations.rs
index 6980c01..896fd56 100644
--- a/rivet-core/tests/proptest_operations.rs
+++ b/rivet-core/tests/proptest_operations.rs
@@ -123,6 +123,7 @@ fn test_schema() -> Schema {
target_types: vec![],
from_types: vec!["feature".into()],
severity: Severity::Warning,
+ alternate_backlinks: vec![],
}],
conditional_rules: vec![],
}])
diff --git a/scripts/diff-to-markdown.mjs b/scripts/diff-to-markdown.mjs
new file mode 100644
index 0000000..d1f52f0
--- /dev/null
+++ b/scripts/diff-to-markdown.mjs
@@ -0,0 +1,286 @@
+#!/usr/bin/env node
+// diff-to-markdown.mjs — convert rivet diff + impact JSON into a PR-comment
+// markdown body. Called by .github/workflows/rivet-delta.yml.
+//
+// Usage:
+// node scripts/diff-to-markdown.mjs \
+// --diff path/to/diff.json \
+// --impact path/to/impact.json \
+// --pr 123 --run 456 --repo owner/name
+//
+// Emits markdown on stdout. The first line is a hidden HTML comment
+// marker (<!-- rivet-delta -->) so the workflow can find-and-replace
+// the same comment on subsequent pushes.
+//
+// Guarantees:
+// * Never throws on malformed input — emits a warning comment instead.
+// * Caps the mermaid graph at MERMAID_NODE_CAP nodes; overflow goes
+// into a collapsible list.
+// * All inputs sanitised with `escapeMd` before rendering so artifact
+// IDs or titles containing markdown metacharacters cannot break out.
+
+import { readFileSync } from "node:fs";
+import { argv, stdout, stderr } from "node:process";
+
+const MARKER = "<!-- rivet-delta -->";
+const MERMAID_NODE_CAP = 30;
+
+// ── Argv parsing ────────────────────────────────────────────────────────
+function parseArgs(argv) {
+ const out = {};
+ for (let i = 2; i < argv.length; i++) {
+ const arg = argv[i];
+ if (arg === "--diff") out.diff = argv[++i];
+ else if (arg === "--impact") out.impact = argv[++i];
+ else if (arg === "--pr") out.pr = argv[++i];
+ else if (arg === "--run") out.run = argv[++i];
+ else if (arg === "--repo") out.repo = argv[++i];
+ }
+ return out;
+}
+
+// ── Safe JSON load ──────────────────────────────────────────────────────
+function loadJson(path, fallback) {
+ if (!path) return fallback;
+ try {
+ const raw = readFileSync(path, "utf8");
+ return JSON.parse(raw);
+ } catch (e) {
+ stderr.write(`diff-to-markdown: failed to load ${path}: ${e.message}\n`);
+ return fallback;
+ }
+}
+
+// ── Sanitisation ────────────────────────────────────────────────────────
+// Escape markdown metacharacters in user-controlled strings (artifact IDs,
+// titles, link types) so a maliciously-titled artifact can't break out of
+// the comment structure.
+function escapeMd(s) {
+ if (s === null || s === undefined) return "";
+ return String(s)
+ .replaceAll("\\", "\\\\")
+ .replaceAll("|", "\\|")
+ .replaceAll("<", "<")
+ .replaceAll(">", ">")
+ .replaceAll("`", "\\`")
+ .replaceAll("*", "\\*")
+ .replaceAll("_", "\\_")
+ .replaceAll("[", "\\[")
+ .replaceAll("]", "\\]");
+}
+
+// Mermaid IDs must be alphanumeric + underscore. Replace everything else.
+function mermaidId(id) {
+ return String(id).replaceAll(/[^A-Za-z0-9_]/g, "_");
+}
+
+// ── Diff / impact extraction ────────────────────────────────────────────
+function normalize(diff, impact) {
+ const added = Array.isArray(diff?.added) ? diff.added : [];
+ const removed = Array.isArray(diff?.removed) ? diff.removed : [];
+ const modified = Array.isArray(diff?.modified) ? diff.modified : [];
+ const impacted = Array.isArray(impact?.impacted) ? impact.impacted : [];
+ return { added, removed, modified, impacted };
+}
+
+// ── Sections ────────────────────────────────────────────────────────────
+function renderCountsTable({ added, removed, modified, impacted }) {
+ const rows = [
+ ["Added", added.length],
+ ["Removed", removed.length],
+ ["Modified", modified.length],
+ ["Downstream impacted (depth ≤ 5)", impacted.length],
+ ];
+ let md = "| Change | Count |\n|---|---:|\n";
+ for (const [label, n] of rows) {
+ md += `| ${label} | ${n} |\n`;
+ }
+ return md;
+}
+
+function renderMermaid({ added, removed, modified }) {
+ const nodes = new Map(); // id → class
+ for (const id of added) nodes.set(String(id), "added");
+ for (const id of removed) nodes.set(String(id), "removed");
+ for (const m of modified) {
+ if (m && m.id) nodes.set(String(m.id), "modified");
+ }
+
+ const total = nodes.size;
+ if (total === 0) {
+ return { md: "", truncated: false, total: 0 };
+ }
+
+ // Cap at MERMAID_NODE_CAP; overflow bucket rendered as a single summary
+ // node with the remaining count so the diagram stays legible.
+ const entries = [...nodes.entries()].slice(0, MERMAID_NODE_CAP);
+ const truncated = total > MERMAID_NODE_CAP;
+
+ // Edges: modified artifacts show added/removed links.
+ const edges = [];
+ for (const m of modified) {
+ if (!m) continue;
+ const src = mermaidId(m.id);
+ if (!entries.some(([id]) => mermaidId(id) === src)) continue;
+ for (const link of m.links_added ?? []) {
+ const tgt = mermaidId(link.target);
+ edges.push(` ${src} -. "+ ${escapeMermaidLabel(link.link_type)}" .-> ${tgt}`);
+ }
+ for (const link of m.links_removed ?? []) {
+ const tgt = mermaidId(link.target);
+ edges.push(` ${src} -. "- ${escapeMermaidLabel(link.link_type)}" .-> ${tgt}`);
+ }
+ }
+
+ let md = "```mermaid\ngraph LR\n";
+ for (const [id, kind] of entries) {
+ const safeId = mermaidId(id);
+ const label = String(id).replaceAll(`"`, "'");
+ md += ` ${safeId}["${label}"]:::${kind}\n`;
+ }
+ if (truncated) {
+ md += ` overflow["+${total - MERMAID_NODE_CAP} more"]:::overflow\n`;
+ }
+ for (const edge of edges) md += `${edge}\n`;
+ md += " classDef added fill:#d4edda,stroke:#28a745,color:#155724\n";
+ md += " classDef removed fill:#f8d7da,stroke:#dc3545,color:#721c24\n";
+ md += " classDef modified fill:#fff3cd,stroke:#ffc107,color:#856404\n";
+ md +=
+ " classDef overflow fill:#e2e3e5,stroke:#6c757d,color:#495057,stroke-dasharray: 3 3\n";
+ md += "```\n";
+ return { md, truncated, total };
+}
+
+// Mermaid labels use double-quotes; we strip any that would break parsing.
+function escapeMermaidLabel(s) {
+ return String(s ?? "").replaceAll(`"`, "'");
+}
+
+function renderChangeList({ added, removed, modified }) {
+ let md = "";
+ if (added.length) {
+ md += "<details><summary>Added</summary>\n\n";
+ for (const id of added.slice(0, 200)) {
+ md += `- \`${escapeMd(id)}\`\n`;
+ }
+ if (added.length > 200) md += `- … +${added.length - 200} more\n`;
+ md += "\n</details>\n\n";
+ }
+ if (removed.length) {
+ md += "<details><summary>Removed</summary>\n\n";
+ for (const id of removed.slice(0, 200)) {
+ md += `- \`${escapeMd(id)}\`\n`;
+ }
+ if (removed.length > 200) md += `- … +${removed.length - 200} more\n`;
+ md += "\n</details>\n\n";
+ }
+ if (modified.length) {
+ md += "Modified\n\n";
+ md += "| ID | Changes |\n|---|---|\n";
+ for (const m of modified.slice(0, 100)) {
+ const parts = [];
+ if (m.status_changed) {
+ const [o, n] = m.status_changed;
+ parts.push(`status: ${escapeMd(o ?? "—")} → ${escapeMd(n ?? "—")}`);
+ }
+ if (m.title_changed) parts.push("title changed");
+ if (m.description_changed) parts.push("description changed");
+ if (m.type_changed) {
+ const [o, n] = m.type_changed;
+ parts.push(`type: ${escapeMd(o)} → ${escapeMd(n)}`);
+ }
+ if (m.tags_added?.length) {
+ parts.push(`+tags: ${m.tags_added.map(escapeMd).join(", ")}`);
+ }
+ if (m.tags_removed?.length) {
+ parts.push(`−tags: ${m.tags_removed.map(escapeMd).join(", ")}`);
+ }
+ if (m.links_added?.length) parts.push(`+${m.links_added.length} link(s)`);
+ if (m.links_removed?.length)
+ parts.push(`−${m.links_removed.length} link(s)`);
+ md += `| \`${escapeMd(m.id)}\` | ${parts.join("; ")} |\n`;
+ }
+ if (modified.length > 100) {
+ md += `| … | +${modified.length - 100} more modified |\n`;
+ }
+ md += "\n\n\n";
+ }
+ return md;
+}
+
+function renderImpact(impacted) {
+ if (!impacted.length) return "";
+ let md = "Downstream impact (depth ≤ 5)\n\n";
+ md += "| ID | Depth | Path |\n|---|---:|---|\n";
+ for (const i of impacted.slice(0, 100)) {
+ const path = Array.isArray(i.reason) ? i.reason.join(" → ") : "";
+ md += `| \`${escapeMd(i.id)}\` | ${Number(i.depth) || 0} | ${escapeMd(path)} |\n`;
+ }
+ if (impacted.length > 100) {
+ md += `| … | | +${impacted.length - 100} more |\n`;
+ }
+ md += "\n\n\n";
+ return md;
+}
+
+function renderArtifactLink(args) {
+ if (!args.repo || !args.run) return "";
+ return (
+ `> 📎 Full HTML dashboard attached as workflow artifact ` +
+ `\`rivet-delta-pr-${args.pr}\` — ` +
+ `[download from the workflow run](https://github.com/${args.repo}/actions/runs/${args.run}).\n\n`
+ );
+}
+
+// ── Entry point ─────────────────────────────────────────────────────────
+function main() {
+ const args = parseArgs(argv);
+ const diff = loadJson(args.diff, null);
+ const impact = loadJson(args.impact, null);
+
+ let md = `${MARKER}\n\n## 📐 Rivet artifact delta\n\n`;
+
+ if (!diff) {
+ md +=
+ "> ⚠️ Diff could not be computed (base or head failed to parse). " +
+ "See the workflow logs for details — this is informational and does " +
+ "not block merge.\n";
+ stdout.write(md);
+ return;
+ }
+
+ const n = normalize(diff, impact);
+ const total = n.added.length + n.removed.length + n.modified.length;
+
+ if (total === 0) {
+ md +=
+ "_No artifact changes in this PR._ Code-only changes (renderer, " +
+ "CLI wiring, tests) don't touch the artifact graph.\n";
+ stdout.write(md);
+ return;
+ }
+
+ md += renderCountsTable(n);
+ md += "\n";
+
+ const { md: graph, truncated, total: nodeCount } = renderMermaid(n);
+ if (graph) {
+ md += "### Graph\n\n";
+ md += graph;
+ if (truncated) {
+ md += `\n_Showing first ${MERMAID_NODE_CAP} of ${nodeCount} changed artifacts; full list below._\n\n`;
+ }
+ }
+
+ md += renderChangeList(n);
+ md += renderImpact(n.impacted);
+ md += renderArtifactLink(args);
+
+ md +=
+ "\nPosted by `rivet-delta` workflow. The graph shows only changed " +
+ "artifacts; open the HTML dashboard (above) for full context.\n";
+
+ stdout.write(md);
+}
+
+main();
diff --git a/tests/playwright/artifacts.spec.ts b/tests/playwright/artifacts.spec.ts
index 955e214..ad375bf 100644
--- a/tests/playwright/artifacts.spec.ts
+++ b/tests/playwright/artifacts.spec.ts
@@ -65,4 +65,44 @@ test.describe("Artifacts", () => {
// htmx:afterSwap. If rendering fails the pre block keeps its source.
await expect(mermaidPre.locator("svg")).toBeVisible({ timeout: 5_000 });
});
+
+ // Regression: artifact diagrams (mermaid + AADL) must wrap in the same
+ // `.svg-viewer` container as the link graph so the toolbar (zoom-fit,
+ // fullscreen, popout) is consistent across views. Catches the v0.4.1
+ // drift where graph views had a toolbar but artifact diagrams did not.
+  test("artifact mermaid diagram wrapped in svg-viewer", async ({ page }) => {
+    await page.goto("/artifacts/ARCH-CORE-001");
+    await waitForHtmx(page);
+    // The first diagram on the artifact page must sit inside the shared
+    // viewer container (same markup the link graph uses).
+    const viewer = page
+      .locator(".artifact-diagram .svg-viewer")
+      .first();
+    await expect(viewer).toBeVisible();
+    // Toolbar parity: the fullscreen button is the sentinel control.
+    await expect(
+      viewer.locator(".svg-viewer-toolbar button[title='Fullscreen']"),
+    ).toBeVisible();
+  });
+
+ // B5: artifact detail must show which markdown documents reference it
+ // via [[ID]] links — reverse index of the doc-linkage forward view.
+ // Regression guard for the "data exists in core but UI doesn't show it"
+ // antipattern.
+  test("artifact detail shows referencing documents (when present)", async ({
+    page,
+  }) => {
+    // REQ-001 is referenced from at least one project doc; if your
+    // fixture set changes, this assertion only fires when there is at
+    // least one document reference (the block hides itself otherwise).
+    await page.goto("/artifacts/REQ-001");
+    await waitForHtmx(page);
+    const heading = page.locator("h3", { hasText: "Referenced in Documents" });
+    // The section is omitted entirely when nothing references the
+    // artifact — skip rather than fail on lean fixture sets.
+    if (await heading.count() === 0) {
+      test.skip();
+      return;
+    }
+    await expect(heading).toBeVisible();
+    // Accept both htmx-driven and plain anchors into the documents view.
+    const docLinks = page.locator(
+      "a[hx-get^='/documents/'], a[href^='/documents/']",
+    );
+    expect(await docLinks.count()).toBeGreaterThan(0);
+  });
});
diff --git a/tests/playwright/diagram-viewer.spec.ts b/tests/playwright/diagram-viewer.spec.ts
new file mode 100644
index 0000000..1e67279
--- /dev/null
+++ b/tests/playwright/diagram-viewer.spec.ts
@@ -0,0 +1,50 @@
+import { test, expect } from "@playwright/test";
+import { waitForHtmx } from "./helpers";
+
+/**
+ * Diagram-viewer parity test.
+ *
+ * Pins the architectural invariant that every dashboard view rendering a
+ * diagram (mermaid, link graph, doc-linkage, schema linkage) wraps it in
+ * the shared `.svg-viewer` container with toolbar (zoom-fit, fullscreen,
+ * popout). Catches the v0.4.1 drift where mermaid-only views had the
+ * toolbar but schema/artifact diagrams did not.
+ */
+const VIEWER_PAGES = [
+  // Top-level link graph — always has toolbar.
+  { name: "graph", url: "/graph" },
+  // Doc linkage view.
+  { name: "doc-linkage", url: "/doc-linkage" },
+  // Help / schema page renders the schema-linkage mermaid diagram.
+  { name: "schema-linkage", url: "/help/schema" },
+];
+
+// One parity pair of tests per page: same selectors, same toolbar contract.
+// The loop variable is named `page`, so the Playwright page fixture is
+// aliased to `p` inside each test body to avoid shadowing.
+for (const page of VIEWER_PAGES) {
+  test(`${page.name}: viewer toolbar present`, async ({ page: p }) => {
+    await p.goto(page.url);
+    await waitForHtmx(p);
+    const viewer = p.locator(".svg-viewer").first();
+    await expect(viewer).toBeVisible({ timeout: 5_000 });
+    const toolbar = viewer.locator(".svg-viewer-toolbar");
+    await expect(toolbar).toBeVisible();
+    // Each toolbar must offer the same three controls.
+    await expect(
+      toolbar.locator("button[title='Zoom to fit']"),
+    ).toBeVisible();
+    await expect(
+      toolbar.locator("button[title='Fullscreen']"),
+    ).toBeVisible();
+    await expect(
+      toolbar.locator("button[title='Open in new window']"),
+    ).toBeVisible();
+  });
+
+  test(`${page.name}: fullscreen toggles class`, async ({ page: p }) => {
+    await p.goto(page.url);
+    await waitForHtmx(p);
+    const viewer = p.locator(".svg-viewer").first();
+    await expect(viewer).toBeVisible();
+    // Asserts only the CSS `fullscreen` class toggle on the container, not
+    // the browser's native fullscreen state.
+    await viewer.locator("button[title='Fullscreen']").click();
+    await expect(viewer).toHaveClass(/fullscreen/);
+  });
+}
diff --git a/tests/playwright/documents.spec.ts b/tests/playwright/documents.spec.ts
index 8dd49ad..95badd7 100644
--- a/tests/playwright/documents.spec.ts
+++ b/tests/playwright/documents.spec.ts
@@ -139,6 +139,44 @@ test.describe("Documents", () => {
}
});
+  // B1: every heading (h2–h4) in a rendered document body must carry an
+  // `id` attribute so in-page TOC links navigate. Catches regressions where
+  // `<h2>Section</h2>` slips back in without `id="section"`.
+  test("rendered document headings have id attributes for TOC anchors", async ({
+    page,
+  }) => {
+    await page.goto("/documents");
+    await waitForHtmx(page);
+    // Collect every document href from the index page.
+    const docLinks = await page
+      .locator("a[href^='/documents/']")
+      .evaluateAll((els) =>
+        els
+          .map((el) => el.getAttribute("href"))
+          .filter((h): h is string => !!h),
+      );
+    // Narrow the first href through a local: under `noUncheckedIndexedAccess`
+    // `docLinks[0]` is `string | undefined`, and a bare length check does not
+    // narrow an indexed access, so the previous `goto(docLinks[0])` failed
+    // strict compilation. Behavior is unchanged: skip when no docs exist.
+    const firstDoc = docLinks[0];
+    if (!firstDoc) {
+      test.skip();
+      return;
+    }
+    await page.goto(firstDoc);
+    await waitForHtmx(page);
+    // Capture tag/id/text per heading so a failure prints the offenders.
+    const headings = await page
+      .locator("article h2, article h3, article h4, main h2, main h3, main h4")
+      .evaluateAll((els) =>
+        els.map((el) => ({
+          tag: el.tagName.toLowerCase(),
+          id: el.getAttribute("id"),
+          text: el.textContent?.trim() ?? "",
+        })),
+      );
+    if (headings.length === 0) {
+      test.skip();
+      return;
+    }
+    const missingId = headings.filter((h) => !h.id);
+    expect(missingId).toEqual([]);
+  });
+
test("embed-stats renders a table when present", async ({ page }) => {
// Visit each document looking for embed-stats divs
await page.goto("/documents");
diff --git a/tests/playwright/rivet-delta.spec.ts b/tests/playwright/rivet-delta.spec.ts
new file mode 100644
index 0000000..4997865
--- /dev/null
+++ b/tests/playwright/rivet-delta.spec.ts
@@ -0,0 +1,353 @@
+import { test, expect } from "@playwright/test";
+import { execFileSync } from "node:child_process";
+import { writeFileSync, mkdtempSync, readFileSync } from "node:fs";
+import { tmpdir } from "node:os";
+import { join } from "node:path";
+
+/**
+ * Pipeline test for `.github/workflows/rivet-delta.yml` output.
+ *
+ * Invokes `scripts/diff-to-markdown.mjs` with fixture diff + impact JSON,
+ * renders the resulting markdown in a real browser, and asserts that every
+ * piece the reviewer is supposed to see — counts, changed IDs, the mermaid
+ * diagram, the collapsible impact list, the workflow-artifact link — is
+ * actually visible and clickable. This pins the "what we ship back to the
+ * PR is usable" contract that the user called out.
+ */
+const REPO_ROOT = join(__dirname, "..", "..");
+
+/** Minimal HTML shell that renders a markdown body via `marked` + `mermaid`. */
+function harness(bodyHtml: string, mermaidSource: string): string {
+  // NOTE(review): the template literal below appears to have lost its HTML
+  // tags (doctype/head/body shell, and the conditional element that loads
+  // mermaid when `mermaidSource` is non-empty) to a markup-stripping step —
+  // only text nodes and interpolations survive, so as written it emits a
+  // tagless document. Confirm against the original file and restore the
+  // markup before trusting the rendering assertions that depend on it.
+  return `
+
+
+
+  rivet-delta preview
+
+
+
+  ${bodyHtml}
+  ${
+    mermaidSource
+      ? ``
+      : ""
+  }
+
+`;
+}
+
+/** Very light markdown → HTML: enough for the checks below. We intentionally
+ * avoid pulling in a full markdown library to keep the test self-contained.
+ */
+function mdToHtml(md: string): string {
+ let html = md;
+ // Fenced blocks (keep as