diff --git a/.config/nextest.toml b/.config/nextest.toml new file mode 100644 index 0000000..8ac5d33 --- /dev/null +++ b/.config/nextest.toml @@ -0,0 +1,10 @@ +# Nextest configuration +# See https://nexte.st/docs/configuration/ + +[profile.ci] +# Fail-fast disabled so all tests run even if some fail +fail-fast = false + +[profile.ci.junit] +# Output JUnit XML for CI consumption +path = "junit.xml" diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml new file mode 100644 index 0000000..378d135 --- /dev/null +++ b/.github/workflows/benchmarks.yml @@ -0,0 +1,49 @@ +name: Benchmarks + +on: + push: + branches: [main] + pull_request: + branches: [main] + +permissions: + contents: write + pull-requests: write + +env: + CARGO_TERM_COLOR: always + +jobs: + benchmark: + name: Criterion Benchmarks + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: dtolnay/rust-toolchain@stable + + - uses: Swatinem/rust-cache@v2 + + - name: Run criterion benchmarks + run: cargo bench --bench core_benchmarks -- --output-format=bencher | tee benchmark-output.txt + + - name: Store and compare benchmark results + continue-on-error: true + uses: benchmark-action/github-action-benchmark@v1 + with: + name: Rivet Criterion Benchmarks + tool: cargo + output-file-path: benchmark-output.txt + github-token: ${{ secrets.GITHUB_TOKEN }} + # Push results to gh-pages on main branch pushes + auto-push: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} + # Comment on PRs when there is a regression + comment-on-alert: true + # Alert threshold: warn at 120% of baseline + alert-threshold: "120%" + # Do not fail the workflow on regressions for now (baseline is being established) + fail-on-alert: false + # Keep benchmark data for the last 30 entries + max-items-in-chart: 30 + # Only save data points on pushes to main (not on PRs) + save-data-file: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} diff --git 
a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 503bbc8..f0b1053 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -88,22 +88,30 @@ jobs: # ── Code coverage (Rust nightly for source-based instrumentation) ─── coverage: name: Code Coverage + needs: [test] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly - uses: Swatinem/rust-cache@v2 - name: Install cargo-llvm-cov - uses: taiki-e/install-action@cargo-llvm-cov + uses: taiki-e/install-action@v2 + with: + tool: cargo-llvm-cov - name: Generate coverage (LCOV + HTML) run: | - cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info - cargo llvm-cov --all-features --workspace --html --output-dir coverage-html + cargo llvm-cov -p rivet-core --lcov --output-path lcov.info + cargo llvm-cov -p rivet-core --html --output-dir coverage-html + - name: Enforce minimum coverage threshold + run: cargo llvm-cov -p rivet-core --fail-under-lines 40 - name: Upload LCOV to Codecov + if: env.CODECOV_TOKEN != '' uses: codecov/codecov-action@v4 with: files: lcov.info fail_ci_if_error: false + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - name: Upload HTML coverage report uses: actions/upload-artifact@v4 with: @@ -121,9 +129,9 @@ jobs: components: miri - uses: Swatinem/rust-cache@v2 - name: Run Miri - run: cargo miri test --all + run: cargo miri test -p rivet-core --lib env: - MIRIFLAGS: "-Zmiri-strict-provenance" + MIRIFLAGS: "-Zmiri-strict-provenance -Zmiri-disable-isolation" # ── Property-based testing (extended) ─────────────────────────────── proptest: @@ -146,9 +154,17 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable - name: Install cargo-vet - uses: taiki-e/install-action@cargo-vet + uses: taiki-e/install-action@v2 + with: + tool: cargo-vet + - name: Initialize cargo-vet if needed + run: | + if [ ! 
-d supply-chain ]; then + cargo vet init + echo "::notice::cargo-vet initialized — run 'cargo vet' locally to audit dependencies" + fi - name: Check supply chain - run: cargo vet --locked || echo "::warning::cargo-vet not yet configured — run 'cargo vet init'" + run: cargo vet --locked || echo "::warning::cargo-vet found unaudited crates — run 'cargo vet' locally" # ── MSRV check ────────────────────────────────────────────────────── msrv: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..c4962b8 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,103 @@ +name: Release Test Evidence + +on: + push: + tags: + - "v*" + +permissions: + contents: write + +env: + CARGO_TERM_COLOR: always + +jobs: + test-evidence: + name: Build Test Evidence Bundle + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: dtolnay/rust-toolchain@nightly + with: + components: llvm-tools-preview + + - uses: Swatinem/rust-cache@v2 + + # Install tools: cargo-nextest for JUnit XML, cargo-llvm-cov for coverage + - name: Install cargo-nextest and cargo-llvm-cov + uses: taiki-e/install-action@v2 + with: + tool: cargo-nextest,cargo-llvm-cov + + # ── 1. Test suite with JUnit XML output ───────────────────────────── + - name: Run tests with JUnit XML output + run: | + mkdir -p test-evidence/test-results + cargo nextest run --all --profile ci + cp target/nextest/ci/junit.xml test-evidence/test-results/junit.xml + + # ── 2. Code coverage (LCOV) ──────────────────────────────────────── + - name: Generate code coverage (LCOV) + run: | + mkdir -p test-evidence/coverage + cargo llvm-cov --all-features --workspace --lcov --output-path test-evidence/coverage/lcov.info + cargo llvm-cov report --all-features --workspace > test-evidence/coverage/summary.txt + + # ── 3. 
Benchmarks (criterion HTML reports) ───────────────────────── + - name: Run criterion benchmarks + run: | + cargo bench --bench core_benchmarks -- --output-format=criterion + mkdir -p test-evidence/benchmarks + cp -r target/criterion/* test-evidence/benchmarks/ 2>/dev/null || true + + # ── 4. Rivet validate ────────────────────────────────────────────── + - name: Run rivet validate + run: | + mkdir -p test-evidence/validation + set +e + cargo run --release -- validate > test-evidence/validation/validate-output.txt 2>&1 + rc=$? + set -e + echo "" >> test-evidence/validation/validate-output.txt + echo "exit_code=${rc}" >> test-evidence/validation/validate-output.txt + + # ── 5. Metadata ──────────────────────────────────────────────────── + - name: Generate metadata.json + run: | + TAG="${GITHUB_REF#refs/tags/}" + RUST_VERSION="$(rustc --version)" + OS_INFO="$(uname -srm)" + TIMESTAMP="$(date -u +%Y-%m-%dT%H:%M:%SZ)" + + jq -n \ + --arg tag "${TAG}" \ + --arg commit "${GITHUB_SHA}" \ + --arg timestamp "${TIMESTAMP}" \ + --arg rust_version "${RUST_VERSION}" \ + --arg os "${OS_INFO}" \ + '{tag: $tag, commit: $commit, timestamp: $timestamp, rust_version: $rust_version, os: $os}' \ + > test-evidence/metadata.json + + # ── 6. Package everything ────────────────────────────────────────── + - name: Package test evidence tarball + id: package + run: | + TAG="${GITHUB_REF#refs/tags/}" + ARCHIVE="test-evidence-${TAG}.tar.gz" + tar czf "${ARCHIVE}" test-evidence/ + echo "archive=${ARCHIVE}" >> "$GITHUB_OUTPUT" + echo "tag=${TAG}" >> "$GITHUB_OUTPUT" + + # ── 7. 
Create GitHub Release with asset ──────────────────────────── + - name: Create GitHub Release + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + TAG="${{ steps.package.outputs.tag }}" + ARCHIVE="${{ steps.package.outputs.archive }}" + + gh release create "${TAG}" \ + --title "Release ${TAG}" \ + --generate-notes \ + "${ARCHIVE}#Test Evidence (tar.gz)" diff --git a/Cargo.lock b/Cargo.lock index f8f7eb0..53b07c3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,15 @@ # It is not intended for manual editing. version = 4 +[[package]] +name = "addr2line" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9698bf0769c641b18618039fe2ebd41eb3541f98433000f64e663fab7cea2c87" +dependencies = [ + "gimli", +] + [[package]] name = "aho-corasick" version = "1.1.4" @@ -11,6 +20,27 @@ dependencies = [ "memchr", ] +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "ambient-authority" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9d4ee0d472d1cd2e28c97dfa124b3d8d992e10eb0a035f33f5d12e3a177ba3b" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anes" version = "0.1.6" @@ -53,7 +83,7 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys", + "windows-sys 0.61.2", ] [[package]] @@ -64,7 +94,7 @@ checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", "once_cell_polyfill", - "windows-sys", + "windows-sys 
0.61.2", ] [[package]] @@ -73,6 +103,33 @@ version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" +[[package]] +name = "arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "atomic-waker" version = "1.1.2" @@ -137,6 +194,12 @@ dependencies = [ "tracing", ] +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "bit-set" version = "0.8.0" @@ -158,11 +221,32 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" +[[package]] +name = "bitmaps" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" +dependencies = [ + "typenum", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + [[package]] name = "bumpalo" version = "3.20.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" +dependencies = [ + "allocator-api2", +] [[package]] name = "bytes" @@ -170,12 +254,102 @@ version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" +[[package]] +name = "cap-fs-ext" +version = "3.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5528f85b1e134ae811704e41ef80930f56e795923f866813255bc342cc20654" +dependencies = [ + "cap-primitives", + "cap-std", + "io-lifetimes", + "windows-sys 0.59.0", +] + +[[package]] +name = "cap-net-ext" +version = "3.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20a158160765c6a7d0d8c072a53d772e4cb243f38b04bfcf6b4939cfbe7482e7" +dependencies = [ + "cap-primitives", + "cap-std", + "rustix 1.1.4", + "smallvec", +] + +[[package]] +name = "cap-primitives" +version = "3.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6cf3aea8a5081171859ef57bc1606b1df6999df4f1110f8eef68b30098d1d3a" +dependencies = [ + "ambient-authority", + "fs-set-times", + "io-extras", + "io-lifetimes", + "ipnet", + "maybe-owned", + "rustix 1.1.4", + "rustix-linux-procfs", + "windows-sys 0.59.0", + "winx", +] + +[[package]] +name = "cap-rand" +version = "3.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8144c22e24bbcf26ade86cb6501a0916c46b7e4787abdb0045a467eb1645a1d" +dependencies = [ + "ambient-authority", + "rand 0.8.5", +] + +[[package]] +name = "cap-std" +version = "3.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6dc3090992a735d23219de5c204927163d922f42f575a0189b005c62d37549a" +dependencies = [ + "cap-primitives", + "io-extras", + "io-lifetimes", + "rustix 1.1.4", +] + +[[package]] +name = "cap-time-ext" +version = "3.4.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "def102506ce40c11710a9b16e614af0cde8e76ae51b1f48c04b8d79f4b671a80" +dependencies = [ + "ambient-authority", + "cap-primitives", + "iana-time-zone", + "once_cell", + "rustix 1.1.4", + "winx", +] + [[package]] name = "cast" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" +[[package]] +name = "cc" +version = "1.2.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" +dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", + "shlex", +] + [[package]] name = "cfg-if" version = "1.0.4" @@ -249,12 +423,215 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831" +[[package]] +name = "cobs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa961b519f0b462e3a3b4a34b64d119eeaca1d59af726fe450bbba07a9fc0a1" +dependencies = [ + "thiserror 2.0.18", +] + [[package]] name = "colorchoice" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpp_demangle" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2bb79cb74d735044c972aae58ed0aaa9a837e85b01106a54c39e42e97f62253" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "cranelift-assembler-x64" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40630d663279bc855bff805d6f5e8a0b6a1867f9df95b010511ac6dc894e9395" +dependencies = [ + "cranelift-assembler-x64-meta", +] + +[[package]] +name = "cranelift-assembler-x64-meta" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ee6aec5ceb55e5fdbcf7ef677d7c7195531360ff181ce39b2b31df11d57305f" +dependencies = [ + "cranelift-srcgen", +] + +[[package]] +name = "cranelift-bforest" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a92d78cc3f087d7e7073828f08d98c7074a3a062b6b29a1b7783ce74305685e" +dependencies = [ + "cranelift-entity", +] + +[[package]] +name = "cranelift-bitset" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edcc73d756f2e0d7eda6144fe64a2bc69c624de893cb1be51f1442aed77881d2" +dependencies = [ + "serde", + "serde_derive", + "wasmtime-internal-core", +] + +[[package]] +name = "cranelift-codegen" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d94c2cd0d73b41369b88da1129589bc3a2d99cf49979af1d14751f35b7a1b" +dependencies = [ + "bumpalo", + "cranelift-assembler-x64", + "cranelift-bforest", + "cranelift-bitset", + 
"cranelift-codegen-meta", + "cranelift-codegen-shared", + "cranelift-control", + "cranelift-entity", + "cranelift-isle", + "gimli", + "hashbrown 0.15.5", + "libm", + "log", + "pulley-interpreter", + "regalloc2", + "rustc-hash", + "serde", + "smallvec", + "target-lexicon", + "wasmtime-internal-core", +] + +[[package]] +name = "cranelift-codegen-meta" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "235da0e52ee3a0052d0e944c3470ff025b1f4234f6ec4089d3109f2d2ffa6cbd" +dependencies = [ + "cranelift-assembler-x64-meta", + "cranelift-codegen-shared", + "cranelift-srcgen", + "heck", + "pulley-interpreter", +] + +[[package]] +name = "cranelift-codegen-shared" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20c07c6c440bd1bf920ff7597a1e743ede1f68dcd400730bd6d389effa7662af" + +[[package]] +name = "cranelift-control" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8797c022e02521901e1aee483dea3ed3c67f2bf0a26405c9dd48e8ee7a70944b" +dependencies = [ + "arbitrary", +] + +[[package]] +name = "cranelift-entity" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59d8e72637246edd2cba337939850caa8b201f6315925ec4c156fdd089999699" +dependencies = [ + "cranelift-bitset", + "serde", + "serde_derive", + "wasmtime-internal-core", +] + +[[package]] +name = "cranelift-frontend" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c31db0085c3dfa131e739c3b26f9f9c84d69a9459627aac1ac4ef8355e3411b" +dependencies = [ + "cranelift-codegen", + "log", + "smallvec", + "target-lexicon", +] + +[[package]] +name = "cranelift-isle" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524d804c1ebd8c542e6f64e71aa36934cec17c5da4a9ae3799796220317f5d23" + +[[package]] +name = "cranelift-native" +version = "0.129.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc9598f02540e382e1772416eba18e93c5275b746adbbf06ac1f3cf149415270" +dependencies = [ + "cranelift-codegen", + "libc", + "target-lexicon", +] + +[[package]] +name = "cranelift-srcgen" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d953932541249c91e3fa70a75ff1e52adc62979a2a8132145d4b9b3e6d1a9b6a" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + [[package]] name = "criterion" version = "0.5.1" @@ -267,7 +644,7 @@ dependencies = [ "clap", "criterion-plot", "is-terminal", - "itertools", + "itertools 0.10.5", "num-traits", "once_cell", "oorandom", @@ -288,7 +665,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ "cast", - "itertools", + "itertools 0.10.5", ] [[package]] @@ -323,76 +700,215 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" [[package]] -name = "either" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" - -[[package]] -name = "env_filter" -version = "1.0.0" +name = "crypto-common" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a1c3cc8e57274ec99de65301228b537f1e4eedc1b8e0f9411c6caac8ae7308f" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" dependencies = [ - "log", - "regex", + "generic-array", + "typenum", ] [[package]] -name = "env_logger" -version = "0.11.9" +name = "deadpool" +version = "0.12.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2daee4ea451f429a58296525ddf28b45a3b64f1acf6587e2067437bb11e218d" +checksum = "0be2b1d1d6ec8d846f05e137292d0b89133caf95ef33695424c09568bdd39b1b" dependencies = [ - "anstream", - "anstyle", - "env_filter", - "jiff", - "log", + "deadpool-runtime", + "lazy_static", + "num_cpus", + "tokio", ] [[package]] -name = "equivalent" -version = "1.0.2" +name = "deadpool-runtime" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" +checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" [[package]] -name = "errno" -version = "0.3.14" +name = "debugid" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" dependencies = [ - "libc", - "windows-sys", + "uuid", ] [[package]] -name = "fastrand" -version = "2.3.0" +name = "digest" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] [[package]] -name = "fixedbitset" -version = "0.4.2" +name = "directories-next" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +checksum = "339ee130d97a610ea5a5872d2bbb130fdf68884ff09d3028b81bec8a1ac23bbc" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] [[package]] -name = "fnv" -version = "1.0.7" +name = "dirs-sys-next" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] [[package]] -name = "foldhash" -version = "0.1.5" +name = "displaydoc" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" - -[[package]] -name = "form_urlencoded" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "embedded-io" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" + +[[package]] +name = "embedded-io" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "env_filter" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a1c3cc8e57274ec99de65301228b537f1e4eedc1b8e0f9411c6caac8ae7308f" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2daee4ea451f429a58296525ddf28b45a3b64f1acf6587e2067437bb11e218d" +dependencies = [ + "anstream", + "anstyle", + "env_filter", + "jiff", + "log", +] + +[[package]] +name = 
"equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "etch" +version = "0.1.0" +dependencies = [ + "petgraph", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "fd-lock" +version = "4.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78" +dependencies = [ + "cfg-if", + "rustix 1.1.4", + "windows-sys 0.59.0", +] + +[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ 
+ "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" @@ -400,6 +916,32 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs-set-times" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94e7099f6313ecacbe1256e8ff9d617b75d1bcb16a6fddef94866d225a01a14a" +dependencies = [ + "io-lifetimes", + "rustix 1.1.4", + "windows-sys 0.59.0", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.32" @@ -407,6 +949,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", + "futures-sink", ] [[package]] @@ -415,6 +958,34 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "futures-sink" version = "0.3.32" @@ -433,12 +1004,52 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ + "futures-channel", "futures-core", + "futures-io", + "futures-macro", + "futures-sink", "futures-task", + "memchr", "pin-project-lite", "slab", ] +[[package]] +name = "fxprof-processed-profile" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25234f20a3ec0a962a61770cfe39ecf03cb529a6e474ad8cff025ed497eda557" +dependencies = [ + "bitflags", + "debugid", + "rustc-hash", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + [[package]] name = "getrandom" version = "0.3.4" @@ -464,6 +1075,37 @@ dependencies = [ "wasip3", ] +[[package]] +name = "gimli" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf7f043f89559805f8c7cacc432749b2fa0d0a0a9ee46ce47164ed5ba7f126c" +dependencies = [ + "fnv", + "hashbrown 0.16.1", + "indexmap", + 
"stable_deref_trait", +] + +[[package]] +name = "h2" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "half" version = "2.7.1" @@ -482,6 +1124,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "foldhash", + "serde", ] [[package]] @@ -563,6 +1206,7 @@ dependencies = [ "bytes", "futures-channel", "futures-core", + "h2", "http", "http-body", "httparse", @@ -572,6 +1216,39 @@ dependencies = [ "pin-utils", "smallvec", "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", ] [[package]] @@ -580,52 +1257,234 @@ version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" dependencies = [ + "base64", "bytes", + "futures-channel", + "futures-util", "http", "http-body", "hyper", + "ipnet", + "libc", + "percent-encoding", "pin-project-lite", + "socket2", + "system-configuration", "tokio", "tower-service", + "tracing", + 
"windows-registry", ] [[package]] -name = "id-arena" -version = "2.3.0" +name = "iana-time-zone" +version = "0.1.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] [[package]] -name = "indexmap" -version = "2.13.0" +name = "iana-time-zone-haiku" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ - "equivalent", - "hashbrown 0.16.1", - "serde", - "serde_core", + "cc", ] [[package]] -name = "is-terminal" -version = "0.4.17" +name = "icu_collections" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" dependencies = [ - "hermit-abi", - "libc", - "windows-sys", + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", ] [[package]] -name = "is_terminal_polyfill" -version = "1.70.2" +name = "icu_locale_core" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] [[package]] -name = "itertools" +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", 
+] + +[[package]] +name = "im-rc" +version = "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe" +dependencies = [ + "bitmaps", + "rand_core 0.6.4", + "rand_xoshiro", + "sized-chunks", + "typenum", + "version_check", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "io-extras" +version = "0.18.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2285ddfe3054097ef4b2fe909ef8c3bcd1ea52a8f0d274416caebeef39f04a65" +dependencies = [ + "io-lifetimes", + "windows-sys 0.59.0", +] + +[[package]] +name = "io-lifetimes" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06432fb54d3be7964ecd3649233cddf80db2832f47fec34c01f65b3d9d774983" + +[[package]] +name = "ipnet" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" + +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is-terminal" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + 
+[[package]] +name = "itertools" version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" @@ -633,12 +1492,41 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" +[[package]] +name = "ittapi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b996fe614c41395cdaedf3cf408a9534851090959d90d54a535f675550b64b1" +dependencies = [ + "anyhow", + "ittapi-sys", + "log", +] + +[[package]] +name = "ittapi-sys" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52f5385394064fa2c886205dba02598013ce83d3e92d33dbdc0c52fe0e7bf4fc" +dependencies = [ + "cc", +] + [[package]] name = "jiff" version = "0.2.23" @@ -663,6 +1551,16 @@ dependencies = [ "syn", ] +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.4", + "libc", +] + [[package]] name = "js-sys" version = "0.3.91" @@ -673,6 +1571,18 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "leb128" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" + 
[[package]] name = "leb128fmt" version = "0.1.0" @@ -685,12 +1595,39 @@ version = "0.2.182" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" +[[package]] +name = "libm" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" + +[[package]] +name = "libredox" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1744e39d1d6a9948f4f388969627434e31128196de472883b39f148769bfe30a" +dependencies = [ + "libc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + [[package]] name = "linux-raw-sys" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + [[package]] name = "lock_api" version = "0.4.14" @@ -706,18 +1643,42 @@ version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" +[[package]] +name = "mach2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d640282b302c0bb0a2a8e0233ead9035e3bed871f0b7e81fe4a1ec829765db44" +dependencies = [ + "libc", +] + [[package]] name = "matchit" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" +[[package]] +name = "maybe-owned" +version = "0.3.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4facc753ae494aeb6e3c22f839b158aebd4f9270f55cd3c79906c45476c47ab4" + [[package]] name = "memchr" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" +[[package]] +name = "memfd" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad38eb12aea514a0466ea40a80fd8cc83637065948eb4a426e4aa46261175227" +dependencies = [ + "rustix 1.1.4", +] + [[package]] name = "mime" version = "0.3.17" @@ -742,7 +1703,24 @@ checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ "libc", "wasi", - "windows-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "native-tls" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "465500e14ea162429d264d44189adc38b199b62b1c21eea9f69e4b73cb03bbf2" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", ] [[package]] @@ -754,6 +1732,28 @@ dependencies = [ "autocfg", ] +[[package]] +name = "num_cpus" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.37.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" +dependencies = [ + "crc32fast", + "hashbrown 0.15.5", + "indexmap", + "memchr", +] + [[package]] name = "once_cell" version = "1.21.3" @@ -772,6 +1772,50 @@ version = "11.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" +[[package]] +name = 
"openssl" +version = "0.10.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" + +[[package]] +name = "openssl-sys" +version = "0.9.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "parking_lot" version = "0.12.5" @@ -823,6 +1867,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + [[package]] name = "plotters" version = "0.3.7" @@ -866,6 +1916,27 @@ dependencies = [ "portable-atomic", ] +[[package]] +name = "postcard" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6764c3b5dd454e283a30e6dfe78e9b31096d9e32036b5d1eaac7a6119ccb9a24" +dependencies = [ + "cobs", + "embedded-io 0.4.0", + "embedded-io 0.6.1", + "serde", +] + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + [[package]] name = "ppv-lite86" version = "0.2.21" @@ -904,8 +1975,8 @@ dependencies = [ "bit-vec", "bitflags", "num-traits", - "rand", - "rand_chacha", + "rand 0.9.2", + "rand_chacha 0.9.0", "rand_xorshift", "regex-syntax", "rusty-fork", @@ -913,12 +1984,45 @@ dependencies = [ "unarray", ] +[[package]] +name = "pulley-interpreter" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc2d61e068654529dc196437f8df0981db93687fdc67dec6a5de92363120b9da" +dependencies = [ + "cranelift-bitset", + "log", + "pulley-macros", + "wasmtime-internal-core", +] + +[[package]] +name = "pulley-macros" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3f210c61b6ecfaebbba806b6d9113a222519d4e5cc4ab2d5ecca047bb7927ae" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "quick-error" version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" +[[package]] +name = "quick-xml" +version = "0.37.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "quote" version = "1.0.45" @@ -940,24 +2044,54 @@ version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + [[package]] name = "rand" version = "0.9.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ - "rand_chacha", - "rand_core", + "rand_chacha 0.9.0", + "rand_core 0.9.5", ] [[package]] name = "rand_chacha" -version = "0.9.0" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.17", ] [[package]] @@ -975,7 +2109,16 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" dependencies = [ - "rand_core", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_xoshiro" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" +dependencies = [ + "rand_core 0.6.4", ] [[package]] @@ -1007,6 +2150,31 @@ dependencies = [ "bitflags", ] +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom 0.2.17", + "libredox", + "thiserror 1.0.69", +] + +[[package]] +name = "regalloc2" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"08effbc1fa53aaebff69521a5c05640523fab037b34a4a2c109506bc938246fa" +dependencies = [ + "allocator-api2", + "bumpalo", + "hashbrown 0.15.5", + "log", + "rustc-hash", + "smallvec", +] + [[package]] name = "regex" version = "1.12.3" @@ -1036,6 +2204,60 @@ version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-core", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "js-sys", + "log", + "mime", + "native-tls", + "percent-encoding", + "pin-project-lite", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-native-tls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.17", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + [[package]] name = "rivet-cli" version = "0.1.0" @@ -1044,7 +2266,9 @@ dependencies = [ "axum", "clap", "env_logger", + "etch", "log", + "petgraph", "rivet-core", "serde", "serde_json", @@ -1061,10 +2285,42 @@ dependencies = [ "log", "petgraph", "proptest", + "quick-xml", + "reqwest", "serde", "serde_json", "serde_yaml", - "thiserror", + "thiserror 2.0.18", + "tokio", + "urlencoding", + "wasmtime", + "wasmtime-wasi", + "wiremock", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.27" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b50b8869d9fc858ce7266cce0194bd74df58b9d0e3f6df3a9fc8eb470d95c09d" + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustix" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys 0.4.15", + "windows-sys 0.59.0", ] [[package]] @@ -1076,8 +2332,51 @@ dependencies = [ "bitflags", "errno", "libc", - "linux-raw-sys", - "windows-sys", + "linux-raw-sys 0.12.1", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustix-linux-procfs" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fc84bf7e9aa16c4f2c758f27412dc9841341e16aa682d9c7ac308fe3ee12056" +dependencies = [ + "once_cell", + "rustix 1.1.4", +] + +[[package]] +name = "rustls" +version = "0.23.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +dependencies = [ + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", ] [[package]] @@ -1113,17 +2412,53 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "schannel" +version = 
"0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "security-framework" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" +dependencies = [ + "bitflags", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "semver" version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] [[package]] name = "serde" @@ -1179,6 +2514,15 @@ dependencies = [ "serde_core", ] +[[package]] +name = "serde_spanned" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" +dependencies = [ + "serde_core", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -1204,6 +2548,23 @@ dependencies = [ "unsafe-libyaml", ] +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "signal-hook-registry" version = "1.4.8" @@ -1214,6 +2575,16 @@ dependencies = [ "libc", ] +[[package]] +name = "sized-chunks" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" +dependencies = [ + "bitmaps", + "typenum", +] + [[package]] name = "slab" version = "0.4.12" @@ -1225,6 +2596,9 @@ name = "smallvec" version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +dependencies = [ + "serde", +] [[package]] name = "socket2" @@ -1233,15 +2607,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.61.2", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + [[package]] name = "strsim" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + [[package]] name = "syn" version = "2.0.117" @@ -1258,6 +2644,63 @@ name = "sync_wrapper" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "system-configuration" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b" +dependencies = [ + "bitflags", + "core-foundation 0.9.4", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "system-interface" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc4592f674ce18521c2a81483873a49596655b179f71c5e05d10c1fe66c78745" +dependencies = [ + "bitflags", + "cap-fs-ext", + "cap-std", + "fd-lock", + "io-lifetimes", + "rustix 0.38.44", + "windows-sys 0.59.0", + "winx", +] + +[[package]] +name = "target-lexicon" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb6935a6f5c20170eeceb1a3835a49e12e19d792f6dd344ccc76a985ca5a6ca" [[package]] name = "tempfile" @@ -1268,8 +2711,26 @@ dependencies = [ "fastrand", "getrandom 0.4.2", "once_cell", - "rustix", - "windows-sys", + "rustix 1.1.4", + "windows-sys 0.61.2", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", ] 
[[package]] @@ -1278,7 +2739,18 @@ version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" dependencies = [ - "thiserror-impl", + "thiserror-impl 2.0.18", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -1292,6 +2764,16 @@ dependencies = [ "syn", ] +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinytemplate" version = "1.2.1" @@ -1316,7 +2798,7 @@ dependencies = [ "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.61.2", ] [[package]] @@ -1330,6 +2812,26 @@ dependencies = [ "syn", ] +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.18" @@ -1343,6 +2845,45 @@ dependencies = [ "tokio", ] +[[package]] +name = "toml" +version = "0.9.12+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf92845e79fc2e2def6a5d828f0801e29a2f8acc037becc5ab08595c7d5e9863" +dependencies = [ + "indexmap", + "serde_core", + "serde_spanned", + "toml_datetime", + "toml_parser", + 
"toml_writer", + "winnow", +] + +[[package]] +name = "toml_datetime" +version = "0.7.5+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_parser" +version = "1.0.9+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" +dependencies = [ + "winnow", +] + +[[package]] +name = "toml_writer" +version = "1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" + [[package]] name = "tower" version = "0.5.3" @@ -1374,12 +2915,14 @@ dependencies = [ "http-body-util", "http-range-header", "httpdate", + "iri-string", "mime", "mime_guess", "percent-encoding", "pin-project-lite", "tokio", "tokio-util", + "tower", "tower-layer", "tower-service", "tracing", @@ -1405,18 +2948,42 @@ checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ "log", "pin-project-lite", + "tracing-attributes", "tracing-core", ] [[package]] -name = "tracing-core" -version = "0.1.36" +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" dependencies = [ "once_cell", ] +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = 
"1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + [[package]] name = "unarray" version = "0.1.4" @@ -1435,6 +3002,12 @@ version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" +[[package]] +name = "unicode-width" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" + [[package]] name = "unicode-xid" version = "0.2.6" @@ -1447,12 +3020,64 @@ version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "uuid" +version = "1.22.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + [[package]] name = "wait-timeout" version = "0.2.1" @@ -1472,6 +3097,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" @@ -1509,6 +3143,20 @@ dependencies = [ "wasm-bindgen-shared", ] +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9c5522b3a28661442748e09d40924dfb9ca614b21c00d3fd135720e48b67db8" +dependencies = [ + "cfg-if", + "futures-util", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "wasm-bindgen-macro" version = "0.2.114" @@ -1541,6 +3189,27 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-compose" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92cda9c76ca8dcac01a8b497860c2cb15cd6f216dc07060517df5abbe82512ac" +dependencies = [ + "anyhow", + "heck", + "im-rc", + "indexmap", + "log", + "petgraph", + "serde", + "serde_derive", + "serde_yaml", + "smallvec", + "wasm-encoder 0.244.0", + "wasmparser 0.244.0", + "wat", +] + [[package]] name = "wasm-encoder" version = "0.244.0" @@ -1548,7 +3217,17 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" dependencies = [ "leb128fmt", - "wasmparser", + "wasmparser 0.244.0", +] + +[[package]] +name = "wasm-encoder" +version = "0.245.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9dca005e69bf015e45577e415b9af8c67e8ee3c0e38b5b0add5aa92581ed5c" +dependencies = [ + "leb128fmt", + "wasmparser 0.245.1", ] [[package]] @@ -1559,8 +3238,8 @@ checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" dependencies = [ "anyhow", "indexmap", - "wasm-encoder", - "wasmparser", + "wasm-encoder 0.244.0", + "wasmparser 0.244.0", ] [[package]] @@ -1573,6 +3252,356 @@ dependencies = [ "hashbrown 0.15.5", "indexmap", "semver", + "serde", +] + +[[package]] +name = "wasmparser" +version = "0.245.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f08c9adee0428b7bddf3890fc27e015ac4b761cc608c822667102b8bfd6995e" +dependencies = [ + "bitflags", + "indexmap", + "semver", +] + +[[package]] +name = "wasmprinter" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09390d7b2bd7b938e563e4bff10aa345ef2e27a3bc99135697514ef54495e68f" +dependencies = [ + "anyhow", + "termcolor", + "wasmparser 0.244.0", +] + +[[package]] +name = "wasmtime" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39bef52be4fb4c5b47d36f847172e896bc94b35c9c6a6f07117686bd16ed89a7" +dependencies = [ + "addr2line", + "async-trait", + "bitflags", + "bumpalo", + "cc", + "cfg-if", + "encoding_rs", + "futures", + "fxprof-processed-profile", + "gimli", + "ittapi", + "libc", + "log", + "mach2", + "memfd", + "object", + "once_cell", + "postcard", + "pulley-interpreter", + "rayon", + "rustix 1.1.4", + "semver", + "serde", + "serde_derive", + "serde_json", + "smallvec", + "target-lexicon", + "tempfile", + "wasm-compose", + 
"wasm-encoder 0.244.0", + "wasmparser 0.244.0", + "wasmtime-environ", + "wasmtime-internal-cache", + "wasmtime-internal-component-macro", + "wasmtime-internal-component-util", + "wasmtime-internal-core", + "wasmtime-internal-cranelift", + "wasmtime-internal-fiber", + "wasmtime-internal-jit-debug", + "wasmtime-internal-jit-icache-coherence", + "wasmtime-internal-unwinder", + "wasmtime-internal-versioned-export-macros", + "wasmtime-internal-winch", + "wat", + "windows-sys 0.61.2", +] + +[[package]] +name = "wasmtime-environ" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb637d5aa960ac391ca5a4cbf3e45807632e56beceeeb530e14dfa67fdfccc62" +dependencies = [ + "anyhow", + "cpp_demangle", + "cranelift-bitset", + "cranelift-entity", + "gimli", + "hashbrown 0.15.5", + "indexmap", + "log", + "object", + "postcard", + "rustc-demangle", + "semver", + "serde", + "serde_derive", + "smallvec", + "target-lexicon", + "wasm-encoder 0.244.0", + "wasmparser 0.244.0", + "wasmprinter", + "wasmtime-internal-component-util", + "wasmtime-internal-core", +] + +[[package]] +name = "wasmtime-internal-cache" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ab6c428c610ae3e7acd25ca2681b4d23672c50d8769240d9dda99b751d4deec" +dependencies = [ + "base64", + "directories-next", + "log", + "postcard", + "rustix 1.1.4", + "serde", + "serde_derive", + "sha2", + "toml", + "wasmtime-environ", + "windows-sys 0.61.2", + "zstd", +] + +[[package]] +name = "wasmtime-internal-component-macro" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca768b11d5e7de017e8c3d4d444da6b4ce3906f565bcbc253d76b4ecbb5d2869" +dependencies = [ + "anyhow", + "proc-macro2", + "quote", + "syn", + "wasmtime-internal-component-util", + "wasmtime-internal-wit-bindgen", + "wit-parser", +] + +[[package]] +name = "wasmtime-internal-component-util" +version = "42.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "763f504faf96c9b409051e96a1434655eea7f56a90bed9cb1e22e22c941253fd" + +[[package]] +name = "wasmtime-internal-core" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03a4a3f055a804a2f3d86e816a9df78a8fa57762212a8506164959224a40cd48" +dependencies = [ + "anyhow", + "libm", +] + +[[package]] +name = "wasmtime-internal-cranelift" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55154a91d22ad51f9551124ce7fb49ddddc6a82c4910813db4c790c97c9ccf32" +dependencies = [ + "cfg-if", + "cranelift-codegen", + "cranelift-control", + "cranelift-entity", + "cranelift-frontend", + "cranelift-native", + "gimli", + "itertools 0.14.0", + "log", + "object", + "pulley-interpreter", + "smallvec", + "target-lexicon", + "thiserror 2.0.18", + "wasmparser 0.244.0", + "wasmtime-environ", + "wasmtime-internal-core", + "wasmtime-internal-unwinder", + "wasmtime-internal-versioned-export-macros", +] + +[[package]] +name = "wasmtime-internal-fiber" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05decfad1021ad2efcca5c1be9855acb54b6ee7158ac4467119b30b7481508e3" +dependencies = [ + "cc", + "cfg-if", + "libc", + "rustix 1.1.4", + "wasmtime-environ", + "wasmtime-internal-versioned-export-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "wasmtime-internal-jit-debug" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924980c50427885fd4feed2049b88380178e567768aaabf29045b02eb262eaa7" +dependencies = [ + "cc", + "object", + "rustix 1.1.4", + "wasmtime-internal-versioned-export-macros", +] + +[[package]] +name = "wasmtime-internal-jit-icache-coherence" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c57d24e8d1334a0e5a8b600286ffefa1fc4c3e8176b110dff6fbc1f43c4a599b" +dependencies = [ + 
"cfg-if", + "libc", + "wasmtime-internal-core", + "windows-sys 0.61.2", +] + +[[package]] +name = "wasmtime-internal-unwinder" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a1a144bd4393593a868ba9df09f34a6a360cb5db6e71815f20d3f649c6e6735" +dependencies = [ + "cfg-if", + "cranelift-codegen", + "log", + "object", + "wasmtime-environ", +] + +[[package]] +name = "wasmtime-internal-versioned-export-macros" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a6948b56bb00c62dbd205ea18a4f1ceccbe1e4b8479651fdb0bab2553790f20" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "wasmtime-internal-winch" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9130b3ab6fb01be80b27b9a2c84817af29ae8224094f2503d2afa9fea5bf9d00" +dependencies = [ + "cranelift-codegen", + "gimli", + "log", + "object", + "target-lexicon", + "wasmparser 0.244.0", + "wasmtime-environ", + "wasmtime-internal-cranelift", + "winch-codegen", +] + +[[package]] +name = "wasmtime-internal-wit-bindgen" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "102d0d70dbfede00e4cc9c24e86df6d32c03bf6f5ad06b5d6c76b0a4a5004c4a" +dependencies = [ + "anyhow", + "bitflags", + "heck", + "indexmap", + "wit-parser", +] + +[[package]] +name = "wasmtime-wasi" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea938f6f4f11e5ffe6d8b6f34c9a994821db9511c3e9c98e535896f27d06bb92" +dependencies = [ + "async-trait", + "bitflags", + "bytes", + "cap-fs-ext", + "cap-net-ext", + "cap-rand", + "cap-std", + "cap-time-ext", + "fs-set-times", + "futures", + "io-extras", + "io-lifetimes", + "rustix 1.1.4", + "system-interface", + "thiserror 2.0.18", + "tokio", + "tracing", + "url", + "wasmtime", + "wasmtime-wasi-io", + "wiggle", + "windows-sys 0.61.2", +] + +[[package]] +name = 
"wasmtime-wasi-io" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71cb16a88d0443b509d6eca4298617233265179090abf03e0a8042b9b251e9da" +dependencies = [ + "async-trait", + "bytes", + "futures", + "tracing", + "wasmtime", +] + +[[package]] +name = "wast" +version = "35.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ef140f1b49946586078353a453a1d28ba90adfc54dde75710bc1931de204d68" +dependencies = [ + "leb128", +] + +[[package]] +name = "wast" +version = "245.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28cf1149285569120b8ce39db8b465e8a2b55c34cbb586bd977e43e2bc7300bf" +dependencies = [ + "bumpalo", + "leb128fmt", + "memchr", + "unicode-width", + "wasm-encoder 0.245.1", +] + +[[package]] +name = "wat" +version = "1.245.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd48d1679b6858988cb96b154dda0ec5bbb09275b71db46057be37332d5477be" +dependencies = [ + "wast 245.0.1", ] [[package]] @@ -1585,13 +3614,129 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "wiggle" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dca2bf96d20f0c70e6741cc6c8c1a9ee4c3c0310c7ad1971242628c083cc9a5" +dependencies = [ + "bitflags", + "thiserror 2.0.18", + "tracing", + "wasmtime", + "wasmtime-environ", + "wiggle-macro", +] + +[[package]] +name = "wiggle-generate" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0d8c016d6d3ec6dc6b8c80c23cede4ee2386ccf347d01984f7991d7659f73ef" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", + "wasmtime-environ", + "witx", +] + +[[package]] +name = "wiggle-macro" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91a267096e48857096f035fffca29e22f0bbe840af4d74a6725eb695e1782110" +dependencies = [ + "proc-macro2", + "quote", + "syn", + 
"wiggle-generate", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + [[package]] name = "winapi-util" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "winch-codegen" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1977857998e4dd70d26e2bfc0618a9684a2fb65b1eca174dc13f3b3e9c2159ca" +dependencies = [ + "cranelift-assembler-x64", + "cranelift-codegen", + "gimli", + "regalloc2", + "smallvec", + "target-lexicon", + "thiserror 2.0.18", + "wasmparser 0.244.0", + "wasmtime-environ", + "wasmtime-internal-core", + "wasmtime-internal-cranelift", +] + +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" 
+dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -1600,6 +3745,53 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" +[[package]] +name = "windows-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" +dependencies = [ + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + [[package]] name = "windows-sys" version = "0.61.2" @@ -1609,6 +3801,109 @@ dependencies = [ "windows-link", ] +[[package]] +name = "windows-targets" +version = "0.52.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winnow" +version = "0.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df79d97927682d2fd8adb29682d1140b343be4ac0f08fd68b7765d9c059d3945" + +[[package]] +name = "winx" +version = "0.36.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d" +dependencies = [ + "bitflags", + "windows-sys 0.59.0", +] + +[[package]] +name = "wiremock" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08db1edfb05d9b3c1542e521aea074442088292f00b5f28e435c714a98f85031" +dependencies = [ + "assert-json-diff", + "base64", + "deadpool", + "futures", + "http", + "http-body-util", + "hyper", + "hyper-util", + "log", + "once_cell", + "regex", + "serde", + "serde_json", + "tokio", + "url", +] + [[package]] name = "wit-bindgen" version = "0.51.0" @@ -1673,9 +3968,9 @@ dependencies = [ "serde", "serde_derive", "serde_json", - "wasm-encoder", + "wasm-encoder 0.244.0", "wasm-metadata", - "wasmparser", + "wasmparser 0.244.0", "wit-parser", ] @@ -1694,7 +3989,48 @@ dependencies = [ "serde_derive", "serde_json", "unicode-xid", - "wasmparser", + "wasmparser 0.244.0", +] + +[[package]] +name = "witx" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e366f27a5cabcddb2706a78296a40b8fcc451e1a6aba2fc1d94b4a01bdaaef4b" +dependencies = [ + "anyhow", + "log", + "thiserror 1.0.69", + "wast 35.0.2", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ 
+ "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", ] [[package]] @@ -1717,8 +4053,96 @@ dependencies = [ "syn", ] +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "zmij" version = "1.0.21" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" + +[[package]] +name = "zstd" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.16+zstd.1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" +dependencies = [ + "cc", + "pkg-config", +] diff --git a/Cargo.toml b/Cargo.toml index 677d622..95cc9cb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ resolver = "2" members = [ "rivet-core", "rivet-cli", + "etch", ] [workspace.package] @@ -34,11 +35,17 @@ env_logger = "0.11" # HTTP / serve axum = "0.8" +reqwest = { version = "0.12", features = ["json"] } tokio = { version = "1", features = ["full"] } tower-http = { version = "0.6", features = ["cors", "fs"] } +urlencoding = "2" # XML (ReqIF) quick-xml = { version = "0.37", features = ["serialize"] } +# WASM component model +wasmtime = { version = "42", features = ["component-model"] } +wasmtime-wasi = "42" + # Benchmarking criterion = { version = "0.5", features = ["html_reports"] } diff --git a/README.md b/README.md index cd5d81e..d7121a1 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,7 @@   [![CI](https://github.com/pulseengine/rivet/actions/workflows/ci.yml/badge.svg)](https://github.com/pulseengine/rivet/actions/workflows/ci.yml) +[![codecov](https://codecov.io/gh/pulseengine/rivet/branch/main/graph/badge.svg)](https://codecov.io/gh/pulseengine/rivet) 
![Rust](https://img.shields.io/badge/Rust-CE422B?style=flat-square&logo=rust&logoColor=white&labelColor=1a1b27) ![YAML](https://img.shields.io/badge/YAML-654FF0?style=flat-square&logoColor=white&labelColor=1a1b27) ![STPA](https://img.shields.io/badge/STPA-654FF0?style=flat-square&logoColor=white&labelColor=1a1b27) diff --git a/artifacts/decisions.yaml b/artifacts/decisions.yaml index c40b206..229c078 100644 --- a/artifacts/decisions.yaml +++ b/artifacts/decisions.yaml @@ -189,3 +189,30 @@ artifacts: alternatives: > Manual timing with std::time::Instant. Rejected because it lacks statistical rigor and regression detection. + + - id: DD-010 + type: design-decision + title: ASPICE 4.0 verification terminology and composable cybersecurity schema + status: approved + description: > + Update aspice.yaml to v4.0 terminology: rename artifact types from + *-test to *-verification, expand method values, add verification-criteria. + Cybersecurity (SEC.1-4, ISO 21434) is a separate composable schema + file rather than embedded in aspice.yaml. + tags: [schema, aspice, cybersecurity] + links: + - type: satisfies + target: REQ-015 + - type: satisfies + target: REQ-016 + fields: + rationale: > + ASPICE 4.0 broadened "testing" to "verification measures" — the + schema should reflect this. Cybersecurity as a separate schema + follows the composable merge pattern (common + aspice + cybersecurity) + so projects can opt in without forcing cybersecurity types on + safety-only projects. + alternatives: > + Keep old test terminology for backward compatibility. Rejected + because the schema is pre-1.0 and alignment with the standard + is more valuable than backward compatibility at this stage. 
diff --git a/artifacts/features.yaml b/artifacts/features.yaml index 0978943..4d1866e 100644 --- a/artifacts/features.yaml +++ b/artifacts/features.yaml @@ -121,7 +121,7 @@ artifacts: - id: FEAT-009 type: feature title: HTTP serve with HTMX dashboard - status: draft + status: approved description: > axum HTTP server serving an HTMX-powered dashboard for browsing artifacts, viewing traceability matrices, and validation results. @@ -137,7 +137,7 @@ artifacts: - id: FEAT-010 type: feature title: ReqIF 1.2 adapter - status: draft + status: approved description: > Import/export adapter for OMG ReqIF 1.2 XML format. tags: [interchange, phase-2] @@ -224,3 +224,41 @@ artifacts: target: DD-009 fields: phase: phase-1 + + - id: FEAT-016 + type: feature + title: ASPICE 4.0 schema alignment + status: approved + description: > + Updated aspice.yaml schema to v4.0: renamed types (unit-verification, + sw-integration-verification, sw-verification, sys-integration-verification, + sys-verification, verification-execution, verification-verdict), + expanded method values (8 verification methods), added + verification-criteria to requirement types. + tags: [schema, aspice, phase-1] + links: + - type: satisfies + target: REQ-015 + - type: implements + target: DD-010 + fields: + phase: phase-1 + + - id: FEAT-017 + type: feature + title: Cybersecurity schema (SEC.1-4) + status: approved + description: > + New cybersecurity.yaml schema with 10 artifact types covering + TARA (asset, threat-scenario, risk-assessment), SEC.1 (cybersecurity-goal, + cybersecurity-req), SEC.2 (cybersecurity-design), SEC.3 + (cybersecurity-implementation), SEC.4 (cybersecurity-verification). + Includes 2 link types and 10 traceability rules. 
+ tags: [schema, cybersecurity, phase-1] + links: + - type: satisfies + target: REQ-016 + - type: implements + target: DD-010 + fields: + phase: phase-1 diff --git a/artifacts/requirements.yaml b/artifacts/requirements.yaml index 19f1d0b..b3e2932 100644 --- a/artifacts/requirements.yaml +++ b/artifacts/requirements.yaml @@ -177,3 +177,34 @@ artifacts: fields: priority: must category: non-functional + + - id: REQ-015 + type: requirement + title: ASPICE 4.0 aligned schemas + status: approved + description: > + Schema definitions must align with Automotive SPICE v4.0 process + reference model. SWE.5 is "Software Component Verification and + Integration Verification", SWE.6 is "Software Verification". + Use "verification measure" terminology instead of "test". + Expanded method values (static-analysis, formal-verification, + simulation, inspection, walkthrough). Verification-criteria field + on requirement types. + tags: [aspice, schema] + fields: + priority: must + category: functional + + - id: REQ-016 + type: requirement + title: Cybersecurity schema (ISO 21434 / ASPICE SEC.1-4) + status: approved + description: > + Support automotive cybersecurity engineering artifacts aligned with + ISO/SAE 21434 and ASPICE v4.0 SEC.1-4 processes. Cover TARA + (assets, threats, risk assessment), cybersecurity goals/requirements, + cybersecurity design, implementation, and verification. 
+ tags: [cybersecurity, aspice, schema] + fields: + priority: should + category: functional diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000..800da15 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,15 @@ +--- +coverage: + status: + project: + default: + target: 60% + threshold: auto + patch: + default: + target: 70% + +comment: + layout: "reach,diff,flags,files" + behavior: default + require_changes: false diff --git a/deny.toml b/deny.toml index 82ffa03..c3c8c4e 100644 --- a/deny.toml +++ b/deny.toml @@ -2,15 +2,13 @@ # https://embarkstudios.github.io/cargo-deny/ [advisories] -# Lint level for crates with known security vulnerabilities -vulnerability = "deny" -# Lint level for crates that have been yanked -yanked = "warn" +# Vulnerabilities are denied by default in cargo-deny >=0.16. +# Yanked crates produce warnings by default. [licenses] # Confidence threshold for license detection confidence-threshold = 0.8 -# List of allowed licenses +# List of allowed licenses (anything not listed is denied) allow = [ "MIT", "Apache-2.0", @@ -18,6 +16,7 @@ allow = [ "BSD-2-Clause", "BSD-3-Clause", "ISC", + "MPL-2.0", "Unicode-3.0", "Unicode-DFS-2016", "Zlib", @@ -25,13 +24,6 @@ allow = [ "BSL-1.0", "CC0-1.0", ] -# Deny copyleft licenses -deny = [ - "GPL-2.0", - "GPL-3.0", - "AGPL-1.0", - "AGPL-3.0", -] [bans] # Lint level for multiple versions of the same crate diff --git a/docs/getting-started.md b/docs/getting-started.md new file mode 100644 index 0000000..bc53708 --- /dev/null +++ b/docs/getting-started.md @@ -0,0 +1,588 @@ +# Getting Started + +Rivet is a schema-driven SDLC artifact manager for safety-critical systems. It keeps +requirements, architecture, design, verification, and safety analysis artifacts as YAML +files in git, then validates link integrity, traceability coverage, and completeness +rules against mergeable schemas. + +No database. No external service. Text files and a fast CLI. 
+ +--- + +## Installation + +### From source + +```bash +git clone https://github.com/pulseengine/rivet.git +cd rivet +cargo install --path rivet-cli +``` + +### Build only + +```bash +cargo build --release +# Binary at target/release/rivet +``` + +Requires Rust edition 2024 (MSRV 1.85). + +--- + +## Quick Start + +### 1. Create a project + +Create `rivet.yaml` in your project root: + +```yaml +project: + name: my-project + version: "0.1.0" + schemas: + - common + - dev + +sources: + - path: artifacts + format: generic-yaml +``` + +### 2. Add a schema directory + +Copy or symlink the built-in schemas into your project: + +```bash +cp -r path/to/rivet/schemas ./schemas +``` + +Or point Rivet at an external schemas directory with `--schemas`: + +```bash +rivet --schemas /path/to/rivet/schemas validate +``` + +Rivet resolves schemas relative to the project directory by default (looks for `schemas/`). + +### 3. Write artifacts + +Create `artifacts/requirements.yaml`: + +```yaml +artifacts: + - id: REQ-001 + type: requirement + title: Text-file-first artifact management + status: approved + description: > + All lifecycle artifacts are stored as YAML files in git repositories. + No database or external service required for core operation. + tags: [core] + fields: + priority: must + category: functional + + - id: REQ-002 + type: requirement + title: Secure boot support + status: draft + description: > + The system shall verify firmware image authenticity before installation. + tags: [security] + fields: + priority: must + category: functional +``` + +### 4. Validate + +```bash +rivet validate +``` + +Output: + +``` +Diagnostics: + WARN: [REQ-001] Every requirement should be satisfied by at least one design decision or feature + WARN: [REQ-002] Every requirement should be satisfied by at least one design decision or feature + +Result: PASS (2 warnings) +``` + +Warnings indicate incomplete traceability coverage. 
Errors indicate broken rules +(missing required links, unknown types, invalid field values). A project with errors +returns a non-zero exit code. + +--- + +## Project Configuration + +The `rivet.yaml` file defines what schemas to load and where to find artifacts. + +```yaml +project: + name: my-project # Project name + version: "0.1.0" # Project version (informational) + schemas: # Schema names to load and merge + - common # Always include common for base link types + - dev # Domain schema (dev, stpa, aspice, cybersecurity) + +sources: # Artifact sources + - path: artifacts # Directory or file path (relative to rivet.yaml) + format: generic-yaml # Adapter format: generic-yaml or stpa-yaml +``` + +Multiple sources can be listed. Each source specifies a path and a format adapter: + +```yaml +sources: + - path: artifacts/requirements.yaml + format: generic-yaml + - path: artifacts/features.yaml + format: generic-yaml + - path: safety/stpa + format: stpa-yaml +``` + +--- + +## Artifact YAML Format + +Artifacts use the generic YAML format. Each file contains an `artifacts` list: + +```yaml +artifacts: + - id: DD-001 + type: design-decision + title: OSLC over per-tool REST adapters + status: approved + description: > + Use OSLC as the integration protocol instead of writing + per-tool REST adapters. + tags: [architecture, oslc] + links: + - type: satisfies + target: REQ-006 + fields: + rationale: > + OSLC is an OASIS standard that Polarion, DOORS, and codebeamer + already support natively. + alternatives: > + Per-tool REST adapters. Rejected due to maintenance burden. +``` + +### Base fields (all artifact types) + +| Field | Type | Required | Description | +|---------------|---------------|----------|------------------------------------------| +| `id` | string | yes | Unique identifier (e.g. 
`REQ-001`) | +| `type` | string | yes | Artifact type defined in a loaded schema | +| `title` | string | yes | Human-readable title | +| `description` | text | no | Detailed description (supports markdown) | +| `status` | enum | no | Lifecycle status | +| `tags` | list\ | no | Arbitrary tags for categorization | + +### Links + +The `links` list declares typed, directional relationships to other artifacts: + +```yaml +links: + - type: satisfies # Link type name (defined in schema) + target: REQ-001 # Target artifact ID + - type: derives-from + target: SYSREQ-003 +``` + +Link types have automatic inverse computation. If artifact A `satisfies` artifact B, +then B is automatically `satisfied-by` A. Inverses do not need to be declared manually. + +### Custom fields + +The `fields` map holds type-specific data. Schemas define which fields are allowed, +required, and what values are valid: + +```yaml +fields: + priority: must # enum field + category: functional # enum field + rationale: "Performance was the driver." # text field + preconditions: # list field + - HSM provisioned + - Test firmware available +``` + +--- + +## Schema System + +Rivet schemas are YAML files that define artifact types, link types, field constraints, +and traceability rules. Schemas are **mergeable** -- a project loads multiple schemas +and Rivet combines them into a single rule set. + +Every project should include `common` as the base schema. Domain schemas extend it: + +```yaml +schemas: + - common # Base link types (satisfies, verifies, derives-from, ...) + - aspice # ASPICE V-model types and traceability rules +``` + +Or combine multiple domains: + +```yaml +schemas: + - common + - aspice + - cybersecurity +``` + +See [schemas.md](schemas.md) for the full schema reference. + +### Schema structure + +A schema file declares: + +```yaml +schema: + name: my-schema + version: "0.1.0" + extends: [common] + description: My domain schema. 
+ +artifact-types: + - name: my-artifact + description: A custom artifact type + fields: + - name: priority + type: string + required: false + allowed-values: [high, medium, low] + link-fields: + - name: satisfies + link-type: satisfies + target-types: [requirement] + required: true + cardinality: one-or-many + +link-types: + - name: my-link + inverse: my-link-inverse + description: A custom link type + +traceability-rules: + - name: my-rule + description: Every my-artifact must satisfy a requirement + source-type: my-artifact + required-link: satisfies + target-types: [requirement] + severity: error +``` + +### Field types + +| Type | Description | +|------------------|---------------------------------------| +| `string` | Single-line text, optionally with `allowed-values` | +| `text` | Multi-line text (markdown supported) | +| `number` | Numeric value | +| `list` | List of strings | +| `structured` | Arbitrary nested YAML structure | +| `enum` | String with `allowed-values` list | + +### Link field cardinality + +| Cardinality | Meaning | +|------------------|--------------------------------------| +| `exactly-one` | Must link to exactly one target | +| `one-or-many` | Must link to one or more targets | +| `zero-or-one` | Optional, at most one target | +| `zero-or-many` | Optional, any number of targets | + +### Traceability rules + +Rules define coverage and completeness checks. Two directions: + +**Forward** (`required-link`): The source artifact must have an outgoing link of +the specified type to one of the target types. + +```yaml +- name: decision-justification + description: Every design decision must link to at least one requirement + source-type: design-decision + required-link: satisfies + target-types: [requirement] + severity: error +``` + +**Backward** (`required-backlink`): The source artifact must be the target of an +incoming link of the specified type from one of the listed `from-types`. 
+ +```yaml +- name: requirement-coverage + description: Every requirement should be satisfied by a design decision or feature + source-type: requirement + required-backlink: satisfies + from-types: [design-decision, feature] + severity: warning +``` + +Severity levels: `error` (validation fails), `warning` (reported but passes), +`info` (informational). + +--- + +## Link Types + +### Common link types (always available) + +| Link | Inverse | Description | +|-----------------|-----------------|--------------------------------------------| +| `traces-to` | `traced-from` | General traceability between any artifacts | +| `satisfies` | `satisfied-by` | Source fulfils the target | +| `refines` | `refined-by` | Source is a refinement of the target | +| `verifies` | `verified-by` | Source verifies or validates the target | +| `implements` | `implemented-by`| Source implements the target | +| `derives-from` | `derived-into` | Source is derived from the target | +| `mitigates` | `mitigated-by` | Source mitigates or prevents the target | +| `allocated-to` | `allocated-from`| Source is allocated to the target | +| `constrained-by`| `constrains` | Source is constrained by the target | + +Domain schemas add additional link types. See [schemas.md](schemas.md) for the full list. + +--- + +## CLI Commands + +### Global options + +``` +rivet [OPTIONS] + +Options: + -p, --project Path to project directory (default: .) + --schemas Path to schemas directory + -v, --verbose Increase verbosity (-v info, -vv debug) +``` + +### `rivet validate` + +Validate all artifacts against loaded schemas. Checks field constraints, link integrity, +target type restrictions, cardinality rules, and traceability rules. + +```bash +rivet validate +``` + +Returns exit code 0 on pass (warnings allowed), non-zero on errors. + +### `rivet list` + +List artifacts with optional filters. 
+ +```bash +rivet list # All artifacts +rivet list -t requirement # Filter by type +rivet list --status approved # Filter by status +rivet list -t feature --status draft +``` + +### `rivet stats` + +Print artifact summary statistics, orphan artifacts (no links), and broken link counts. + +```bash +rivet stats +``` + +Output: + +``` +Artifact summary: + design-decision 6 + feature 12 + requirement 12 + TOTAL 30 + +Orphan artifacts (no links): 3 + REQ-009 + REQ-011 + REQ-012 +``` + +### `rivet matrix` + +Generate a traceability matrix showing coverage between two artifact types. + +```bash +rivet matrix --from requirement --to feature --link satisfies --direction backward +``` + +Options: + +| Flag | Description | +|---------------|----------------------------------------------| +| `--from` | Source artifact type | +| `--to` | Target artifact type | +| `--link` | Link type to trace (auto-detected if omitted)| +| `--direction` | `forward` or `backward` (default: backward) | + +Output: + +``` +Traceability: requirement -> feature (via 'satisfies') + + REQ-001 -> FEAT-001, FEAT-002 + REQ-002 -> FEAT-001 + REQ-009 -> (none) + +Coverage: 8/12 (66.7%) +``` + +### `rivet stpa` + +Load and validate STPA files directly, without a `rivet.yaml` configuration. Useful for +standalone STPA analysis directories. + +```bash +rivet stpa path/to/stpa/ +rivet stpa path/to/stpa/ --schema custom-stpa.yaml +``` + +Automatically loads the `common` and `stpa` schemas from the schemas directory. + +### `rivet export` + +Export all project artifacts to a specified format. + +```bash +rivet export --format reqif --output artifacts.reqif +rivet export --format generic-yaml # stdout +``` + +Supported formats: `reqif` (ReqIF 1.2 XML), `generic-yaml`. + +### `rivet serve` + +Start the HTMX-powered dashboard server. 
+ +```bash +rivet serve # Default port 3000 +rivet serve --port 8080 +``` + +Opens a web dashboard at `http://localhost:3000` with: + +- Artifact listing and detail views +- Validation diagnostics +- Traceability matrix +- Statistics summary +- Schema browser + +### `rivet import` (requires `wasm` feature) + +Import artifacts using a custom WASM adapter component. + +```bash +rivet import --adapter my-adapter.wasm --source data/ --config key=value +``` + +--- + +## Dashboard + +`rivet serve` starts an axum HTTP server with an HTMX-driven dashboard. The dashboard +provides a read-only view of the project state -- it loads all artifacts and schemas at +startup. + +### Routes + +| Path | View | +|--------------------|-----------------------------| +| `/` | Dashboard index | +| `/artifacts` | Artifact list (filterable) | +| `/artifacts/{id}` | Artifact detail with links | +| `/validate` | Validation diagnostics | +| `/matrix` | Traceability matrix | +| `/stats` | Statistics summary | +| `/schemas` | Schema browser | + +Start the dashboard and open `http://localhost:3000` in a browser: + +```bash +rivet serve +# rivet dashboard listening on http://localhost:3000 +``` + +--- + +## Examples + +### Dev dogfooding + +Rivet tracks its own development. The repository root contains: + +``` +rivet.yaml # Loads common + dev schemas +schemas/ + common.yaml # Base link types + dev.yaml # requirement, design-decision, feature types +artifacts/ + requirements.yaml # 12 requirements + decisions.yaml # 6 design decisions + features.yaml # 12 features +``` + +Run `rivet validate` in the repo root to validate 30+ artifacts with traceability +coverage checks. + +### STPA analysis + +For standalone STPA analysis (e.g. 
from [Meld](https://github.com/pulseengine/meld)): + +```bash +rivet stpa /path/to/meld/safety/stpa/ +``` + +This loads STPA YAML files (losses, hazards, control structure, UCAs, controller +constraints, loss scenarios) and validates them against the STPA schema's 7 completeness +rules. + +### Cybersecurity (ASPICE SEC.1-4) + +The `examples/cybersecurity/` directory demonstrates a full cybersecurity traceability +chain aligned with ISO/SAE 21434 and ASPICE v4.0 SEC processes. + +``` +examples/cybersecurity/ + rivet.yaml # Loads common + cybersecurity schemas + cybersecurity.yaml # Assets, threats, risk assessments, goals, requirements, + # designs, implementations, verifications +``` + +The traceability chain flows: + +``` +Asset -> Threat Scenario -> Risk Assessment + -> Cybersecurity Goal -> Cybersecurity Req + -> Cybersecurity Design + -> Cybersecurity Implementation + -> Cybersecurity Verification +``` + +Run from the example directory: + +```bash +cd examples/cybersecurity +rivet --schemas ../../schemas validate +``` + +--- + +## Next Steps + +- Read the [schema reference](schemas.md) for full details on all built-in schemas +- Browse the `artifacts/` directory in the repo for real-world examples +- Run `rivet validate` on your own project to see it in action diff --git a/docs/schemas.md b/docs/schemas.md new file mode 100644 index 0000000..e718506 --- /dev/null +++ b/docs/schemas.md @@ -0,0 +1,626 @@ +# Schema Reference + +Rivet schemas are YAML files that define artifact types, link types, field constraints, +and traceability rules. Multiple schemas are merged at load time -- a project typically +loads `common` plus one or more domain schemas. 
+
+---
+
+## Available Schemas
+
+| Schema          | Version | Types | Rules | Domain                                |
+|-----------------|---------|-------|-------|---------------------------------------|
+| `common`        | 0.1.0   | --    | --    | Base fields and link types            |
+| `dev`           | 0.1.0   | 3     | 2     | Software development tracking         |
+| `stpa`          | 0.1.0   | 10    | 7     | STPA safety analysis                  |
+| `aspice`        | 0.2.0   | 14    | 10    | Automotive SPICE V-model              |
+| `cybersecurity` | 0.1.0   | 10    | 10    | Cybersecurity (SEC.1-4 / ISO 21434)   |
+
+Schemas are located in `schemas/` relative to the project directory.
+
+---
+
+## Common Schema
+
+**File:** `schemas/common.yaml`
+
+The common schema defines base fields present on every artifact and the link types shared
+across all domains. Every domain schema implicitly extends `common`.
+
+### Base fields
+
+| Field         | Type           | Required | Description                              |
+|---------------|----------------|----------|------------------------------------------|
+| `id`          | string         | yes      | Unique identifier                        |
+| `title`       | string         | yes      | Human-readable title                     |
+| `description` | text           | no       | Detailed description (supports markdown) |
+| `status`      | enum           | no       | Lifecycle status                         |
+| `tags`        | list\<string\> | no       | Arbitrary tags for categorization        |
+
+### Link types
+
+| Link             | Inverse          | Description                                    |
+|------------------|------------------|------------------------------------------------|
+| `traces-to`      | `traced-from`    | General traceability between any artifacts     |
+| `satisfies`      | `satisfied-by`   | Source fulfils the target                      |
+| `refines`        | `refined-by`     | Source is a refinement of the target           |
+| `verifies`       | `verified-by`    | Source verifies or validates the target        |
+| `implements`     | `implemented-by` | Source implements the target                   |
+| `derives-from`   | `derived-into`   | Source is derived from the target              |
+| `mitigates`      | `mitigated-by`   | Source mitigates or prevents the target        |
+| `allocated-to`   | `allocated-from` | Source is allocated to the target              |
+| `constrained-by` | `constrains`     | Source is constrained by the target            |
+
+When `source-types` and `target-types` are omitted on a link type, any artifact type may +use it. Domain schemas may add restrictions. + +--- + +## Dev Schema + +**File:** `schemas/dev.yaml` | **Extends:** common + +Lightweight artifact types for tracking requirements, design decisions, and features +within a software project. Used by Rivet to track its own development (dogfooding). + +### Artifact types + +#### `requirement` + +A functional or non-functional requirement. + +| Field | Type | Required | Allowed values | +|------------|--------|----------|-------------------------------------------------| +| `priority` | string | no | `must`, `should`, `could`, `wont` | +| `category` | string | no | `functional`, `non-functional`, `constraint`, `interface` | + +Link fields: +- `satisfies` (zero-or-many) -- link to any artifact this requirement satisfies +- `derives-from` (zero-or-many) -- link to parent requirements + +#### `design-decision` + +An architectural or design decision with rationale. + +| Field | Type | Required | Description | +|----------------|------|----------|----------------------------| +| `rationale` | text | yes | Why this decision was made | +| `alternatives` | text | no | Rejected alternatives | + +Link fields: +- `satisfies` (one-or-many, required) -- must link to at least one `requirement` + +#### `feature` + +A user-visible capability or feature. 
+ +| Field | Type | Required | Allowed values | +|---------|--------|----------|-------------------------------------------| +| `phase` | string | no | `phase-1`, `phase-2`, `phase-3`, `future` | + +Link fields: +- `satisfies` (one-or-many) -- link to requirements this feature satisfies +- `implements` (zero-or-many) -- link to artifacts this feature implements + +### Additional link types + +| Link | Inverse | Description | +|---------------|-------------------|------------------------------------------| +| `depends-on` | `depended-on-by` | Source depends on target being completed | + +### Traceability rules + +| Rule | Severity | Description | +|--------------------------|----------|----------------------------------------------------| +| `requirement-coverage` | warning | Every requirement should be satisfied by a decision or feature | +| `decision-justification` | error | Every design decision must link to a requirement | + +### Example artifact + +```yaml +artifacts: + - id: DD-001 + type: design-decision + title: OSLC over per-tool REST adapters + status: approved + tags: [architecture, oslc] + links: + - type: satisfies + target: REQ-006 + fields: + rationale: > + OSLC is an OASIS standard that Polarion, DOORS, and codebeamer + already support natively. One adapter handles all tools. + alternatives: > + Per-tool REST adapters. Rejected due to maintenance burden. +``` + +--- + +## STPA Schema + +**File:** `schemas/stpa.yaml` | **Extends:** common | **Version:** 0.1.0 + +Artifact types for a complete STPA (Systems-Theoretic Process Analysis) following the +STPA Handbook (Leveson & Thomas, 2018). Covers all four STPA steps. + +### Artifact types + +#### Step 1a -- `loss` + +An undesired event involving something of value to stakeholders. Losses define what the +analysis aims to prevent. + +| Field | Type | Required | +|----------------|---------------|----------| +| `stakeholders` | list\ | no | + +No required links. Losses are the root of the STPA hierarchy. 
+
+#### Step 1b -- `hazard`
+
+A system state that, together with worst-case environmental conditions, leads to a loss.
+
+| Field      | Type   | Required | Allowed values                                       |
+|------------|--------|----------|------------------------------------------------------|
+| `severity` | string | no       | `catastrophic`, `critical`, `marginal`, `negligible` |
+
+Link fields:
+- `losses` via `leads-to-loss` (one-or-many, required) -- must link to at least one `loss`
+
+#### Step 1b -- `sub-hazard`
+
+A refinement of a hazard into a more specific unsafe condition.
+
+Link fields:
+- `parent` via `refines` (exactly-one, required) -- must refine exactly one `hazard`
+
+#### Step 1c -- `system-constraint`
+
+A condition that must be satisfied to prevent a hazard. Each constraint is the inversion
+of a hazard.
+
+| Field           | Type   | Required |
+|-----------------|--------|----------|
+| `spec-baseline` | string | no       |
+
+Link fields:
+- `hazards` via `prevents` (one-or-many, required) -- must prevent at least one `hazard` or `sub-hazard`
+
+#### Step 2 -- `controller`
+
+A system component (human or automated) responsible for issuing control actions.
+
+| Field             | Type           | Required | Allowed values                              |
+|-------------------|----------------|----------|---------------------------------------------|
+| `controller-type` | string         | no       | `human`, `automated`, `human-and-automated` |
+| `source-file`     | string         | no       | Source file implementing this controller    |
+| `process-model`   | list\<string\> | no       | Controller's beliefs about process state    |
+
+No required links.
+
+#### Step 2 -- `controlled-process`
+
+A process being controlled -- the physical or data transformation acted upon by controllers.
+No required fields or links.
+
+#### Step 2 -- `control-action`
+
+An action issued by a controller to a controlled process or another controller. 
+ +| Field | Type | Required | +|----------|--------|----------| +| `action` | string | yes | + +Link fields: +- `source` via `issued-by` (exactly-one, required) -- the issuing `controller` +- `target` via `acts-on` (exactly-one, required) -- the target `controlled-process` or `controller` + +#### Step 3 -- `uca` (Unsafe Control Action) + +A control action that, in a particular context, leads to a hazard. Four UCA types +(provably complete): not-providing, providing, too-early-too-late, stopped-too-soon. + +| Field | Type | Required | Allowed values | +|-------------|--------|----------|---------------------------------------------------------| +| `uca-type` | string | yes | `not-providing`, `providing`, `too-early-too-late`, `stopped-too-soon` | +| `context` | text | no | The context in which the control action is unsafe | +| `rationale` | text | no | Why this UCA leads to the linked hazards | + +Link fields: +- `controller` via `issued-by` (exactly-one, required) -- the responsible `controller` +- `hazards` via `leads-to-hazard` (one-or-many, required) -- hazards this UCA leads to + +#### Step 3b -- `controller-constraint` + +A constraint on a controller's behavior derived by inverting a UCA. + +| Field | Type | Required | +|--------------|------|----------| +| `constraint` | text | yes | + +Link fields: +- `controller` via `constrains-controller` (exactly-one, required) +- `ucas` via `inverts-uca` (one-or-many, required) +- `hazards` via `prevents` (one-or-many, required) + +#### Step 4 -- `loss-scenario` + +A causal pathway describing how a UCA could occur or how the control action could be +improperly executed. 
+
+| Field            | Type           | Required | Allowed values |
+|------------------|----------------|----------|----------------|
+| `scenario-type`  | string         | no       | `controller-failure`, `inadequate-control-algorithm`, `inadequate-process-model`, `inadequate-feedback`, `process-model-flaw`, `coordination-failure`, `actuator-failure`, `sensor-failure`, `control-path` |
+| `causal-factors` | list\<string\> | no       | Contributing causes |
+
+Link fields:
+- `uca` via `caused-by-uca` (zero-or-many) -- the UCA(s) this scenario explains
+- `hazards` via `leads-to-hazard` (one-or-many, required) -- hazards this scenario leads to
+
+### STPA link types
+
+| Link                    | Inverse                     | Source types                             | Target types                   |
+|-------------------------|-----------------------------|------------------------------------------|--------------------------------|
+| `leads-to-loss`         | `loss-caused-by`            | hazard, sub-hazard                       | loss                           |
+| `prevents`              | `prevented-by`              | system-constraint, controller-constraint | hazard, sub-hazard             |
+| `leads-to-hazard`       | `hazard-caused-by`          | uca, loss-scenario                       | hazard, sub-hazard             |
+| `inverts-uca`           | `inverted-by`               | controller-constraint                    | uca                            |
+| `issued-by`             | `issues`                    | uca, control-action                      | controller                     |
+| `constrains-controller` | `controller-constrained-by` | controller-constraint                    | controller                     |
+| `acts-on`               | `acted-on-by`               | control-action                           | controlled-process, controller |
+| `caused-by-uca`         | `causes-scenario`           | loss-scenario                            | uca                            |
+
+### Traceability rules
+
+| Rule                             | Severity | Description                                              |
+|----------------------------------|----------|----------------------------------------------------------|
+| `hazard-has-loss`                | error    | Every hazard must link to at least one loss              |
+| `constraint-has-hazard`          | error    | Every system constraint must link to a hazard            |
+| `uca-has-hazard`                 | error    | Every UCA must link to at least one hazard               |
+| `uca-has-controller`             | error    | Every UCA must link to a controller                      |
+| `controller-constraint-has-uca`  | error    | Every 
controller constraint must link to a UCA | +| `hazard-has-constraint` | warning | Every hazard should be addressed by a constraint | +| `uca-has-controller-constraint` | warning | Every UCA should be addressed by a controller constraint | + +--- + +## ASPICE Schema + +**File:** `schemas/aspice.yaml` | **Extends:** common | **Version:** 0.2.0 + +Artifact types and traceability rules for the full Automotive SPICE V-model, aligned +with ASPICE PAM v4.0. + +### Terminology (ASPICE 4.0) + +ASPICE 4.0 introduced key terminology changes from v3.x: + +- "Test case" is now **"verification measure"** -- broader scope including review, + static analysis, formal verification, simulation, and inspection (not just testing) +- SWE.5 expanded to include component verification +- SWE.6 and SYS.5 renamed from "qualification test" to "verification" + +All verification method fields accept: `automated-test`, `manual-test`, `review`, +`static-analysis`, `formal-verification`, `simulation`, `inspection`, `walkthrough`. 
+ +### Artifact types -- Left side of V (specification) + +| Type | ASPICE Process | Description | +|------------------------|----------------|---------------------------------------| +| `stakeholder-req` | SYS.1 | Stakeholder requirement | +| `system-req` | SYS.2 | System requirement | +| `system-arch-component`| SYS.3 | System architectural element | +| `sw-req` | SWE.1 | Software requirement | +| `sw-arch-component` | SWE.2 | Software architectural element | +| `sw-detail-design` | SWE.3 | Detailed design / unit specification | + +### Artifact types -- Right side of V (verification) + +| Type | ASPICE Process | Description | +|-------------------------------|----------------|--------------------------------------------| +| `unit-verification` | SWE.4 | Unit verification measure | +| `sw-integration-verification` | SWE.5 | SW component + integration verification | +| `sw-verification` | SWE.6 | SW verification against SW requirements | +| `sys-integration-verification`| SYS.4 | System integration verification | +| `sys-verification` | SYS.5 | System verification against system reqs | + +### Artifact types -- Execution results + +| Type | Description | +|---------------------------|----------------------------------------------------------| +| `verification-execution` | A verification run against a specific version | +| `verification-verdict` | Pass/fail verdict for a single measure in an execution | + +Verdict values: `pass`, `fail`, `blocked`, `skipped`, `error`. + +### Key fields + +**`verification-criteria`** (on `system-req`, `sw-req`): ASPICE 4.0 requires requirements +to specify how they will be verified. This field captures those criteria. + +**`method`** (on all verification types): The verification method used. 
+ +### Required link chains + +The ASPICE schema enforces the V-model traceability: + +``` +stakeholder-req sys-verification + | | + v derives-from verifies v + system-req system-req + | | + v allocated-from verifies v + system-arch-component sys-integration-verification + | | + v derives-from verifies v + sw-req system-arch-component + | | + v allocated-from verifies v + sw-arch-component sw-integration-verification + | | + v refines verifies v + sw-detail-design sw-arch-component + | + verifies v + unit-verification + | + verifies v + sw-detail-design +``` + +### ASPICE-specific link types + +| Link | Inverse | Description | +|-----------------------|---------------------|-------------------------------------------| +| `result-of` | `has-result` | Verdict is result of a verification measure | +| `part-of-execution` | `contains-verdict` | Verdict belongs to an execution run | + +### Traceability rules + +| Rule | Severity | Description | +|----------------------------|----------|--------------------------------------------------------| +| `sys2-derives-from-sys1` | error | System req must derive from stakeholder req | +| `swe1-derives-from-sys` | error | SW req must derive from system req or arch component | +| `swe2-allocated-from-swe1` | error | SW arch must be allocated from SW req | +| `swe3-refines-swe2` | error | Detailed design must refine an arch component | +| `swe4-verifies-swe3` | error | Unit verification must verify a detailed design | +| `swe6-verifies-swe1` | error | SW verification must verify a SW requirement | +| `sys5-verifies-sys2` | error | System verification must verify a system requirement | +| `swe1-has-verification` | warning | Every SW req should be verified | +| `sys2-has-verification` | warning | Every system req should be verified | +| `swe3-has-verification` | warning | Every detailed design should be verified | + +--- + +## Cybersecurity Schema + +**File:** `schemas/cybersecurity.yaml` | **Extends:** common | **Version:** 0.1.0 + 
+Artifact types for automotive cybersecurity engineering aligned with Automotive SPICE +v4.0 cybersecurity plug-in (SEC.1-4) and ISO/SAE 21434. + +### Artifact types -- TARA (MAN.7) + +| Type | Description | +|--------------------|--------------------------------------------------------| +| `asset` | Item of value requiring protection (data, function, component) | +| `threat-scenario` | Potential attack scenario against an asset | +| `risk-assessment` | Combined risk level from threat feasibility and impact | + +#### `asset` fields + +| Field | Type | Required | Allowed values | +|-----------------------------|---------------|----------|----------------------------------------------------| +| `asset-type` | string | no | `data`, `function`, `component`, `interface`, `key-material` | +| `cybersecurity-properties` | list\ | no | CIA properties (confidentiality, integrity, etc.) | + +#### `threat-scenario` fields + +| Field | Type | Required | Allowed values | +|----------------------|--------|----------|---------------------------------------| +| `attack-vector` | string | no | `network`, `physical`, `local`, `adjacent` | +| `attack-feasibility` | string | no | `high`, `medium`, `low`, `very-low` | +| `impact` | string | no | `severe`, `major`, `moderate`, `negligible` | + +Link fields: `targets` via `threatens` (one-or-many, required) -- must target at least one `asset` + +#### `risk-assessment` fields + +| Field | Type | Required | Allowed values | +|------------------|--------|----------|----------------------------------------------| +| `risk-level` | string | yes | `unacceptable`, `conditional`, `acceptable` | +| `risk-treatment` | string | no | `mitigate`, `avoid`, `transfer`, `accept` | + +Link fields: `threat` via `assesses` (exactly-one, required) -- must assess one `threat-scenario` + +### Artifact types -- SEC processes + +| Type | ASPICE Process | Description | +|--------------------------------|----------------|------------------------------------------| 
+| `cybersecurity-goal` | SEC.1 | Top-level cybersecurity requirement | +| `cybersecurity-req` | SEC.1 | Detailed cybersecurity requirement | +| `cybersecurity-design` | SEC.2 | Security mechanism or architecture | +| `cybersecurity-implementation` | SEC.3 | Code, configuration, key provisioning | +| `cybersecurity-verification` | SEC.4 | Verification measure (pentest, fuzz, review) | + +#### `cybersecurity-goal` fields + +| Field | Type | Required | Allowed values | +|-------------------------|--------|----------|-----------------| +| `cal` | string | no | `1`, `2`, `3`, `4` (Cybersecurity Assurance Level) | +| `verification-criteria` | text | no | | + +Link fields: `mitigates` (one-or-many, required) -- must mitigate at least one `threat-scenario` + +#### `cybersecurity-verification` methods + +The cybersecurity verification type includes security-specific methods: +`penetration-test`, `fuzz-test`, `code-review`, `static-analysis`, `vulnerability-scan`, +`automated-test`, `manual-test`, `formal-verification`. 
+ +### Required link chain + +``` +asset <- threatens <- threat-scenario <- assesses <- risk-assessment + ^ + | mitigates + cybersecurity-goal + | derives-from + cybersecurity-req + | satisfies + cybersecurity-design + | implements + cybersecurity-implementation + | verifies + cybersecurity-verification +``` + +### Cybersecurity-specific link types + +| Link | Inverse | Source types | Target types | +|--------------|-----------------|--------------------|--------------------| +| `threatens` | `threatened-by` | threat-scenario | asset | +| `assesses` | `assessed-by` | risk-assessment | threat-scenario | + +### Traceability rules + +| Rule | Severity | Description | +|-----------------------------|----------|-------------------------------------------------------| +| `threat-has-asset` | error | Every threat must target an asset | +| `risk-has-threat` | error | Every risk assessment must assess a threat | +| `goal-mitigates-threat` | error | Every goal must mitigate a threat | +| `req-derives-from-goal` | error | Every cybersecurity req must derive from a goal | +| `design-satisfies-req` | error | Every design must satisfy a cybersecurity req | +| `impl-implements-design` | error | Every implementation must implement a design | +| `verification-verifies-sec` | error | Every verification must verify a req, design, or impl | +| `sec-req-has-verification` | warning | Every cybersecurity req should be verified | +| `sec-req-has-design` | warning | Every cybersecurity req should have a design | +| `threat-has-goal` | warning | Every unacceptable threat should be mitigated | + +### Example artifact + +```yaml +artifacts: + - id: CSV-001 + type: cybersecurity-verification + title: Secure boot signature rejection test + status: approved + tags: [secure-boot, test] + fields: + method: automated-test + preconditions: + - HSM provisioned with OEM test root-of-trust key + - Test firmware images with valid, corrupted, and absent signatures + steps: + - step: 1 + action: Flash a 
validly signed firmware image + expected: Image accepted and written to application partition + - step: 2 + action: Flash an image with a corrupted signature + expected: Bootloader rejects the image and retains previous firmware + links: + - type: verifies + target: CSREQ-001 +``` + +--- + +## Creating Custom Schemas + +To create a custom schema for your domain: + +### 1. Create the schema file + +```yaml +schema: + name: my-domain + version: "0.1.0" + extends: [common] + description: Schema for my domain. +``` + +### 2. Define artifact types + +```yaml +artifact-types: + - name: safety-goal + description: A top-level safety goal derived from HARA + fields: + - name: asil + type: string + required: true + allowed-values: [QM, A, B, C, D] + - name: safe-state + type: text + required: false + link-fields: + - name: mitigates + link-type: mitigates + target-types: [hazardous-event] + required: true + cardinality: one-or-many +``` + +### 3. Add domain-specific link types (if needed) + +```yaml +link-types: + - name: decomposes + inverse: decomposed-into + description: Source is a decomposition of the target + source-types: [safety-req] + target-types: [safety-goal] +``` + +Common link types (`satisfies`, `verifies`, `derives-from`, etc.) are inherited from the +`common` schema and do not need to be redeclared. + +### 4. Add traceability rules + +```yaml +traceability-rules: + - name: goal-has-mitigation + description: Every safety goal must mitigate a hazardous event + source-type: safety-goal + required-link: mitigates + target-types: [hazardous-event] + severity: error + + - name: goal-has-requirement + description: Every safety goal should be decomposed into requirements + source-type: safety-goal + required-backlink: decomposes + from-types: [safety-req] + severity: warning +``` + +### 5. 
Register in rivet.yaml + +```yaml +project: + name: my-project + schemas: + - common + - my-domain + +sources: + - path: artifacts + format: generic-yaml +``` + +Place `my-domain.yaml` in the `schemas/` directory alongside `common.yaml`. + +### Schema merging behavior + +When multiple schemas are loaded, Rivet merges them: + +- **Artifact types** are unioned. If two schemas define the same type name, the later + schema's definition takes precedence. +- **Link types** are unioned by name. Duplicates are deduplicated. +- **Traceability rules** are concatenated. All rules from all schemas apply. +- **Base fields** are defined by `common` and always present. + +This allows composition: load `common` + `aspice` + `cybersecurity` to get V-model +traceability and SEC process coverage in a single project. diff --git a/etch/Cargo.toml b/etch/Cargo.toml new file mode 100644 index 0000000..fc78f50 --- /dev/null +++ b/etch/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "etch" +description = "Hierarchical graph layout and SVG rendering — Etch renders" +version.workspace = true +edition.workspace = true +license.workspace = true + +[dependencies] +petgraph = { workspace = true } diff --git a/etch/src/filter.rs b/etch/src/filter.rs new file mode 100644 index 0000000..bef1c9e --- /dev/null +++ b/etch/src/filter.rs @@ -0,0 +1,244 @@ +//! Graph filtering utilities. +//! +//! These helpers produce new [`petgraph::Graph`] instances by subsetting an +//! existing graph. They are generic over node and edge weights and therefore +//! work with any domain model. + +use std::collections::{HashMap, HashSet, VecDeque}; + +use petgraph::Direction; +use petgraph::graph::{Graph, NodeIndex}; +use petgraph::visit::EdgeRef; + +/// Extract a subgraph centred on `focus` within `depth` hops. +/// +/// Both incoming and outgoing edges are followed. The returned graph +/// preserves all edges whose **both** endpoints are within reach. 
+/// +/// # Examples +/// +/// ``` +/// use petgraph::Graph; +/// use etch::filter::ego_subgraph; +/// +/// let mut g = Graph::new(); +/// let a = g.add_node("A"); +/// let b = g.add_node("B"); +/// let c = g.add_node("C"); +/// let d = g.add_node("D"); +/// g.add_edge(a, b, "ab"); +/// g.add_edge(b, c, "bc"); +/// g.add_edge(c, d, "cd"); +/// +/// let sub = ego_subgraph(&g, b, 1); +/// assert_eq!(sub.node_count(), 3); // A, B, C +/// assert_eq!(sub.edge_count(), 2); // ab, bc +/// ``` +pub fn ego_subgraph( + graph: &Graph, + focus: NodeIndex, + depth: usize, +) -> Graph { + // BFS in both directions from `focus`, up to `depth` hops. + let mut visited: HashSet = HashSet::new(); + let mut queue: VecDeque<(NodeIndex, usize)> = VecDeque::new(); + + visited.insert(focus); + queue.push_back((focus, 0)); + + while let Some((node, dist)) = queue.pop_front() { + if dist >= depth { + continue; + } + + for dir in &[Direction::Outgoing, Direction::Incoming] { + for neighbour in graph.neighbors_directed(node, *dir) { + if visited.insert(neighbour) { + queue.push_back((neighbour, dist + 1)); + } + } + } + } + + build_subgraph(graph, &visited) +} + +/// Filter graph to only include nodes matching `predicate`. +/// +/// Edges are retained when **both** endpoints satisfy the predicate. 
+/// +/// # Examples +/// +/// ``` +/// use petgraph::Graph; +/// use etch::filter::filter_nodes; +/// +/// let mut g = Graph::new(); +/// let a = g.add_node("A"); +/// let b = g.add_node("B"); +/// let c = g.add_node("C"); +/// g.add_edge(a, b, "ab"); +/// g.add_edge(b, c, "bc"); +/// +/// let sub = filter_nodes(&g, |_idx, label| *label != "C"); +/// assert_eq!(sub.node_count(), 2); +/// assert_eq!(sub.edge_count(), 1); +/// ``` +pub fn filter_nodes( + graph: &Graph, + predicate: impl Fn(NodeIndex, &N) -> bool, +) -> Graph { + let keep: HashSet = graph + .node_indices() + .filter(|&idx| predicate(idx, &graph[idx])) + .collect(); + + build_subgraph(graph, &keep) +} + +/// Internal helper: build a new graph containing only the nodes in `keep` +/// and edges whose both endpoints are in `keep`. +fn build_subgraph( + graph: &Graph, + keep: &HashSet, +) -> Graph { + let mut sub = Graph::new(); + let mut idx_map: HashMap = HashMap::new(); + + for &old_idx in keep { + let new_idx = sub.add_node(graph[old_idx].clone()); + idx_map.insert(old_idx, new_idx); + } + + for edge in graph.edge_references() { + if let (Some(&new_src), Some(&new_tgt)) = + (idx_map.get(&edge.source()), idx_map.get(&edge.target())) + { + sub.add_edge(new_src, new_tgt, edge.weight().clone()); + } + } + + sub +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use petgraph::Graph; + + #[test] + fn ego_depth_zero() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + g.add_edge(a, b, "ab"); + + let sub = ego_subgraph(&g, a, 0); + assert_eq!(sub.node_count(), 1); + assert_eq!(sub.edge_count(), 0); + } + + #[test] + fn ego_depth_one() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + let c = g.add_node("C"); + let d = g.add_node("D"); + g.add_edge(a, b, "ab"); + 
g.add_edge(b, c, "bc"); + g.add_edge(c, d, "cd"); + + let sub = ego_subgraph(&g, b, 1); + assert_eq!(sub.node_count(), 3); // A, B, C + assert_eq!(sub.edge_count(), 2); // ab, bc + } + + #[test] + fn ego_depth_two() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + let c = g.add_node("C"); + let d = g.add_node("D"); + g.add_edge(a, b, "ab"); + g.add_edge(b, c, "bc"); + g.add_edge(c, d, "cd"); + + let sub = ego_subgraph(&g, b, 2); + assert_eq!(sub.node_count(), 4); // all nodes + assert_eq!(sub.edge_count(), 3); // all edges + } + + #[test] + fn ego_follows_incoming_edges() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + let c = g.add_node("C"); + g.add_edge(a, b, "ab"); + g.add_edge(c, b, "cb"); + + // B has incoming from A and C. + let sub = ego_subgraph(&g, b, 1); + assert_eq!(sub.node_count(), 3); + assert_eq!(sub.edge_count(), 2); + } + + #[test] + fn filter_nodes_excludes_matching() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + let c = g.add_node("C"); + g.add_edge(a, b, "ab"); + g.add_edge(b, c, "bc"); + + let sub = filter_nodes(&g, |_idx, label| *label != "C"); + assert_eq!(sub.node_count(), 2); + assert_eq!(sub.edge_count(), 1); // only ab remains + } + + #[test] + fn filter_nodes_keep_all() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + g.add_edge(a, b, "ab"); + + let sub = filter_nodes(&g, |_idx, _label| true); + assert_eq!(sub.node_count(), 2); + assert_eq!(sub.edge_count(), 1); + } + + #[test] + fn filter_nodes_keep_none() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + g.add_edge(a, b, "ab"); + + let sub = filter_nodes(&g, |_idx, _label| false); + assert_eq!(sub.node_count(), 0); + assert_eq!(sub.edge_count(), 0); + } + + #[test] + fn filter_removes_cross_edges() { + // If we remove B from A->B->C, both edges should be removed. 
+ let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + let c = g.add_node("C"); + g.add_edge(a, b, "ab"); + g.add_edge(b, c, "bc"); + + let sub = filter_nodes(&g, |_idx, label| *label != "B"); + assert_eq!(sub.node_count(), 2); // A, C + assert_eq!(sub.edge_count(), 0); // no edges survive + } +} diff --git a/etch/src/layout.rs b/etch/src/layout.rs new file mode 100644 index 0000000..4256b08 --- /dev/null +++ b/etch/src/layout.rs @@ -0,0 +1,746 @@ +//! Sugiyama-style hierarchical (layered) DAG layout. +//! +//! The algorithm has four phases: +//! +//! 1. **Rank assignment** — topological longest-path, with optional forced +//! ranks per node type. +//! 2. **Ordering within ranks** — barycenter heuristic (4 sweeps) to minimise +//! edge crossings. +//! 3. **Coordinate assignment** — simple placement on a grid with centering. +//! 4. **Edge routing** — polyline waypoints through intermediate ranks. + +use std::collections::HashMap; + +use petgraph::Direction; +use petgraph::graph::{EdgeIndex, Graph, NodeIndex}; +use petgraph::visit::EdgeRef; + +// --------------------------------------------------------------------------- +// Public types +// --------------------------------------------------------------------------- + +/// Direction of the rank axis. +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +pub enum RankDirection { + /// Ranks grow downward (root at top). + #[default] + TopToBottom, + /// Ranks grow rightward (root at left). + LeftToRight, +} + +/// Options that control the layout algorithm. +#[derive(Debug, Clone)] +pub struct LayoutOptions { + /// Width of every node box (px). + pub node_width: f64, + /// Height of every node box (px). + pub node_height: f64, + /// Vertical distance between rank baselines. + pub rank_separation: f64, + /// Horizontal distance between adjacent nodes in the same rank. + pub node_separation: f64, + /// Overall flow direction. 
+ pub rank_direction: RankDirection, + /// Force nodes whose `node_type` matches a key to a specific rank. + /// Ranks are 0-based; lower ranks are rendered closer to the root. + pub type_ranks: HashMap, +} + +impl Default for LayoutOptions { + fn default() -> Self { + Self { + node_width: 180.0, + node_height: 50.0, + rank_separation: 80.0, + node_separation: 40.0, + rank_direction: RankDirection::default(), + type_ranks: HashMap::new(), + } + } +} + +/// Display-level information about a node supplied by the caller. +#[derive(Debug, Clone)] +pub struct NodeInfo { + /// Unique identifier (used in SVG `data-id` and edge lookup). + pub id: String, + /// Primary label rendered inside the node. + pub label: String, + /// Logical type — used for coloring / grouping. + pub node_type: String, + /// Optional secondary text (e.g. a title below the ID). + pub sublabel: Option, +} + +/// Display-level information about an edge supplied by the caller. +#[derive(Debug, Clone)] +pub struct EdgeInfo { + /// Label rendered along the edge path. + pub label: String, +} + +/// A positioned node produced by the layout algorithm. +#[derive(Debug, Clone)] +pub struct LayoutNode { + /// Unique identifier (mirrors [`NodeInfo::id`]). + pub id: String, + /// X coordinate of the top-left corner. + pub x: f64, + /// Y coordinate of the top-left corner. + pub y: f64, + /// Width of the node box. + pub width: f64, + /// Height of the node box. + pub height: f64, + /// Assigned rank (layer index). + pub rank: usize, + /// Primary label. + pub label: String, + /// Node type (for theming). + pub node_type: String, + /// Optional secondary label. + pub sublabel: Option, +} + +/// A routed edge produced by the layout algorithm. +#[derive(Debug, Clone)] +pub struct LayoutEdge { + /// Source node ID. + pub source_id: String, + /// Target node ID. + pub target_id: String, + /// Edge label. + pub label: String, + /// Ordered polyline waypoints `(x, y)`. 
+ pub points: Vec<(f64, f64)>, +} + +/// Complete layout result. +#[derive(Debug, Clone)] +pub struct GraphLayout { + /// Positioned nodes. + pub nodes: Vec, + /// Routed edges. + pub edges: Vec, + /// Total width of the bounding box. + pub width: f64, + /// Total height of the bounding box. + pub height: f64, +} + +// --------------------------------------------------------------------------- +// Public entry point +// --------------------------------------------------------------------------- + +/// Compute a hierarchical layout for the given directed graph. +/// +/// `node_info` and `edge_info` are closures that translate caller-owned +/// node/edge weights into the display-level [`NodeInfo`] / [`EdgeInfo`] +/// structs. This keeps the crate completely domain-agnostic. +/// +/// The function handles cycles gracefully — edges participating in cycles +/// are still routed, but their source nodes are placed according to the +/// topological order of the underlying DAG (after ignoring back-edges for +/// rank assignment). +pub fn layout( + graph: &Graph, + node_info: &impl Fn(NodeIndex, &N) -> NodeInfo, + edge_info: &impl Fn(EdgeIndex, &E) -> EdgeInfo, + options: &LayoutOptions, +) -> GraphLayout { + if graph.node_count() == 0 { + return GraphLayout { + nodes: Vec::new(), + edges: Vec::new(), + width: 0.0, + height: 0.0, + }; + } + + // Collect node info up-front so we can reference it throughout. + let infos: HashMap = graph + .node_indices() + .map(|idx| (idx, node_info(idx, &graph[idx]))) + .collect(); + + // Build NodeIndex → id map for edge routing. + let idx_to_id: HashMap = infos + .iter() + .map(|(&idx, info)| (idx, info.id.clone())) + .collect(); + + // Phase 1 — rank assignment. + let ranks = assign_ranks(graph, &infos, options); + + // Phase 2 — ordering within ranks (barycenter heuristic). 
+ let mut rank_lists = build_rank_lists(graph, &ranks); + for _ in 0..4 { + sweep_down(graph, &mut rank_lists, &ranks); + sweep_up(graph, &mut rank_lists, &ranks); + } + + // Phase 3 — coordinate assignment. + let (layout_nodes, total_w, total_h) = assign_coordinates(&rank_lists, &infos, &ranks, options); + + // Phase 4 — edge routing. + let layout_edges = route_edges(graph, edge_info, &layout_nodes, &idx_to_id, options); + + GraphLayout { + nodes: layout_nodes, + edges: layout_edges, + width: total_w, + height: total_h, + } +} + +// --------------------------------------------------------------------------- +// Phase 1: Rank assignment (longest-path from sources) +// --------------------------------------------------------------------------- + +fn assign_ranks( + graph: &Graph, + infos: &HashMap, + options: &LayoutOptions, +) -> HashMap { + let mut ranks: HashMap = HashMap::new(); + + // Compute in-degrees for a Kahn-style topological traversal that + // tolerates cycles (cycle members are appended after the DAG portion). + let mut in_deg: HashMap = HashMap::new(); + for idx in graph.node_indices() { + in_deg.insert(idx, 0); + } + for edge in graph.edge_references() { + *in_deg.entry(edge.target()).or_insert(0) += 1; + } + + let mut queue: Vec = in_deg + .iter() + .filter(|&(_, deg)| *deg == 0) + .map(|(&idx, _)| idx) + .collect(); + + // Stable sort for deterministic output. + queue.sort_by(|a, b| infos[a].id.cmp(&infos[b].id)); + + let mut topo_order: Vec = Vec::with_capacity(graph.node_count()); + let mut head = 0usize; + + while head < queue.len() { + let node = queue[head]; + head += 1; + topo_order.push(node); + + let mut successors: Vec = graph + .neighbors_directed(node, Direction::Outgoing) + .collect(); + successors.sort_by(|a, b| infos[a].id.cmp(&infos[b].id)); + + for succ in successors { + let deg = in_deg.get_mut(&succ).unwrap(); + *deg -= 1; + if *deg == 0 { + queue.push(succ); + } + } + } + + // Any remaining nodes are in cycles — append them. 
+ if topo_order.len() < graph.node_count() { + for idx in graph.node_indices() { + if !topo_order.contains(&idx) { + topo_order.push(idx); + } + } + } + + // Longest-path rank assignment (forward pass). + for &idx in &topo_order { + let parent_rank: Option = graph + .neighbors_directed(idx, Direction::Incoming) + .filter_map(|pred| ranks.get(&pred).copied()) + .max(); + let rank = match parent_rank { + Some(r) => r + 1, + None => 0, + }; + ranks.insert(idx, rank); + } + + // Apply forced type_ranks overrides. + if !options.type_ranks.is_empty() { + for (&idx, info) in infos { + if let Some(&forced) = options.type_ranks.get(&info.node_type) { + ranks.insert(idx, forced); + } + } + } + + ranks +} + +// --------------------------------------------------------------------------- +// Phase 2: Ordering within ranks (barycenter heuristic) +// --------------------------------------------------------------------------- + +/// Build a `Vec>`, one inner `Vec` per rank, ordered by rank. +fn build_rank_lists( + _graph: &Graph, + ranks: &HashMap, +) -> Vec> { + let max_rank = ranks.values().copied().max().unwrap_or(0); + let mut lists: Vec> = vec![Vec::new(); max_rank + 1]; + for (&idx, &rank) in ranks { + lists[rank].push(idx); + } + // Initial deterministic order by node index. + for list in &mut lists { + list.sort_by_key(|idx| idx.index()); + } + lists +} + +/// One downward sweep: for each rank (top to bottom), reorder nodes by the +/// average position of their predecessors in the rank above. 
+fn sweep_down( + graph: &Graph, + rank_lists: &mut [Vec], + ranks: &HashMap, +) { + let num_ranks = rank_lists.len(); + for r in 1..num_ranks { + let positions_above: HashMap = rank_lists[r - 1] + .iter() + .enumerate() + .map(|(pos, &idx)| (idx, pos)) + .collect(); + + let mut barycenters: Vec<(NodeIndex, f64)> = rank_lists[r] + .iter() + .map(|&idx| { + let preds: Vec = graph + .neighbors_directed(idx, Direction::Incoming) + .filter(|pred| ranks.get(pred).copied() == Some(r - 1)) + .filter_map(|pred| positions_above.get(&pred).copied()) + .collect(); + let bc = if preds.is_empty() { + f64::MAX + } else { + preds.iter().sum::() as f64 / preds.len() as f64 + }; + (idx, bc) + }) + .collect(); + + barycenters.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal)); + rank_lists[r] = barycenters.into_iter().map(|(idx, _)| idx).collect(); + } +} + +/// One upward sweep: for each rank (bottom to top), reorder nodes by the +/// average position of their successors in the rank below. 
+fn sweep_up( + graph: &Graph, + rank_lists: &mut [Vec], + ranks: &HashMap, +) { + let num_ranks = rank_lists.len(); + if num_ranks < 2 { + return; + } + for r in (0..num_ranks - 1).rev() { + let positions_below: HashMap = rank_lists[r + 1] + .iter() + .enumerate() + .map(|(pos, &idx)| (idx, pos)) + .collect(); + + let mut barycenters: Vec<(NodeIndex, f64)> = rank_lists[r] + .iter() + .map(|&idx| { + let succs: Vec = graph + .neighbors_directed(idx, Direction::Outgoing) + .filter(|succ| ranks.get(succ).copied() == Some(r + 1)) + .filter_map(|succ| positions_below.get(&succ).copied()) + .collect(); + let bc = if succs.is_empty() { + f64::MAX + } else { + succs.iter().sum::() as f64 / succs.len() as f64 + }; + (idx, bc) + }) + .collect(); + + barycenters.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal)); + rank_lists[r] = barycenters.into_iter().map(|(idx, _)| idx).collect(); + } +} + +// --------------------------------------------------------------------------- +// Phase 3: Coordinate assignment +// --------------------------------------------------------------------------- + +fn assign_coordinates( + rank_lists: &[Vec], + infos: &HashMap, + ranks: &HashMap, + options: &LayoutOptions, +) -> (Vec, f64, f64) { + let mut nodes: Vec = Vec::new(); + let mut max_x: f64 = 0.0; + let mut max_y: f64 = 0.0; + + // Compute the maximum rank width so we can center narrower ranks. 
+ let rank_widths: Vec = rank_lists + .iter() + .map(|list| { + if list.is_empty() { + 0.0 + } else { + list.len() as f64 * options.node_width + + (list.len() as f64 - 1.0) * options.node_separation + } + }) + .collect(); + + let global_max_width = rank_widths.iter().cloned().fold(0.0f64, f64::max); + + for (rank, list) in rank_lists.iter().enumerate() { + let rank_width = rank_widths[rank]; + let x_offset = (global_max_width - rank_width) / 2.0; + + for (pos, &idx) in list.iter().enumerate() { + let info = &infos[&idx]; + let (x, y) = match options.rank_direction { + RankDirection::TopToBottom => { + let x = x_offset + pos as f64 * (options.node_width + options.node_separation); + let y = rank as f64 * (options.node_height + options.rank_separation); + (x, y) + } + RankDirection::LeftToRight => { + let x = rank as f64 * (options.node_width + options.rank_separation); + let y = x_offset + pos as f64 * (options.node_height + options.node_separation); + (x, y) + } + }; + + if x + options.node_width > max_x { + max_x = x + options.node_width; + } + if y + options.node_height > max_y { + max_y = y + options.node_height; + } + + nodes.push(LayoutNode { + id: info.id.clone(), + x, + y, + width: options.node_width, + height: options.node_height, + rank: *ranks.get(&idx).unwrap_or(&rank), + label: info.label.clone(), + node_type: info.node_type.clone(), + sublabel: info.sublabel.clone(), + }); + } + } + + (nodes, max_x, max_y) +} + +// --------------------------------------------------------------------------- +// Phase 4: Edge routing +// --------------------------------------------------------------------------- + +fn route_edges( + graph: &Graph, + edge_info: &impl Fn(EdgeIndex, &E) -> EdgeInfo, + layout_nodes: &[LayoutNode], + idx_to_id: &HashMap, + options: &LayoutOptions, +) -> Vec { + let node_pos: HashMap<&str, &LayoutNode> = + layout_nodes.iter().map(|n| (n.id.as_str(), n)).collect(); + + let mut edges: Vec = Vec::new(); + + for edge_ref in graph.edge_references() 
{ + let src_idx = edge_ref.source(); + let tgt_idx = edge_ref.target(); + let eidx = edge_ref.id(); + let info = edge_info(eidx, edge_ref.weight()); + + let src_id = match idx_to_id.get(&src_idx) { + Some(id) => id, + None => continue, + }; + let tgt_id = match idx_to_id.get(&tgt_idx) { + Some(id) => id, + None => continue, + }; + + let src_node = match node_pos.get(src_id.as_str()) { + Some(n) => n, + None => continue, + }; + let tgt_node = match node_pos.get(tgt_id.as_str()) { + Some(n) => n, + None => continue, + }; + + let points = compute_waypoints(src_node, tgt_node, options); + + edges.push(LayoutEdge { + source_id: src_id.clone(), + target_id: tgt_id.clone(), + label: info.label, + points, + }); + } + + edges +} + +/// Compute polyline waypoints between two positioned nodes. +/// +/// For adjacent ranks the path is source-bottom-center to target-top-center. +/// For edges spanning multiple ranks, intermediate waypoints are inserted at +/// each intervening rank boundary. +fn compute_waypoints( + src: &LayoutNode, + tgt: &LayoutNode, + _options: &LayoutOptions, +) -> Vec<(f64, f64)> { + let src_cx = src.x + src.width / 2.0; + let tgt_cx = tgt.x + tgt.width / 2.0; + + let (src_attach_y, tgt_attach_y) = if src.rank <= tgt.rank { + (src.y + src.height, tgt.y) // normal: bottom of source to top of target + } else { + (src.y, tgt.y + tgt.height) // back-edge: top of source to bottom of target + }; + + let mut points = vec![(src_cx, src_attach_y)]; + + // Insert intermediate waypoints for long edges (spanning > 1 rank). 
+ let rank_diff = tgt.rank.abs_diff(src.rank); + + if rank_diff > 1 { + for i in 1..rank_diff { + let frac = i as f64 / rank_diff as f64; + let mid_x = src_cx + (tgt_cx - src_cx) * frac; + let mid_y = src_attach_y + (tgt_attach_y - src_attach_y) * frac; + points.push((mid_x, mid_y)); + } + } + + points.push((tgt_cx, tgt_attach_y)); + points +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use petgraph::Graph; + + fn simple_node_info(_idx: NodeIndex, label: &&str) -> NodeInfo { + NodeInfo { + id: label.to_string(), + label: label.to_string(), + node_type: "default".into(), + sublabel: None, + } + } + + fn simple_edge_info(_idx: EdgeIndex, label: &&str) -> EdgeInfo { + EdgeInfo { + label: label.to_string(), + } + } + + #[test] + fn empty_graph() { + let g: Graph<&str, &str> = Graph::new(); + let result = layout( + &g, + &simple_node_info, + &simple_edge_info, + &LayoutOptions::default(), + ); + assert!(result.nodes.is_empty()); + assert!(result.edges.is_empty()); + assert_eq!(result.width, 0.0); + assert_eq!(result.height, 0.0); + } + + #[test] + fn single_node() { + let mut g = Graph::new(); + g.add_node("A"); + let result = layout( + &g, + &simple_node_info, + &simple_edge_info, + &LayoutOptions::default(), + ); + assert_eq!(result.nodes.len(), 1); + assert_eq!(result.nodes[0].id, "A"); + assert_eq!(result.nodes[0].rank, 0); + } + + #[test] + fn chain_a_b_c() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + let c = g.add_node("C"); + g.add_edge(a, b, "ab"); + g.add_edge(b, c, "bc"); + + let result = layout( + &g, + &simple_node_info, + &simple_edge_info, + &LayoutOptions::default(), + ); + assert_eq!(result.nodes.len(), 3); + assert_eq!(result.edges.len(), 2); + + let node_a = result.nodes.iter().find(|n| n.id == "A").unwrap(); + let node_b = 
result.nodes.iter().find(|n| n.id == "B").unwrap(); + let node_c = result.nodes.iter().find(|n| n.id == "C").unwrap(); + + assert_eq!(node_a.rank, 0); + assert_eq!(node_b.rank, 1); + assert_eq!(node_c.rank, 2); + + // Ranks increase downward. + assert!(node_a.y < node_b.y); + assert!(node_b.y < node_c.y); + } + + #[test] + fn diamond_graph() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + let c = g.add_node("C"); + let d = g.add_node("D"); + g.add_edge(a, b, "ab"); + g.add_edge(a, c, "ac"); + g.add_edge(b, d, "bd"); + g.add_edge(c, d, "cd"); + + let result = layout( + &g, + &simple_node_info, + &simple_edge_info, + &LayoutOptions::default(), + ); + assert_eq!(result.nodes.len(), 4); + assert_eq!(result.edges.len(), 4); + + let node_a = result.nodes.iter().find(|n| n.id == "A").unwrap(); + let node_b = result.nodes.iter().find(|n| n.id == "B").unwrap(); + let node_c = result.nodes.iter().find(|n| n.id == "C").unwrap(); + let node_d = result.nodes.iter().find(|n| n.id == "D").unwrap(); + + assert_eq!(node_a.rank, 0); + assert_eq!(node_b.rank, 1); + assert_eq!(node_c.rank, 1); + assert_eq!(node_d.rank, 2); + + // B and C are in the same rank but at different x positions. 
+ assert!((node_b.y - node_c.y).abs() < f64::EPSILON); + assert!((node_b.x - node_c.x).abs() > 1.0); + } + + #[test] + fn type_ranks_override() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + g.add_edge(a, b, "link"); + + let mut opts = LayoutOptions::default(); + opts.type_ranks.insert("default".into(), 5); + + let result = layout(&g, &simple_node_info, &simple_edge_info, &opts); + for node in &result.nodes { + assert_eq!(node.rank, 5, "node {} should be forced to rank 5", node.id); + } + } + + #[test] + fn cycle_handling() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + let c = g.add_node("C"); + g.add_edge(a, b, "ab"); + g.add_edge(b, c, "bc"); + g.add_edge(c, a, "ca"); // creates cycle + + // Should not panic. + let result = layout( + &g, + &simple_node_info, + &simple_edge_info, + &LayoutOptions::default(), + ); + assert_eq!(result.nodes.len(), 3); + assert_eq!(result.edges.len(), 3); + } + + #[test] + fn left_to_right_direction() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + g.add_edge(a, b, "link"); + + let opts = LayoutOptions { + rank_direction: RankDirection::LeftToRight, + ..Default::default() + }; + let result = layout(&g, &simple_node_info, &simple_edge_info, &opts); + let node_a = result.nodes.iter().find(|n| n.id == "A").unwrap(); + let node_b = result.nodes.iter().find(|n| n.id == "B").unwrap(); + + // In left-to-right layout, rank increases along x axis. 
+ assert!(node_a.x < node_b.x); + } + + #[test] + fn multi_rank_edge_waypoints() { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + let c = g.add_node("C"); + g.add_edge(a, b, "ab"); + g.add_edge(b, c, "bc"); + g.add_edge(a, c, "ac"); // spans 2 ranks + + let result = layout( + &g, + &simple_node_info, + &simple_edge_info, + &LayoutOptions::default(), + ); + + let long_edge = result + .edges + .iter() + .find(|e| e.source_id == "A" && e.target_id == "C") + .expect("should find A->C edge"); + + // A->C spans ranks 0..2, so should have 3 waypoints (start, mid, end). + assert_eq!(long_edge.points.len(), 3); + } +} diff --git a/etch/src/lib.rs b/etch/src/lib.rs new file mode 100644 index 0000000..e37427b --- /dev/null +++ b/etch/src/lib.rs @@ -0,0 +1,39 @@ +//! # etch +//! +//! Hierarchical graph layout and SVG rendering for PulseEngine tools. +//! +//! This crate provides a domain-agnostic Sugiyama-style layered DAG layout +//! algorithm and an SVG renderer. It is shared between **rivet** (SDLC +//! traceability) and **spar** (AADL architecture) — so it intentionally knows +//! nothing about artifacts, requirements, or AADL components. Callers supply +//! a `petgraph::Graph` plus closures that extract display-level information +//! from their own node/edge types. +//! +//! ## Quick start +//! +//! ```rust +//! use petgraph::Graph; +//! use etch::{ +//! layout::{layout, LayoutOptions, NodeInfo, EdgeInfo}, +//! svg::{render_svg, SvgOptions}, +//! }; +//! +//! let mut g = Graph::<&str, &str>::new(); +//! let a = g.add_node("A"); +//! let b = g.add_node("B"); +//! g.add_edge(a, b, "links-to"); +//! +//! let gl = layout( +//! &g, +//! &|_idx, n| NodeInfo { id: n.to_string(), label: n.to_string(), node_type: "default".into(), sublabel: None }, +//! &|_idx, e| EdgeInfo { label: e.to_string() }, +//! &LayoutOptions::default(), +//! ); +//! +//! let svg = render_svg(&gl, &SvgOptions::default()); +//! 
assert!(svg.contains(", + /// Font family for all text. + pub font_family: String, + /// Font size in px. + pub font_size: f64, + /// Padding around the entire graph (px). + pub padding: f64, + /// Optional background fill colour for the SVG. + pub background: Option, + /// Stroke colour for edges. + pub edge_color: String, + /// Size of the arrowhead marker (px). + pub arrow_size: f64, + /// Corner radius for node rectangles. + pub rounded_corners: f64, + /// When `true`, emit `data-id` on nodes and `data-href` links. + pub interactive: bool, + /// Base URL prepended to node IDs for `data-href` attributes. + pub base_url: Option, + /// Optional node ID to visually highlight (thicker border). + pub highlight: Option, +} + +impl Default for SvgOptions { + fn default() -> Self { + Self { + type_colors: HashMap::new(), + font_family: "system-ui, -apple-system, sans-serif".into(), + font_size: 13.0, + padding: 20.0, + background: None, + edge_color: "#666".into(), + arrow_size: 8.0, + rounded_corners: 4.0, + interactive: false, + base_url: None, + highlight: None, + } + } +} + +// --------------------------------------------------------------------------- +// Public entry point +// --------------------------------------------------------------------------- + +/// Render a [`GraphLayout`] to an SVG string. +/// +/// The returned string is a complete, self-contained `` document +/// suitable for embedding in HTML or writing to a `.svg` file. +pub fn render_svg(layout: &GraphLayout, options: &SvgOptions) -> String { + let pad = options.padding; + let vb_w = layout.width + pad * 2.0; + let vb_h = layout.height + pad * 2.0; + + let mut svg = String::with_capacity(4096); + + // Opening tag. + writeln!( + svg, + "" + ) + .unwrap(); + + // — arrowhead marker. 
+ write_defs(&mut svg, options); + + // \n"); + svg +} + +// --------------------------------------------------------------------------- +// Internals +// --------------------------------------------------------------------------- + +fn write_defs(svg: &mut String, options: &SvgOptions) { + let s = options.arrow_size; + write!( + svg, + " \n\ + \x20 \n\ + \x20 \n\ + \x20 \n\ + \x20 \n", + s / 2.0, + s / 2.0, + options.edge_color, + ) + .unwrap(); +} + +fn write_style(svg: &mut String, options: &SvgOptions) { + let font = &options.font_family; + let fs = options.font_size; + let ec = &options.edge_color; + + write!( + svg, + " \n", + fs - 2.0, + fs - 2.0, + ) + .unwrap(); +} + +fn write_edges(svg: &mut String, layout: &GraphLayout) { + svg.push_str(" \n"); + + for edge in &layout.edges { + if edge.points.len() < 2 { + continue; + } + + // Build a cubic bezier path through the waypoints. + let path_d = build_bezier_path(&edge.points); + + writeln!( + svg, + " ", + xml_escape(&edge.source_id), + xml_escape(&edge.target_id), + ) + .unwrap(); + + writeln!(svg, " ").unwrap(); + + // Edge label at midpoint. + if !edge.label.is_empty() { + let mid = edge.points.len() / 2; + let (mx, my) = edge.points[mid]; + writeln!( + svg, + " {}", + my - 4.0, + xml_escape(&edge.label), + ) + .unwrap(); + } + + svg.push_str(" \n"); + } + + svg.push_str(" \n"); +} + +fn write_nodes(svg: &mut String, layout: &GraphLayout, options: &SvgOptions) { + svg.push_str(" \n"); + + let default_fill = "#e8e8e8".to_string(); + + for node in &layout.nodes { + let fill = options + .type_colors + .get(&node.node_type) + .unwrap_or(&default_fill); + + let mut attrs = String::new(); + if options.interactive { + write!(attrs, " data-id=\"{}\"", xml_escape(&node.id)).unwrap(); + if let Some(ref base) = options.base_url { + write!(attrs, " data-href=\"{}/{}\"", base, xml_escape(&node.id)).unwrap(); + } + } + + writeln!( + svg, + " ", + css_class_safe(&node.node_type), + ) + .unwrap(); + + // Rectangle. 
+ let r = options.rounded_corners; + let is_highlighted = options.highlight.as_ref().is_some_and(|h| h == &node.id); + let stroke_w = if is_highlighted { "3.0" } else { "1.5" }; + let stroke_c = if is_highlighted { "#ff6600" } else { "#333" }; + writeln!( + svg, + " ", + node.x, node.y, node.width, node.height, + ) + .unwrap(); + + // Primary label. + let text_y = if node.sublabel.is_some() { + node.y + node.height / 2.0 - options.font_size * 0.45 + } else { + node.y + node.height / 2.0 + }; + writeln!( + svg, + " {}", + node.x + node.width / 2.0, + xml_escape(&node.label), + ) + .unwrap(); + + // Sublabel. + if let Some(ref sub) = node.sublabel { + let sub_y = node.y + node.height / 2.0 + options.font_size * 0.65; + writeln!( + svg, + " {}", + node.x + node.width / 2.0, + xml_escape(sub), + ) + .unwrap(); + } + + // Tooltip. + writeln!(svg, " {}", xml_escape(&node.id),).unwrap(); + + svg.push_str(" \n"); + } + + svg.push_str(" \n"); +} + +/// Build a smooth cubic bezier SVG path through the given waypoints. +/// +/// For two points this produces a straight line (`M ... L ...`). +/// For three or more points it produces a `C` (cubic bezier) curve that +/// passes through all waypoints using Catmull-Rom-to-Bezier conversion. +fn build_bezier_path(points: &[(f64, f64)]) -> String { + let mut d = String::new(); + let (x0, y0) = points[0]; + write!(d, "M {x0} {y0}").unwrap(); + + if points.len() == 2 { + let (x1, y1) = points[1]; + write!(d, " L {x1} {y1}").unwrap(); + } else { + // Simple cubic bezier: for each segment use vertical tangent handles. + for i in 0..points.len() - 1 { + let (x1, y1) = points[i]; + let (x2, y2) = points[i + 1]; + let cy1 = y1 + (y2 - y1) * 0.5; + let cy2 = y2 - (y2 - y1) * 0.5; + write!(d, " C {x1} {cy1}, {x2} {cy2}, {x2} {y2}").unwrap(); + } + } + + d +} + +/// Minimal XML escaping for attribute values and text content. 
+fn xml_escape(s: &str) -> String { + s.replace('&', "&") + .replace('<', "<") + .replace('>', ">") + .replace('"', """) +} + +/// Convert a node-type string into a CSS-class-safe identifier. +fn css_class_safe(s: &str) -> String { + s.chars() + .map(|c| { + if c.is_alphanumeric() || c == '-' { + c + } else { + '-' + } + }) + .collect() +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use crate::layout::{EdgeInfo, LayoutOptions, NodeInfo, layout}; + use petgraph::Graph; + use petgraph::graph::{EdgeIndex, NodeIndex}; + + fn build_test_layout() -> GraphLayout { + let mut g = Graph::new(); + let a = g.add_node("A"); + let b = g.add_node("B"); + g.add_edge(a, b, "link"); + + layout( + &g, + &|_idx: NodeIndex, n: &&str| NodeInfo { + id: n.to_string(), + label: n.to_string(), + node_type: "req".into(), + sublabel: Some("Title".into()), + }, + &|_idx: EdgeIndex, e: &&str| EdgeInfo { + label: e.to_string(), + }, + &LayoutOptions::default(), + ) + } + + #[test] + fn svg_contains_root_element() { + let gl = build_test_layout(); + let svg = render_svg(&gl, &SvgOptions::default()); + assert!(svg.starts_with("")); + } + + #[test] + fn svg_contains_defs_and_style() { + let gl = build_test_layout(); + let svg = render_svg(&gl, &SvgOptions::default()); + assert!(svg.contains("")); + assert!(svg.contains("")); + assert!(svg.contains("")); + assert!(svg.contains("arrowhead")); + } + + #[test] + fn svg_contains_nodes() { + let gl = build_test_layout(); + let svg = render_svg(&gl, &SvgOptions::default()); + assert!(svg.contains("class=\"nodes\"")); + assert!(svg.contains("class=\"node type-req\"")); + assert!(svg.contains(">A<")); + assert!(svg.contains(">B<")); + } + + #[test] + fn svg_contains_edges() { + let gl = build_test_layout(); + let svg = render_svg(&gl, &SvgOptions::default()); + 
assert!(svg.contains("class=\"edges\"")); + assert!(svg.contains("class=\"edge\"")); + assert!(svg.contains("Title<")); + } + + #[test] + fn svg_tooltip() { + let gl = build_test_layout(); + let svg = render_svg(&gl, &SvgOptions::default()); + assert!(svg.contains("A")); + assert!(svg.contains("B")); + } + + #[test] + fn xml_escape_special_chars() { + assert_eq!( + xml_escape("&\"x\""), + "<b>&"x"</b>" + ); + } +} diff --git a/examples/cybersecurity/cybersecurity-example.yaml b/examples/cybersecurity/cybersecurity-example.yaml new file mode 100644 index 0000000..7cbfbe8 --- /dev/null +++ b/examples/cybersecurity/cybersecurity-example.yaml @@ -0,0 +1,5 @@ +name: cybersecurity-example +schemas: [common, aspice, cybersecurity] +sources: + - path: . + format: generic-yaml diff --git a/examples/cybersecurity/cybersecurity.yaml b/examples/cybersecurity/cybersecurity.yaml new file mode 100644 index 0000000..bdec622 --- /dev/null +++ b/examples/cybersecurity/cybersecurity.yaml @@ -0,0 +1,336 @@ +artifacts: + # ── Assets (TARA — items of value) ───────────────────────────────────── + + - id: ASSET-001 + type: asset + title: Firmware Update Channel + description: > + OTA and wired firmware update communication channel between the backend + server and the ECU bootloader. Compromise could allow installation of + manipulated firmware. + status: approved + tags: [ota, firmware, sec] + fields: + asset-type: interface + cybersecurity-properties: + - integrity + - authenticity + + - id: ASSET-002 + type: asset + title: UDS Diagnostic Interface + description: > + Unified Diagnostic Services (ISO 14229) interface exposed on the OBD-II + port and over DoIP. Provides security-sensitive services such as + ECU reset, memory read/write, and routine control. 
+ status: approved + tags: [diagnostics, uds, sec] + fields: + asset-type: interface + cybersecurity-properties: + - confidentiality + - integrity + - authenticity + + # ── Threat Scenarios ──────────────────────────────────────────────────── + + - id: TS-001 + type: threat-scenario + title: Malicious firmware injection via OTA channel + description: > + An attacker intercepts or spoofs the OTA update server to deliver a + manipulated firmware image to the ECU bootloader, bypassing + authenticity checks. + status: approved + tags: [ota, spoofing] + fields: + attack-vector: network + attack-feasibility: medium + impact: severe + links: + - type: threatens + target: ASSET-001 + + - id: TS-002 + type: threat-scenario + title: Unauthorized diagnostic session via UDS + description: > + An attacker gains physical or adjacent-network access to the diagnostic + interface and opens a SecurityAccess session without valid credentials, + enabling safety-critical service execution. + status: approved + tags: [diagnostics, elevation-of-privilege] + fields: + attack-vector: physical + attack-feasibility: medium + impact: major + links: + - type: threatens + target: ASSET-002 + + # ── Risk Assessments ──────────────────────────────────────────────────── + + - id: RA-001 + type: risk-assessment + title: Risk of malicious firmware injection + description: > + Network-based attack with medium feasibility and severe safety impact. + Risk level is unacceptable — mitigation required. + status: approved + tags: [ota] + fields: + risk-level: unacceptable + risk-treatment: mitigate + links: + - type: assesses + target: TS-001 + + - id: RA-002 + type: risk-assessment + title: Risk of unauthorized diagnostic access + description: > + Physical attack with medium feasibility and major impact on vehicle + safety functions. Risk level is unacceptable — mitigation required. 
+ status: approved + tags: [diagnostics] + fields: + risk-level: unacceptable + risk-treatment: mitigate + links: + - type: assesses + target: TS-002 + + # ── Cybersecurity Goals (SEC.1 — top-level requirements) ──────────────── + + - id: CSG-001 + type: cybersecurity-goal + title: Ensure firmware image authenticity and integrity + description: > + The system shall verify the authenticity and integrity of every firmware + image before installation to prevent execution of manipulated code. + status: approved + tags: [secure-boot, ota] + fields: + cal: "3" + verification-criteria: > + Demonstrate that the bootloader rejects any firmware image whose + cryptographic signature is invalid or missing. + links: + - type: mitigates + target: TS-001 + + - id: CSG-002 + type: cybersecurity-goal + title: Authenticate diagnostic service requests + description: > + The system shall authenticate every security-sensitive UDS diagnostic + request before granting access to protected services. + status: approved + tags: [diagnostics, authentication] + fields: + cal: "2" + verification-criteria: > + Demonstrate that all SecurityAccess (0x27) and Authentication (0x29) + requests are validated before granting an elevated session. + links: + - type: mitigates + target: TS-002 + + # ── Cybersecurity Requirements (SEC.1 — detailed requirements) ────────── + + - id: CSREQ-001 + type: cybersecurity-req + title: Verify firmware signature using ECDSA-P256 + description: > + The bootloader shall verify the ECDSA-P256-SHA256 signature of every + firmware image against the OEM root-of-trust public key stored in the + HSM before writing to flash. + status: approved + tags: [secure-boot, crypto] + fields: + req-type: integrity + priority: must + verification-criteria: > + Test with valid, corrupted, and missing signatures; confirm only + validly signed images are accepted. 
+ links: + - type: derives-from + target: CSG-001 + + - id: CSREQ-002 + type: cybersecurity-req + title: Protect firmware transfer with TLS 1.3 + description: > + All OTA firmware downloads shall use TLS 1.3 with mutual authentication + between the ECU and the backend update server. + status: approved + tags: [ota, tls] + fields: + req-type: encryption + priority: must + verification-criteria: > + Verify that downgrade to TLS 1.2 or plaintext is rejected; confirm + mutual certificate validation. + links: + - type: derives-from + target: CSG-001 + + - id: CSREQ-003 + type: cybersecurity-req + title: UDS SecurityAccess challenge-response via HSM + description: > + The UDS SecurityAccess service (0x27) shall implement a + challenge-response scheme using AES-128-CMAC with keys managed by the + on-chip HSM. Failed attempts shall trigger exponential back-off. + status: approved + tags: [diagnostics, hsm] + fields: + req-type: authentication + priority: must + verification-criteria: > + Test correct challenge-response flow, invalid key rejection, and + back-off timer activation after 3 consecutive failures. + links: + - type: derives-from + target: CSG-002 + + # ── Cybersecurity Designs (SEC.2) ─────────────────────────────────────── + + - id: CSD-001 + type: cybersecurity-design + title: Secure Boot chain with HSM-backed signature verification + description: > + A two-stage secure boot chain: the HSM verifies the bootloader first + stage using a ROM-rooted public key, then the bootloader verifies the + application image using an OEM key stored in HSM secure storage. + ECDSA-P256-SHA256 is used throughout. 
+ status: approved + tags: [secure-boot, hsm] + fields: + mechanism: secure boot + algorithm: ECDSA-P256-SHA256 + links: + - type: satisfies + target: CSREQ-001 + + - id: CSD-002 + type: cybersecurity-design + title: TLS 1.3 mutual-auth OTA transport and HSM-backed UDS auth + description: > + OTA channel secured with TLS 1.3 (ECDHE key exchange, AES-128-GCM + cipher suite, X.509 mutual authentication). UDS SecurityAccess uses + AES-128-CMAC challenge-response with keys stored in HSM SHE+ + key slots. + status: approved + tags: [tls, hsm, uds] + fields: + mechanism: TLS 1.3 + HSM CMAC + algorithm: AES-128-GCM / AES-128-CMAC + links: + - type: satisfies + target: CSREQ-002 + - type: satisfies + target: CSREQ-003 + + # ── Cybersecurity Implementations (SEC.3) ─────────────────────────────── + + - id: CSI-001 + type: cybersecurity-implementation + title: Bootloader secure-boot verification module + description: > + Implementation of the secure boot verification logic in the bootloader, + using the HSM driver API for ECDSA verify operations and secure key + storage access. + status: approved + tags: [secure-boot, code] + fields: + unit: src/boot/secure_boot.c + implementation-type: code + links: + - type: implements + target: CSD-001 + + - id: CSI-002 + type: cybersecurity-implementation + title: TLS 1.3 stack and UDS authentication handler + description: > + Integration of the wolfSSL TLS 1.3 library for OTA transport and + implementation of the UDS SecurityAccess handler calling the HSM + CMAC API for challenge-response authentication. 
+ status: approved + tags: [tls, uds, code] + fields: + unit: src/comms/tls_ota.c, src/diag/uds_security.c + implementation-type: code + links: + - type: implements + target: CSD-002 + + # ── Cybersecurity Verifications (SEC.4) ───────────────────────────────── + + - id: CSV-001 + type: cybersecurity-verification + title: Secure boot signature rejection test + description: > + Verify that the bootloader rejects firmware images with invalid, + corrupted, or missing ECDSA signatures and only accepts correctly + signed images. + status: approved + tags: [secure-boot, test] + fields: + method: automated-test + preconditions: + - HSM provisioned with OEM test root-of-trust key + - Test firmware images with valid, corrupted, and absent signatures + steps: + - step: 1 + action: Flash a validly signed firmware image + expected: Image accepted and written to application partition + - step: 2 + action: Flash an image with a corrupted signature + expected: Bootloader rejects the image and retains previous firmware + - step: 3 + action: Flash an image with no signature block + expected: Bootloader rejects the image with authentication error + links: + - type: verifies + target: CSREQ-001 + + - id: CSV-002 + type: cybersecurity-verification + title: OTA transport and UDS authentication penetration test + description: > + Penetration test covering the TLS 1.3 OTA transport channel and the + UDS SecurityAccess service. Confirms that TLS downgrade is rejected, + mutual authentication is enforced, and brute-force/replay attacks on + UDS are mitigated by the challenge-response scheme and exponential + back-off timer. 
+ status: approved + tags: [ota, diagnostics, pentest] + fields: + method: penetration-test + preconditions: + - ECU connected via OBD-II with DoIP enabled + - OTA backend test environment with valid and expired certificates + - Valid and invalid SecurityAccess key material available + steps: + - step: 1 + action: Initiate OTA download over TLS 1.3 with valid mutual certificates + expected: TLS session established and firmware downloaded + - step: 2 + action: Attempt TLS downgrade to 1.2 or plaintext + expected: ECU rejects the connection + - step: 3 + action: Attempt SecurityAccess with correct challenge-response + expected: Security session granted + - step: 4 + action: Replay a previously valid response to a new challenge + expected: Authentication rejected + - step: 5 + action: Send 3 consecutive invalid responses + expected: ECU enforces exponential back-off delay before next attempt + links: + - type: verifies + target: CSREQ-002 + - type: verifies + target: CSREQ-003 diff --git a/examples/cybersecurity/rivet.yaml b/examples/cybersecurity/rivet.yaml new file mode 100644 index 0000000..d3f20a1 --- /dev/null +++ b/examples/cybersecurity/rivet.yaml @@ -0,0 +1,10 @@ +# Run: rivet --schemas ../../schemas validate +project: + name: cybersecurity-example + schemas: + - common + - cybersecurity + +sources: + - path: cybersecurity.yaml + format: generic-yaml diff --git a/rivet-cli/Cargo.toml b/rivet-cli/Cargo.toml index ade3c2e..2b744d1 100644 --- a/rivet-cli/Cargo.toml +++ b/rivet-cli/Cargo.toml @@ -11,6 +11,10 @@ rust-version.workspace = true name = "rivet" path = "src/main.rs" +[features] +default = [] +wasm = ["rivet-core/wasm"] + [dependencies] rivet-core = { path = "../rivet-core" } clap = { workspace = true } @@ -23,3 +27,5 @@ serde_json = { workspace = true } axum = { workspace = true } tokio = { workspace = true } tower-http = { workspace = true } +etch = { path = "../etch" } +petgraph = { workspace = true } diff --git a/rivet-cli/src/main.rs 
b/rivet-cli/src/main.rs index f67f215..33dcae0 100644 --- a/rivet-cli/src/main.rs +++ b/rivet-cli/src/main.rs @@ -4,6 +4,7 @@ use std::process::ExitCode; use anyhow::{Context, Result}; use clap::{Parser, Subcommand}; +use rivet_core::diff::{ArtifactDiff, DiagnosticDiff}; use rivet_core::links::LinkGraph; use rivet_core::matrix::{self, Direction}; use rivet_core::schema::Severity; @@ -79,12 +80,50 @@ enum Command { schema: Option, }, + /// Compare two versions of artifacts and show what changed + Diff { + /// Path to the base artifact directory (older version) + #[arg(long)] + base: Option, + + /// Path to the head artifact directory (newer version) + #[arg(long)] + head: Option, + }, + + /// Export artifacts to a specified format + Export { + /// Output format: "reqif", "generic-yaml" + #[arg(short, long)] + format: String, + + /// Output file path (stdout if omitted) + #[arg(short, long)] + output: Option, + }, + /// Start the HTMX-powered dashboard server Serve { /// Port to listen on #[arg(short = 'P', long, default_value = "3000")] port: u16, }, + + /// Import artifacts using a custom WASM adapter component + #[cfg(feature = "wasm")] + Import { + /// Path to the WASM adapter component file (.wasm) + #[arg(long)] + adapter: PathBuf, + + /// Path to the source data (file or directory) + #[arg(long)] + source: PathBuf, + + /// Adapter configuration entries (key=value pairs) + #[arg(long = "config", value_parser = parse_key_val)] + config_entries: Vec<(String, String)>, + }, } fn main() -> ExitCode { @@ -126,6 +165,8 @@ fn run(cli: Cli) -> Result { link, direction, } => cmd_matrix(&cli, from, to, link.as_deref(), direction), + Command::Diff { base, head } => cmd_diff(&cli, base.as_deref(), head.as_deref()), + Command::Export { format, output } => cmd_export(&cli, format, output.as_deref()), Command::Serve { port } => { let port = *port; let (store, schema, graph) = load_project(&cli)?; @@ -133,6 +174,12 @@ fn run(cli: Cli) -> Result { rt.block_on(serve::run(store, 
schema, graph, port))?; Ok(true) } + #[cfg(feature = "wasm")] + Command::Import { + adapter, + source, + config_entries, + } => cmd_import(adapter, source, config_entries), } } @@ -327,6 +374,198 @@ fn cmd_matrix( Ok(true) } +/// Export all project artifacts in the specified format. +fn cmd_export(cli: &Cli, format: &str, output: Option<&std::path::Path>) -> Result { + use rivet_core::adapter::{Adapter, AdapterConfig}; + + let (store, _, _) = load_project(cli)?; + let artifacts: Vec<_> = store.iter().cloned().collect(); + let config = AdapterConfig::default(); + + let bytes = match format { + "reqif" => { + let adapter = rivet_core::reqif::ReqIfAdapter::new(); + adapter + .export(&artifacts, &config) + .map_err(|e| anyhow::anyhow!("{e}"))? + } + "generic-yaml" | "generic" => { + let adapter = rivet_core::formats::generic::GenericYamlAdapter::new(); + adapter + .export(&artifacts, &config) + .map_err(|e| anyhow::anyhow!("{e}"))? + } + other => { + anyhow::bail!("unsupported export format: {other} (supported: reqif, generic-yaml)") + } + }; + + if let Some(path) = output { + std::fs::write(path, &bytes).with_context(|| format!("writing {}", path.display()))?; + println!( + "Exported {} artifacts to {}", + artifacts.len(), + path.display() + ); + } else { + use std::io::Write; + std::io::stdout() + .write_all(&bytes) + .context("writing to stdout")?; + } + + Ok(true) +} + +/// Compare two artifact sets and display the differences. +fn cmd_diff( + cli: &Cli, + base_path: Option<&std::path::Path>, + head_path: Option<&std::path::Path>, +) -> Result { + let (base_store, base_schema, base_graph, head_store, head_schema, head_graph) = + match (base_path, head_path) { + (Some(bp), Some(hp)) => { + // Explicit --base and --head directories: load each as a + // standalone project. 
+ let base_cli = Cli { + project: bp.to_path_buf(), + schemas: cli.schemas.clone(), + verbose: cli.verbose, + command: Command::Validate, + }; + let head_cli = Cli { + project: hp.to_path_buf(), + schemas: cli.schemas.clone(), + verbose: cli.verbose, + command: Command::Validate, + }; + let (bs, bsc, bg) = load_project(&base_cli)?; + let (hs, hsc, hg) = load_project(&head_cli)?; + (bs, bsc, bg, hs, hsc, hg) + } + _ => { + // Default: load the project twice (same working tree). This + // is a placeholder — a future version will compare against + // the last clean git state. + let (s1, sc1, g1) = load_project(cli)?; + let (s2, sc2, g2) = load_project(cli)?; + (s1, sc1, g1, s2, sc2, g2) + } + }; + + // Compute artifact diff + let diff = ArtifactDiff::compute(&base_store, &head_store); + + // Compute diagnostic diff + let base_diags = validate::validate(&base_store, &base_schema, &base_graph); + let head_diags = validate::validate(&head_store, &head_schema, &head_graph); + let diag_diff = DiagnosticDiff::compute(&base_diags, &head_diags); + + // ── Display ────────────────────────────────────────────────────── + + let use_color = std::io::IsTerminal::is_terminal(&std::io::stdout()); + + let green = |s: &str| { + if use_color { + format!("\x1b[32m{s}\x1b[0m") + } else { + format!("+ {s}") + } + }; + let red = |s: &str| { + if use_color { + format!("\x1b[31m{s}\x1b[0m") + } else { + format!("- {s}") + } + }; + let yellow = |s: &str| { + if use_color { + format!("\x1b[33m{s}\x1b[0m") + } else { + format!("~ {s}") + } + }; + + // Added + for id in &diff.added { + let title = head_store.get(id).map(|a| a.title.as_str()).unwrap_or(""); + println!("{}", green(&format!("{id} {title}"))); + } + + // Removed + for id in &diff.removed { + let title = base_store.get(id).map(|a| a.title.as_str()).unwrap_or(""); + println!("{}", red(&format!("{id} {title}"))); + } + + // Modified + for change in &diff.modified { + println!("{}", yellow(&change.id)); + + if let Some((old, new)) = 
&change.title_changed { + println!(" title: {} -> {}", red(old), green(new)); + } + if change.description_changed { + println!(" description: changed"); + } + if let Some((old, new)) = &change.status_changed { + let old_s = old.as_deref().unwrap_or("(none)"); + let new_s = new.as_deref().unwrap_or("(none)"); + println!(" status: {} -> {}", red(old_s), green(new_s)); + } + if let Some((old, new)) = &change.type_changed { + println!(" type: {} -> {}", red(old), green(new)); + } + for tag in &change.tags_added { + println!(" tag: {}", green(tag)); + } + for tag in &change.tags_removed { + println!(" tag: {}", red(tag)); + } + for link in &change.links_added { + println!( + " link: {}", + green(&format!("{} -> {}", link.link_type, link.target)) + ); + } + for link in &change.links_removed { + println!( + " link: {}", + red(&format!("{} -> {}", link.link_type, link.target)) + ); + } + for field in &change.fields_changed { + println!(" field changed: {field}"); + } + } + + // Summary + println!(); + println!("{}", diff.summary()); + + // Diagnostic diff + if !diag_diff.is_empty() { + println!(); + for d in &diag_diff.new_errors { + println!("{}", red(&format!("NEW {d}"))); + } + for d in &diag_diff.resolved_errors { + println!("{}", green(&format!("RESOLVED {d}"))); + } + for d in &diag_diff.new_warnings { + println!("{}", yellow(&format!("NEW {d}"))); + } + for d in &diag_diff.resolved_warnings { + println!("{}", green(&format!("RESOLVED {d}"))); + } + println!("{}", diag_diff.summary()); + } + + Ok(true) +} + // ── Helpers ────────────────────────────────────────────────────────────── fn resolve_schemas_dir(cli: &Cli) -> PathBuf { @@ -391,6 +630,65 @@ fn print_stats(store: &Store) { println!(" {:30} {:>4}", "TOTAL", store.len()); } +/// Parse a key=value pair for adapter configuration. 
+#[cfg(feature = "wasm")] +fn parse_key_val(s: &str) -> Result<(String, String), String> { + let pos = s + .find('=') + .ok_or_else(|| format!("invalid KEY=VALUE: no '=' found in '{s}'"))?; + Ok((s[..pos].to_string(), s[pos + 1..].to_string())) +} + +/// Import artifacts using a WASM adapter component. +#[cfg(feature = "wasm")] +fn cmd_import( + adapter_path: &std::path::Path, + source_path: &std::path::Path, + config_entries: &[(String, String)], +) -> Result { + use rivet_core::adapter::{Adapter, AdapterConfig, AdapterSource}; + use rivet_core::wasm_runtime::WasmAdapterRuntime; + use std::collections::BTreeMap; + + println!("Loading WASM adapter: {}", adapter_path.display()); + + let runtime = WasmAdapterRuntime::with_defaults().context("failed to create WASM runtime")?; + + let adapter = runtime + .load_adapter(adapter_path) + .context("failed to load WASM adapter")?; + + println!(" Adapter ID: {}", adapter.id()); + println!(" Adapter name: {}", adapter.name()); + + let source = if source_path.is_dir() { + AdapterSource::Directory(source_path.to_path_buf()) + } else { + AdapterSource::Path(source_path.to_path_buf()) + }; + + let config = AdapterConfig { + entries: config_entries + .iter() + .cloned() + .collect::>(), + }; + + let artifacts = adapter + .import(&source, &config) + .context("adapter import failed")?; + + println!("\nImported {} artifacts:", artifacts.len()); + for artifact in &artifacts { + println!( + " {:20} {:25} {}", + artifact.id, artifact.artifact_type, artifact.title + ); + } + + Ok(true) +} + fn print_diagnostics(diagnostics: &[validate::Diagnostic]) { if diagnostics.is_empty() { println!("\nNo issues found."); diff --git a/rivet-cli/src/serve.rs b/rivet-cli/src/serve.rs index efd5b8a..e96a33e 100644 --- a/rivet-cli/src/serve.rs +++ b/rivet-cli/src/serve.rs @@ -1,3 +1,4 @@ +use std::collections::HashMap; use std::sync::Arc; use anyhow::Result; @@ -5,7 +6,12 @@ use axum::Router; use axum::extract::{Path, Query, State}; use 
axum::response::Html; use axum::routing::get; +use petgraph::graph::{Graph, NodeIndex}; +use petgraph::visit::EdgeRef; +use etch::filter::ego_subgraph; +use etch::layout::{self as pgv_layout, EdgeInfo, LayoutOptions, NodeInfo}; +use etch::svg::{SvgOptions, render_svg}; use rivet_core::links::LinkGraph; use rivet_core::matrix::{self, Direction}; use rivet_core::schema::{Schema, Severity}; @@ -31,8 +37,10 @@ pub async fn run(store: Store, schema: Schema, graph: LinkGraph, port: u16) -> R .route("/", get(index)) .route("/artifacts", get(artifacts_list)) .route("/artifacts/{id}", get(artifact_detail)) + .route("/artifacts/{id}/graph", get(artifact_graph)) .route("/validate", get(validate_view)) .route("/matrix", get(matrix_view)) + .route("/graph", get(graph_view)) .route("/stats", get(stats_view)) .with_state(state); @@ -44,6 +52,57 @@ pub async fn run(store: Store, schema: Schema, graph: LinkGraph, port: u16) -> R Ok(()) } +// ── Color palette ──────────────────────────────────────────────────────── + +fn type_color_map() -> HashMap { + let pairs: &[(&str, &str)] = &[ + // STPA + ("loss", "#dc3545"), + ("hazard", "#fd7e14"), + ("system-constraint", "#20c997"), + ("controller", "#6f42c1"), + ("uca", "#e83e8c"), + ("control-action", "#17a2b8"), + ("feedback", "#6610f2"), + ("causal-factor", "#d63384"), + ("safety-constraint", "#20c997"), + ("loss-scenario", "#e83e8c"), + // ASPICE + ("stakeholder-req", "#0d6efd"), + ("system-req", "#0dcaf0"), + ("system-architecture", "#198754"), + ("sw-req", "#198754"), + ("sw-architecture", "#0d6efd"), + ("sw-detailed-design", "#6610f2"), + ("sw-unit", "#6f42c1"), + ("system-verification", "#6610f2"), + ("sw-verification", "#6610f2"), + ("system-integration-verification", "#6610f2"), + ("sw-integration-verification", "#6610f2"), + ("sw-unit-verification", "#6610f2"), + ("qualification-verification", "#6610f2"), + // Dev + ("requirement", "#0d6efd"), + ("design-decision", "#198754"), + ("feature", "#6f42c1"), + // Cybersecurity + 
("asset", "#ffc107"), + ("threat", "#dc3545"), + ("cybersecurity-req", "#fd7e14"), + ("vulnerability", "#e83e8c"), + ("attack-path", "#dc3545"), + ("cybersecurity-goal", "#0d6efd"), + ("cybersecurity-control", "#198754"), + ("security-verification", "#6610f2"), + ("risk-assessment", "#fd7e14"), + ("security-event", "#e83e8c"), + ]; + pairs + .iter() + .map(|(k, v)| (k.to_string(), v.to_string())) + .collect() +} + // ── CSS ────────────────────────────────────────────────────────────────── const CSS: &str = r#" @@ -89,11 +148,62 @@ dl{margin:.5rem 0} dt{font-weight:600;font-size:.85rem;color:#495057;margin-top:.5rem} dd{margin-left:0;margin-bottom:.25rem} .meta{color:#6c757d;font-size:.85rem} +.nav-icon{display:inline-block;width:1.1rem;text-align:center;margin-right:.3rem;font-size:.85rem} +.graph-container{border:1px solid #dee2e6;border-radius:8px;overflow:hidden;background:#fff;cursor:grab} +.graph-container:active{cursor:grabbing} +.graph-container svg{display:block;width:100%;height:auto} +.filter-grid{display:flex;flex-wrap:wrap;gap:.5rem;margin-bottom:.75rem} +.filter-grid label{font-size:.82rem;display:flex;align-items:center;gap:.25rem} +.filter-grid input[type="checkbox"]{margin:0} +"#; + +// ── Pan/zoom JS ────────────────────────────────────────────────────────── + +const GRAPH_JS: &str = r#" + "#; // ── Layout ─────────────────────────────────────────────────────────────── -fn layout(content: &str) -> Html { +fn page_layout(content: &str) -> Html { Html(format!( r##" @@ -109,16 +219,18 @@ fn layout(content: &str) -> Html {
{content}
+{GRAPH_JS} "## )) @@ -128,7 +240,7 @@ fn layout(content: &str) -> Html { async fn index(State(state): State>) -> Html { let inner = stats_partial(&state); - layout(&inner) + page_layout(&inner) } async fn stats_view(State(state): State>) -> Html { @@ -347,11 +459,374 @@ async fn artifact_detail( html.push_str(""); } - html.push_str("

← Back to artifacts

"); + // Show in graph link + html.push_str(&format!( + r##"

Show in graph +  |  + ← Back to artifacts

"##, + id_esc = html_escape(&id), + )); + + Html(html) +} + +// ── Graph visualization ────────────────────────────────────────────────── + +#[derive(Debug, serde::Deserialize)] +struct GraphParams { + types: Option, + link_types: Option, + #[serde(default = "default_depth")] + depth: usize, + focus: Option, +} + +fn default_depth() -> usize { + 0 +} + +/// Build a filtered subgraph based on query params and return SVG. +async fn graph_view( + State(state): State>, + Query(params): Query, +) -> Html { + let store = &state.store; + let link_graph = &state.graph; + let pg = link_graph.graph(); + let node_map = link_graph.node_map(); + + // Parse filter sets + let type_filter: Option> = params + .types + .as_ref() + .filter(|s| !s.is_empty()) + .map(|s| s.split(',').map(|t| t.trim().to_string()).collect()); + let link_filter: Option> = params + .link_types + .as_ref() + .filter(|s| !s.is_empty()) + .map(|s| s.split(',').map(|t| t.trim().to_string()).collect()); + + // Build the subgraph to visualize + let sub: Graph; + + if let Some(focus_id) = ¶ms.focus { + if focus_id.is_empty() { + // No focus, fall through to full graph + sub = build_filtered_subgraph(pg, store, node_map, &type_filter, &link_filter); + } else if let Some(&focus_idx) = node_map.get(focus_id.as_str()) { + let hops = if params.depth > 0 { params.depth } else { 3 }; + let ego = ego_subgraph(pg, focus_idx, hops); + // Apply type/link filters on the ego subgraph + sub = apply_filters_to_graph(&ego, store, &type_filter, &link_filter); + } else { + sub = build_filtered_subgraph(pg, store, node_map, &type_filter, &link_filter); + } + } else { + sub = build_filtered_subgraph(pg, store, node_map, &type_filter, &link_filter); + } + + let colors = type_color_map(); + let svg_opts = SvgOptions { + type_colors: colors, + highlight: params.focus.clone().filter(|s| !s.is_empty()), + ..SvgOptions::default() + }; + + let gl = pgv_layout::layout( + &sub, + &|_idx, n| { + let atype = store + .get(n.as_str()) + 
.map(|a| a.artifact_type.clone()) + .unwrap_or_default(); + let title = store + .get(n.as_str()) + .map(|a| a.title.clone()) + .unwrap_or_default(); + let sublabel = if title.len() > 24 { + Some(format!("{}...", &title[..22])) + } else if title.is_empty() { + None + } else { + Some(title) + }; + NodeInfo { + id: n.clone(), + label: n.clone(), + node_type: atype, + sublabel, + } + }, + &|_idx, e| EdgeInfo { label: e.clone() }, + &LayoutOptions::default(), + ); + + let svg = render_svg(&gl, &svg_opts); + + // Build filter controls + let mut html = String::from("

Graph

"); + + // Filter form + html.push_str("
"); + html.push_str( + "
", + ); + + // Type checkboxes + let mut all_types: Vec<&str> = store.types().collect(); + all_types.sort(); + html.push_str("
"); + for t in &all_types { + let checked = match &type_filter { + Some(f) => { + if f.iter().any(|x| x == *t) { + " checked" + } else { + "" + } + } + None => " checked", + }; + html.push_str(&format!( + "" + )); + } + html.push_str("
"); + + // Focus input + let focus_val = params.focus.as_deref().unwrap_or(""); + html.push_str(&format!( + "

\ +
", + html_escape(focus_val) + )); + + // Depth slider + let depth_val = if params.depth > 0 { params.depth } else { 3 }; + html.push_str(&format!( + "

\ +
" + )); + + // Link types input + let lt_val = params.link_types.as_deref().unwrap_or(""); + html.push_str(&format!( + "

\ +
", + html_escape(lt_val) + )); + + html.push_str("

"); + html.push_str("
"); + + // SVG card + html.push_str("
"); + html.push_str(&svg); + html.push_str("
"); + + html.push_str(&format!( + "

{} nodes, {} edges

", + gl.nodes.len(), + gl.edges.len() + )); + + Html(html) +} + +// ── Ego graph for a single artifact ────────────────────────────────────── + +#[derive(Debug, serde::Deserialize)] +struct EgoParams { + #[serde(default = "default_ego_hops")] + hops: usize, +} + +fn default_ego_hops() -> usize { + 2 +} + +async fn artifact_graph( + State(state): State>, + Path(id): Path, + Query(params): Query, +) -> Html { + let store = &state.store; + let link_graph = &state.graph; + let pg = link_graph.graph(); + let node_map = link_graph.node_map(); + + let Some(&focus_idx) = node_map.get(id.as_str()) else { + return Html(format!( + "

Not Found

Artifact {} not in graph.

", + html_escape(&id) + )); + }; + + let hops = if params.hops > 0 { params.hops } else { 2 }; + let sub = ego_subgraph(pg, focus_idx, hops); + + let colors = type_color_map(); + let svg_opts = SvgOptions { + type_colors: colors, + highlight: Some(id.clone()), + ..SvgOptions::default() + }; + + let gl = pgv_layout::layout( + &sub, + &|_idx, n| { + let atype = store + .get(n.as_str()) + .map(|a| a.artifact_type.clone()) + .unwrap_or_default(); + let title = store + .get(n.as_str()) + .map(|a| a.title.clone()) + .unwrap_or_default(); + let sublabel = if title.len() > 24 { + Some(format!("{}...", &title[..22])) + } else if title.is_empty() { + None + } else { + Some(title) + }; + NodeInfo { + id: n.clone(), + label: n.clone(), + node_type: atype, + sublabel, + } + }, + &|_idx, e| EdgeInfo { label: e.clone() }, + &LayoutOptions::default(), + ); + + let svg = render_svg(&gl, &svg_opts); + + let mut html = format!("

Neighborhood of {}

", html_escape(&id),); + + // Hop control + html.push_str("
"); + html.push_str(&format!( + "
\ +

\ +
\ +

\ +
", + id_esc = html_escape(&id), + )); + + html.push_str("
"); + html.push_str(&svg); + html.push_str("
"); + + html.push_str(&format!( + "

{} nodes, {} edges ({}-hop neighborhood)

", + gl.nodes.len(), + gl.edges.len(), + hops + )); + + html.push_str(&format!( + r##"

← Back to {id_esc} +  |  + Open in full graph

"##, + id_esc = html_escape(&id), + )); Html(html) } +/// Build a filtered subgraph from the full petgraph, keeping only nodes +/// whose artifact types match `type_filter` and edges matching `link_filter`. +fn build_filtered_subgraph( + pg: &petgraph::Graph, + store: &Store, + node_map: &HashMap, + type_filter: &Option>, + link_filter: &Option>, +) -> Graph { + let mut sub = Graph::new(); + let mut old_to_new: HashMap = HashMap::new(); + + // Add nodes that pass the type filter. + for (id, &old_idx) in node_map { + let include = match type_filter { + Some(types) => store + .get(id.as_str()) + .map(|a| types.iter().any(|t| t == &a.artifact_type)) + .unwrap_or(false), + None => true, + }; + if include { + let new_idx = sub.add_node(pg[old_idx].clone()); + old_to_new.insert(old_idx, new_idx); + } + } + + // Add edges where both endpoints survived and link type matches. + for edge in pg.edge_references() { + if let (Some(&new_src), Some(&new_dst)) = ( + old_to_new.get(&edge.source()), + old_to_new.get(&edge.target()), + ) { + let include = match link_filter { + Some(lt) => lt.iter().any(|t| t == edge.weight()), + None => true, + }; + if include { + sub.add_edge(new_src, new_dst, edge.weight().clone()); + } + } + } + + sub +} + +/// Apply type and link filters to an already-extracted subgraph. 
+fn apply_filters_to_graph( + graph: &Graph, + store: &Store, + type_filter: &Option>, + link_filter: &Option>, +) -> Graph { + let mut sub = Graph::new(); + let mut old_to_new: HashMap = HashMap::new(); + + for idx in graph.node_indices() { + let id = &graph[idx]; + let include = match type_filter { + Some(types) => store + .get(id.as_str()) + .map(|a| types.iter().any(|t| t == &a.artifact_type)) + .unwrap_or(false), + None => true, + }; + if include { + let new_idx = sub.add_node(id.clone()); + old_to_new.insert(idx, new_idx); + } + } + + for edge in graph.edge_references() { + if let (Some(&new_src), Some(&new_dst)) = ( + old_to_new.get(&edge.source()), + old_to_new.get(&edge.target()), + ) { + let include = match link_filter { + Some(lt) => lt.iter().any(|t| t == edge.weight()), + None => true, + }; + if include { + sub.add_edge(new_src, new_dst, edge.weight().clone()); + } + } + } + + sub +} + // ── Validation ─────────────────────────────────────────────────────────── async fn validate_view(State(state): State>) -> Html { diff --git a/rivet-core/Cargo.toml b/rivet-core/Cargo.toml index 3dcb4f9..f7efcd7 100644 --- a/rivet-core/Cargo.toml +++ b/rivet-core/Cargo.toml @@ -7,6 +7,11 @@ edition.workspace = true license.workspace = true rust-version.workspace = true +[features] +default = [] +oslc = ["dep:reqwest", "dep:urlencoding"] +wasm = ["dep:wasmtime", "dep:wasmtime-wasi"] + [dependencies] serde = { workspace = true } serde_yaml = { workspace = true } @@ -14,10 +19,21 @@ serde_json = { workspace = true } thiserror = { workspace = true } petgraph = { workspace = true } log = { workspace = true } +quick-xml = { workspace = true } + +# OSLC client (optional, behind "oslc" feature) +reqwest = { workspace = true, optional = true } +urlencoding = { workspace = true, optional = true } + +# WASM runtime (optional, behind "wasm" feature) +wasmtime = { workspace = true, optional = true } +wasmtime-wasi = { workspace = true, optional = true } [dev-dependencies] proptest 
= "1.5" criterion = { workspace = true } +wiremock = "0.6" +tokio = { workspace = true } [[bench]] name = "core_benchmarks" diff --git a/rivet-core/src/diff.rs b/rivet-core/src/diff.rs new file mode 100644 index 0000000..e68a798 --- /dev/null +++ b/rivet-core/src/diff.rs @@ -0,0 +1,350 @@ +//! Diff engine — compare two artifact stores and produce a structured delta. +//! +//! [`ArtifactDiff`] captures added, removed, modified, and unchanged artifacts +//! between a *base* and a *head* [`Store`]. [`DiagnosticDiff`] does the same +//! for validation diagnostics. + +use std::collections::BTreeSet; + +use crate::model::Link; +use crate::schema::Severity; +use crate::store::Store; +use crate::validate::Diagnostic; + +// ── Artifact-level diff ────────────────────────────────────────────────── + +/// Difference between two artifact sets. +#[derive(Debug)] +pub struct ArtifactDiff { + /// Artifact IDs only present in head. + pub added: Vec, + /// Artifact IDs only present in base. + pub removed: Vec, + /// Artifacts present in both but structurally different. + pub modified: Vec, + /// Count of artifacts that are identical in both stores. + pub unchanged: usize, +} + +/// Per-artifact change record. +#[derive(Debug)] +pub struct ArtifactChange { + pub id: String, + pub title_changed: Option<(String, String)>, + pub description_changed: bool, + pub status_changed: Option<(Option, Option)>, + pub type_changed: Option<(String, String)>, + pub tags_added: Vec, + pub tags_removed: Vec, + pub links_added: Vec, + pub links_removed: Vec, + pub fields_changed: Vec, +} + +impl ArtifactChange { + /// Returns `true` if this change record carries no actual differences. 
+ pub fn is_empty(&self) -> bool { + self.title_changed.is_none() + && !self.description_changed + && self.status_changed.is_none() + && self.type_changed.is_none() + && self.tags_added.is_empty() + && self.tags_removed.is_empty() + && self.links_added.is_empty() + && self.links_removed.is_empty() + && self.fields_changed.is_empty() + } +} + +impl ArtifactDiff { + /// Compare two stores and produce a diff. + pub fn compute(base: &Store, head: &Store) -> Self { + let base_ids: BTreeSet = base.iter().map(|a| a.id.clone()).collect(); + let head_ids: BTreeSet = head.iter().map(|a| a.id.clone()).collect(); + + let added: Vec = head_ids.difference(&base_ids).cloned().collect(); + let removed: Vec = base_ids.difference(&head_ids).cloned().collect(); + + let common: BTreeSet<&String> = base_ids.intersection(&head_ids).collect(); + + let mut modified = Vec::new(); + let mut unchanged: usize = 0; + + for id in &common { + let b = base.get(id).unwrap(); + let h = head.get(id).unwrap(); + + let title_changed = if b.title != h.title { + Some((b.title.clone(), h.title.clone())) + } else { + None + }; + + let description_changed = b.description != h.description; + + let status_changed = if b.status != h.status { + Some((b.status.clone(), h.status.clone())) + } else { + None + }; + + let type_changed = if b.artifact_type != h.artifact_type { + Some((b.artifact_type.clone(), h.artifact_type.clone())) + } else { + None + }; + + // Tags diff (order-insensitive) + let base_tags: BTreeSet<&String> = b.tags.iter().collect(); + let head_tags: BTreeSet<&String> = h.tags.iter().collect(); + let tags_added: Vec = head_tags + .difference(&base_tags) + .map(|s| (*s).clone()) + .collect(); + let tags_removed: Vec = base_tags + .difference(&head_tags) + .map(|s| (*s).clone()) + .collect(); + + // Links diff (order-insensitive) + let base_links: BTreeSet<(&String, &String)> = + b.links.iter().map(|l| (&l.link_type, &l.target)).collect(); + let head_links: BTreeSet<(&String, &String)> = + 
h.links.iter().map(|l| (&l.link_type, &l.target)).collect(); + let links_added: Vec = head_links + .difference(&base_links) + .map(|(lt, tgt)| Link { + link_type: (*lt).clone(), + target: (*tgt).clone(), + }) + .collect(); + let links_removed: Vec = base_links + .difference(&head_links) + .map(|(lt, tgt)| Link { + link_type: (*lt).clone(), + target: (*tgt).clone(), + }) + .collect(); + + // Fields diff — compare serialised YAML values for equality + let mut fields_changed = Vec::new(); + let all_field_keys: BTreeSet<&String> = + b.fields.keys().chain(h.fields.keys()).collect(); + for key in all_field_keys { + let bv = b.fields.get(key); + let hv = h.fields.get(key); + if bv != hv { + fields_changed.push(key.clone()); + } + } + + let change = ArtifactChange { + id: (*id).clone(), + title_changed, + description_changed, + status_changed, + type_changed, + tags_added, + tags_removed, + links_added, + links_removed, + fields_changed, + }; + + if change.is_empty() { + unchanged += 1; + } else { + modified.push(change); + } + } + + Self { + added, + removed, + modified, + unchanged, + } + } + + /// Returns `true` when there are no differences at all. + pub fn is_empty(&self) -> bool { + self.added.is_empty() && self.removed.is_empty() && self.modified.is_empty() + } + + /// Human-readable one-line summary. + pub fn summary(&self) -> String { + format!( + "{} added, {} removed, {} modified, {} unchanged", + self.added.len(), + self.removed.len(), + self.modified.len(), + self.unchanged, + ) + } +} + +// ── Diagnostic-level diff ──────────────────────────────────────────────── + +/// Difference in validation diagnostics between two versions. +#[derive(Debug)] +pub struct DiagnosticDiff { + pub new_errors: Vec, + pub resolved_errors: Vec, + pub new_warnings: Vec, + pub resolved_warnings: Vec, +} + +impl DiagnosticDiff { + /// Compare two diagnostic sets. 
+ pub fn compute(base: &[Diagnostic], head: &[Diagnostic]) -> Self { + let base_errors: Vec<&Diagnostic> = base + .iter() + .filter(|d| d.severity == Severity::Error) + .collect(); + let head_errors: Vec<&Diagnostic> = head + .iter() + .filter(|d| d.severity == Severity::Error) + .collect(); + let base_warnings: Vec<&Diagnostic> = base + .iter() + .filter(|d| d.severity == Severity::Warning) + .collect(); + let head_warnings: Vec<&Diagnostic> = head + .iter() + .filter(|d| d.severity == Severity::Warning) + .collect(); + + let new_errors = head_errors + .iter() + .filter(|d| !base_errors.contains(d)) + .cloned() + .cloned() + .collect(); + let resolved_errors = base_errors + .iter() + .filter(|d| !head_errors.contains(d)) + .cloned() + .cloned() + .collect(); + let new_warnings = head_warnings + .iter() + .filter(|d| !base_warnings.contains(d)) + .cloned() + .cloned() + .collect(); + let resolved_warnings = base_warnings + .iter() + .filter(|d| !head_warnings.contains(d)) + .cloned() + .cloned() + .collect(); + + Self { + new_errors, + resolved_errors, + new_warnings, + resolved_warnings, + } + } + + /// Returns `true` when there is no diagnostic change. + pub fn is_empty(&self) -> bool { + self.new_errors.is_empty() + && self.resolved_errors.is_empty() + && self.new_warnings.is_empty() + && self.resolved_warnings.is_empty() + } + + /// Human-readable one-line summary. 
+ pub fn summary(&self) -> String { + format!( + "{} new errors, {} resolved errors, {} new warnings, {} resolved warnings", + self.new_errors.len(), + self.resolved_errors.len(), + self.new_warnings.len(), + self.resolved_warnings.len(), + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::model::Artifact; + use std::collections::BTreeMap; + + fn make_artifact(id: &str, art_type: &str, title: &str) -> Artifact { + Artifact { + id: id.into(), + artifact_type: art_type.into(), + title: title.into(), + description: None, + status: None, + tags: vec![], + links: vec![], + fields: BTreeMap::new(), + source_file: None, + } + } + + #[test] + fn empty_diff() { + let a = Store::new(); + let b = Store::new(); + let diff = ArtifactDiff::compute(&a, &b); + assert!(diff.is_empty()); + assert_eq!(diff.unchanged, 0); + } + + #[test] + fn identical_stores() { + let mut a = Store::new(); + a.insert(make_artifact("X-1", "loss", "Loss one")).unwrap(); + let mut b = Store::new(); + b.insert(make_artifact("X-1", "loss", "Loss one")).unwrap(); + let diff = ArtifactDiff::compute(&a, &b); + assert!(diff.is_empty()); + assert_eq!(diff.unchanged, 1); + } + + #[test] + fn added_artifact() { + let base = Store::new(); + let mut head = Store::new(); + head.insert(make_artifact("N-1", "loss", "New loss")) + .unwrap(); + let diff = ArtifactDiff::compute(&base, &head); + assert_eq!(diff.added, vec!["N-1".to_string()]); + assert!(diff.removed.is_empty()); + assert!(diff.modified.is_empty()); + } + + #[test] + fn removed_artifact() { + let mut base = Store::new(); + base.insert(make_artifact("R-1", "loss", "Old loss")) + .unwrap(); + let head = Store::new(); + let diff = ArtifactDiff::compute(&base, &head); + assert!(diff.added.is_empty()); + assert_eq!(diff.removed, vec!["R-1".to_string()]); + } + + #[test] + fn modified_title() { + let mut base = Store::new(); + base.insert(make_artifact("M-1", "loss", "Old title")) + .unwrap(); + let mut head = Store::new(); + 
head.insert(make_artifact("M-1", "loss", "New title")) + .unwrap(); + let diff = ArtifactDiff::compute(&base, &head); + assert!(!diff.is_empty()); + assert_eq!(diff.modified.len(), 1); + let change = &diff.modified[0]; + assert_eq!( + change.title_changed, + Some(("Old title".into(), "New title".into())) + ); + } +} diff --git a/rivet-core/src/formats/stpa.rs b/rivet-core/src/formats/stpa.rs index 0c3e965..64494e7 100644 --- a/rivet-core/src/formats/stpa.rs +++ b/rivet-core/src/formats/stpa.rs @@ -30,6 +30,7 @@ impl StpaYamlAdapter { "control-action".into(), "uca".into(), "controller-constraint".into(), + "loss-scenario".into(), ], } } @@ -83,6 +84,7 @@ pub fn import_stpa_directory(dir: &Path) -> Result, Error> { ("control-structure.yaml", parse_control_structure), ("ucas.yaml", parse_ucas), ("controller-constraints.yaml", parse_controller_constraints), + ("loss-scenarios.yaml", parse_loss_scenarios), ]; for (filename, parser) in file_parsers { @@ -118,6 +120,7 @@ pub fn import_stpa_file(path: &Path) -> Result, Error> { "control-structure.yaml" => parse_control_structure, "ucas.yaml" => parse_ucas, "controller-constraints.yaml" => parse_controller_constraints, + "loss-scenarios.yaml" => parse_loss_scenarios, _ => { return Err(Error::Adapter(format!( "unknown STPA file type: {}", @@ -609,6 +612,93 @@ fn parse_controller_constraints(path: &Path) -> Result, Error> { .collect()) } +// ── Loss scenarios ──────────────────────────────────────────────────────── + +#[derive(Deserialize)] +struct LossScenariosFile { + #[serde(rename = "loss-scenarios")] + loss_scenarios: Vec, +} + +#[derive(Deserialize)] +struct StpaLossScenario { + id: String, + title: String, + #[serde(default)] + uca: Option, + #[serde(default, rename = "type")] + scenario_type: Option, + #[serde(default)] + scenario: Option, + #[serde(default, rename = "causal-factors")] + causal_factors: Vec, + #[serde(default)] + hazards: Vec, + #[serde(default, rename = "process-model-flaw")] + process_model_flaw: 
Option, +} + +fn parse_loss_scenarios(path: &Path) -> Result, Error> { + let content = read_file(path)?; + let file: LossScenariosFile = serde_yaml::from_str(&content)?; + + Ok(file + .loss_scenarios + .into_iter() + .map(|ls| { + let mut fields = BTreeMap::new(); + if let Some(st) = &ls.scenario_type { + fields.insert( + "scenario-type".into(), + serde_yaml::Value::String(st.clone()), + ); + } + if !ls.causal_factors.is_empty() { + fields.insert( + "causal-factors".into(), + serde_yaml::to_value(&ls.causal_factors).unwrap(), + ); + } + if let Some(flaw) = &ls.process_model_flaw { + fields.insert( + "process-model-flaw".into(), + serde_yaml::Value::String(flaw.clone()), + ); + } + + let mut links = Vec::new(); + + // Link to the UCA that causes this scenario + if let Some(uca) = &ls.uca { + links.push(Link { + link_type: "caused-by-uca".into(), + target: uca.clone(), + }); + } + + // Link to hazards this scenario leads to + for hazard in &ls.hazards { + links.push(Link { + link_type: "leads-to-hazard".into(), + target: hazard.clone(), + }); + } + + Artifact { + id: ls.id, + artifact_type: "loss-scenario".into(), + title: ls.title, + description: ls.scenario, + status: None, + tags: vec![], + links, + fields, + source_file: None, + } + }) + .collect()) +} + // ── Helpers ────────────────────────────────────────────────────────────── fn read_file(path: &Path) -> Result { diff --git a/rivet-core/src/lib.rs b/rivet-core/src/lib.rs index dd21937..450f4d7 100644 --- a/rivet-core/src/lib.rs +++ b/rivet-core/src/lib.rs @@ -1,14 +1,21 @@ pub mod adapter; +pub mod diff; pub mod error; pub mod formats; pub mod links; pub mod matrix; pub mod model; +#[cfg(feature = "oslc")] +pub mod oslc; pub mod query; +pub mod reqif; pub mod schema; pub mod store; pub mod validate; +#[cfg(feature = "wasm")] +pub mod wasm_runtime; + use std::path::Path; use error::Error; @@ -66,6 +73,10 @@ pub fn load_artifacts( let adapter = formats::generic::GenericYamlAdapter::new(); 
adapter::Adapter::import(&adapter, &source_input, &adapter_config) } + "reqif" => { + let adapter = reqif::ReqIfAdapter::new(); + adapter::Adapter::import(&adapter, &source_input, &adapter_config) + } other => Err(Error::Adapter(format!("unknown format: {}", other))), } } diff --git a/rivet-core/src/links.rs b/rivet-core/src/links.rs index 7362a7a..cec5c2f 100644 --- a/rivet-core/src/links.rs +++ b/rivet-core/src/links.rs @@ -40,7 +40,7 @@ pub struct LinkGraph { /// petgraph directed graph for structural analysis. graph: DiGraph, /// Map from artifact ID to petgraph node index (used for graph lookups). - _node_map: HashMap, + node_map: HashMap, } impl LinkGraph { @@ -100,10 +100,20 @@ impl LinkGraph { backward, broken, graph, - _node_map: node_map, + node_map, } } + /// Access the underlying petgraph directed graph. + pub fn graph(&self) -> &DiGraph { + &self.graph + } + + /// Access the mapping from artifact ID to petgraph node index. + pub fn node_map(&self) -> &HashMap { + &self.node_map + } + /// Get forward links from an artifact. pub fn links_from(&self, id: &str) -> &[ResolvedLink] { self.forward.get(id).map(|v| v.as_slice()).unwrap_or(&[]) diff --git a/rivet-core/src/matrix.rs b/rivet-core/src/matrix.rs index 3767d92..57abe97 100644 --- a/rivet-core/src/matrix.rs +++ b/rivet-core/src/matrix.rs @@ -1,7 +1,7 @@ //! Traceability matrix generation. //! //! Computes coverage between two artifact types via a specified link type. -//! For example: "which sw-reqs are verified by sw-qual-tests?" +//! For example: "which sw-reqs are verified by sw-verification measures?" use crate::links::LinkGraph; use crate::store::Store; diff --git a/rivet-core/src/oslc.rs b/rivet-core/src/oslc.rs new file mode 100644 index 0000000..3facc98 --- /dev/null +++ b/rivet-core/src/oslc.rs @@ -0,0 +1,1872 @@ +//! OSLC (Open Services for Lifecycle Collaboration) client module. +//! +//! This module provides a client for interacting with ALM tools that implement +//! 
the OSLC standard, such as IBM DOORS, Polarion, and codebeamer. OSLC is a +//! set of REST-based specifications that define how lifecycle tools expose and +//! consume resources using Linked Data principles (RDF, JSON-LD). +//! +//! # OSLC Protocol Overview +//! +//! OSLC defines domain-specific resource shapes and a discovery mechanism: +//! +//! 1. **Service Provider Catalog** — A root document (typically at a well-known +//! URL) that lists all available service providers. Each provider represents +//! a project or area that offers creation/query capabilities. +//! +//! 2. **Service Provider** — Describes the services offered for a particular +//! project, including query capabilities and creation factories for each +//! resource type. +//! +//! 3. **Resources** — Typed RDF resources identified by URI. OSLC defines +//! domain specifications: +//! - **oslc_rm** — Requirements Management (Requirement, RequirementCollection) +//! - **oslc_qm** — Quality Management (TestCase, TestResult, TestPlan) +//! - **oslc_cm** — Change Management (ChangeRequest) +//! +//! 4. **Query** — OSLC Query Syntax allows filtering and selecting resource +//! properties: `oslc.where=dcterms:identifier="REQ-001"&oslc.select=dcterms:title` +//! +//! # Architecture +//! +//! The module is structured in layers: +//! - Resource types (`OslcRequirement`, `OslcTestCase`, etc.) map OSLC RDF +//! types to Rust structs using JSON-LD serialization. +//! - `OslcClient` handles HTTP communication, service provider catalog +//! discovery, and CRUD operations. +//! - `OslcSyncAdapter` provides bidirectional sync by converting between OSLC +//! resources and Rivet's internal `Artifact` model. +//! +//! # Feature Flag +//! +//! This module is gated behind the `oslc` feature flag because it introduces +//! a dependency on `reqwest` for HTTP communication. Enable it with: +//! +//! ```toml +//! rivet-core = { version = "0.1", features = ["oslc"] } +//! 
``` + +use std::collections::BTreeMap; +use std::fmt; + +use serde::{Deserialize, Serialize}; + +use crate::error::Error; +use crate::model::{Artifact, ArtifactId, Link}; + +// --------------------------------------------------------------------------- +// OSLC namespace constants +// --------------------------------------------------------------------------- + +/// RDF type URI for OSLC Requirements Management Requirement. +pub const OSLC_RM_REQUIREMENT: &str = "http://open-services.net/ns/rm#Requirement"; + +/// RDF type URI for OSLC Quality Management TestCase. +pub const OSLC_QM_TEST_CASE: &str = "http://open-services.net/ns/qm#TestCase"; + +/// RDF type URI for OSLC Quality Management TestResult. +pub const OSLC_QM_TEST_RESULT: &str = "http://open-services.net/ns/qm#TestResult"; + +/// RDF type URI for OSLC Change Management ChangeRequest. +pub const OSLC_CM_CHANGE_REQUEST: &str = "http://open-services.net/ns/cm#ChangeRequest"; + +/// Dublin Core Terms namespace (used for standard properties like title, identifier). +pub const DCTERMS_NS: &str = "http://purl.org/dc/terms/"; + +/// OSLC core namespace. +pub const OSLC_NS: &str = "http://open-services.net/ns/core#"; + +// --------------------------------------------------------------------------- +// OSLC Resource Types +// --------------------------------------------------------------------------- + +/// Represents the kind of OSLC resource, mapping to the four supported +/// domain specifications. +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum OslcResourceType { + /// oslc_rm:Requirement — a requirements management artifact. + Requirement, + /// oslc_qm:TestCase — a quality management test case definition. + TestCase, + /// oslc_qm:TestResult — the result of executing a test case. + TestResult, + /// oslc_cm:ChangeRequest — a change or defect tracking artifact. + ChangeRequest, +} + +impl OslcResourceType { + /// Return the RDF type URI for this resource type. 
+ pub fn rdf_type(&self) -> &'static str { + match self { + Self::Requirement => OSLC_RM_REQUIREMENT, + Self::TestCase => OSLC_QM_TEST_CASE, + Self::TestResult => OSLC_QM_TEST_RESULT, + Self::ChangeRequest => OSLC_CM_CHANGE_REQUEST, + } + } + + /// Parse an RDF type URI into an `OslcResourceType`, if recognized. + pub fn from_rdf_type(uri: &str) -> Option { + match uri { + OSLC_RM_REQUIREMENT => Some(Self::Requirement), + OSLC_QM_TEST_CASE => Some(Self::TestCase), + OSLC_QM_TEST_RESULT => Some(Self::TestResult), + OSLC_CM_CHANGE_REQUEST => Some(Self::ChangeRequest), + _ => None, + } + } + + /// Map this OSLC resource type to a Rivet artifact type name. + pub fn to_artifact_type(&self) -> &'static str { + match self { + Self::Requirement => "requirement", + Self::TestCase => "test-case", + Self::TestResult => "test-result", + Self::ChangeRequest => "change-request", + } + } + + /// Try to map a Rivet artifact type name to an OSLC resource type. + pub fn from_artifact_type(artifact_type: &str) -> Option { + match artifact_type { + "requirement" | "req" | "SWREQ" | "SYSREQ" => Some(Self::Requirement), + "test-case" | "test_case" | "testcase" => Some(Self::TestCase), + "test-result" | "test_result" | "testresult" => Some(Self::TestResult), + "change-request" | "change_request" | "changerequest" | "defect" | "bug" => { + Some(Self::ChangeRequest) + } + _ => None, + } + } +} + +impl fmt::Display for OslcResourceType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.rdf_type()) + } +} + +/// An OSLC Requirement (oslc_rm:Requirement). +/// +/// Represents a requirement in the OSLC Requirements Management domain. +/// Properties follow Dublin Core Terms and the OSLC RM vocabulary. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct OslcRequirement { + /// The resource URI (JSON-LD `@id`). + #[serde(rename = "@id", default, skip_serializing_if = "Option::is_none")] + pub about: Option, + + /// The RDF type URI (JSON-LD `@type`). 
+ #[serde(rename = "@type", default)] + pub rdf_type: Vec, + + /// dcterms:identifier — external unique identifier. + #[serde( + rename = "dcterms:identifier", + default, + skip_serializing_if = "Option::is_none" + )] + pub identifier: Option, + + /// dcterms:title — human-readable title. + #[serde( + rename = "dcterms:title", + default, + skip_serializing_if = "Option::is_none" + )] + pub title: Option, + + /// dcterms:description — detailed description. + #[serde( + rename = "dcterms:description", + default, + skip_serializing_if = "Option::is_none" + )] + pub description: Option, + + /// oslc_rm:elaboratedBy — links to elaborating resources. + #[serde( + rename = "oslc_rm:elaboratedBy", + default, + skip_serializing_if = "Vec::is_empty" + )] + pub elaborated_by: Vec, + + /// oslc_rm:satisfiedBy — links to satisfying resources. + #[serde( + rename = "oslc_rm:satisfiedBy", + default, + skip_serializing_if = "Vec::is_empty" + )] + pub satisfied_by: Vec, + + /// oslc_rm:trackedBy — links to tracking resources (e.g., change requests). + #[serde( + rename = "oslc_rm:trackedBy", + default, + skip_serializing_if = "Vec::is_empty" + )] + pub tracked_by: Vec, + + /// Additional properties not explicitly modeled. + #[serde(flatten)] + pub extra: BTreeMap, +} + +/// An OSLC TestCase (oslc_qm:TestCase). +/// +/// Represents a test case definition in the OSLC Quality Management domain. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct OslcTestCase { + /// The resource URI (JSON-LD `@id`). + #[serde(rename = "@id", default, skip_serializing_if = "Option::is_none")] + pub about: Option, + + /// The RDF type URI (JSON-LD `@type`). + #[serde(rename = "@type", default)] + pub rdf_type: Vec, + + /// dcterms:identifier — external unique identifier. + #[serde( + rename = "dcterms:identifier", + default, + skip_serializing_if = "Option::is_none" + )] + pub identifier: Option, + + /// dcterms:title — human-readable title. 
+ #[serde( + rename = "dcterms:title", + default, + skip_serializing_if = "Option::is_none" + )] + pub title: Option, + + /// dcterms:description — detailed description of the test case. + #[serde( + rename = "dcterms:description", + default, + skip_serializing_if = "Option::is_none" + )] + pub description: Option, + + /// oslc_qm:validatesRequirement — links to requirements validated by this test. + #[serde( + rename = "oslc_qm:validatesRequirement", + default, + skip_serializing_if = "Vec::is_empty" + )] + pub validates_requirement: Vec, + + /// Additional properties not explicitly modeled. + #[serde(flatten)] + pub extra: BTreeMap, +} + +/// An OSLC TestResult (oslc_qm:TestResult). +/// +/// Represents the outcome of a test execution in the OSLC Quality Management domain. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct OslcTestResult { + /// The resource URI (JSON-LD `@id`). + #[serde(rename = "@id", default, skip_serializing_if = "Option::is_none")] + pub about: Option, + + /// The RDF type URI (JSON-LD `@type`). + #[serde(rename = "@type", default)] + pub rdf_type: Vec, + + /// dcterms:identifier — external unique identifier. + #[serde( + rename = "dcterms:identifier", + default, + skip_serializing_if = "Option::is_none" + )] + pub identifier: Option, + + /// dcterms:title — human-readable title. + #[serde( + rename = "dcterms:title", + default, + skip_serializing_if = "Option::is_none" + )] + pub title: Option, + + /// oslc_qm:status — the test execution status (passed, failed, etc.). + #[serde( + rename = "oslc_qm:status", + default, + skip_serializing_if = "Option::is_none" + )] + pub status: Option, + + /// oslc_qm:reportsOnTestCase — link to the test case this result is for. + #[serde( + rename = "oslc_qm:reportsOnTestCase", + default, + skip_serializing_if = "Option::is_none" + )] + pub reports_on_test_case: Option, + + /// Additional properties not explicitly modeled. 
+ #[serde(flatten)] + pub extra: BTreeMap, +} + +/// An OSLC ChangeRequest (oslc_cm:ChangeRequest). +/// +/// Represents a change request, defect, or work item in the OSLC Change +/// Management domain. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct OslcChangeRequest { + /// The resource URI (JSON-LD `@id`). + #[serde(rename = "@id", default, skip_serializing_if = "Option::is_none")] + pub about: Option, + + /// The RDF type URI (JSON-LD `@type`). + #[serde(rename = "@type", default)] + pub rdf_type: Vec, + + /// dcterms:identifier — external unique identifier. + #[serde( + rename = "dcterms:identifier", + default, + skip_serializing_if = "Option::is_none" + )] + pub identifier: Option, + + /// dcterms:title — human-readable title. + #[serde( + rename = "dcterms:title", + default, + skip_serializing_if = "Option::is_none" + )] + pub title: Option, + + /// dcterms:description — detailed description of the change. + #[serde( + rename = "dcterms:description", + default, + skip_serializing_if = "Option::is_none" + )] + pub description: Option, + + /// oslc_cm:status — the lifecycle status of the change request. + #[serde( + rename = "oslc_cm:status", + default, + skip_serializing_if = "Option::is_none" + )] + pub status: Option, + + /// oslc_cm:implementsRequirement — links to requirements implemented by this change. + #[serde( + rename = "oslc_cm:implementsRequirement", + default, + skip_serializing_if = "Vec::is_empty" + )] + pub implements_requirement: Vec, + + /// oslc_cm:affectsRequirement — links to requirements affected by this change. + #[serde( + rename = "oslc_cm:affectsRequirement", + default, + skip_serializing_if = "Vec::is_empty" + )] + pub affects_requirement: Vec, + + /// Additional properties not explicitly modeled. 
+ #[serde(flatten)] + pub extra: BTreeMap, +} + +// --------------------------------------------------------------------------- +// OSLC Link (resource reference) +// --------------------------------------------------------------------------- + +/// A typed link to another OSLC resource, represented as `{ "@id": "..." }` in JSON-LD. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct OslcLink { + /// The URI of the linked resource. + #[serde(rename = "@id")] + pub href: String, +} + +impl OslcLink { + /// Create a new OSLC link pointing to the given URI. + pub fn new(href: impl Into) -> Self { + Self { href: href.into() } + } +} + +// --------------------------------------------------------------------------- +// Unified OSLC Resource envelope +// --------------------------------------------------------------------------- + +/// A type-erased OSLC resource for use in generic operations. +/// +/// This enum wraps the domain-specific resource types so that mapping and +/// sync code can work uniformly across resource kinds. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "@type")] +pub enum OslcResource { + /// An `oslc_rm:Requirement`. + Requirement(OslcRequirement), + /// An `oslc_qm:TestCase`. + TestCase(OslcTestCase), + /// An `oslc_qm:TestResult`. + TestResult(OslcTestResult), + /// An `oslc_cm:ChangeRequest`. + ChangeRequest(OslcChangeRequest), +} + +impl OslcResource { + /// Return the resource type of this OSLC resource. + pub fn resource_type(&self) -> OslcResourceType { + match self { + Self::Requirement(_) => OslcResourceType::Requirement, + Self::TestCase(_) => OslcResourceType::TestCase, + Self::TestResult(_) => OslcResourceType::TestResult, + Self::ChangeRequest(_) => OslcResourceType::ChangeRequest, + } + } + + /// Return the `@id` (about URI) of this resource, if set. 
+ pub fn about(&self) -> Option<&str> { + match self { + Self::Requirement(r) => r.about.as_deref(), + Self::TestCase(r) => r.about.as_deref(), + Self::TestResult(r) => r.about.as_deref(), + Self::ChangeRequest(r) => r.about.as_deref(), + } + } + + /// Return the identifier (`dcterms:identifier`) of this resource, if set. + pub fn identifier(&self) -> Option<&str> { + match self { + Self::Requirement(r) => r.identifier.as_deref(), + Self::TestCase(r) => r.identifier.as_deref(), + Self::TestResult(r) => r.identifier.as_deref(), + Self::ChangeRequest(r) => r.identifier.as_deref(), + } + } + + /// Return the title (`dcterms:title`) of this resource, if set. + pub fn title(&self) -> Option<&str> { + match self { + Self::Requirement(r) => r.title.as_deref(), + Self::TestCase(r) => r.title.as_deref(), + Self::TestResult(r) => r.title.as_deref(), + Self::ChangeRequest(r) => r.title.as_deref(), + } + } +} + +// --------------------------------------------------------------------------- +// Service Provider Catalog discovery types +// --------------------------------------------------------------------------- + +/// An OSLC Service Provider Catalog. +/// +/// The catalog is the entry point for OSLC discovery. A client fetches the +/// catalog from a well-known URL and walks it to find service providers, +/// their query capabilities, and creation factories. +/// +/// Catalogs can be nested — a top-level catalog may reference sub-catalogs +/// for different project areas. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ServiceProviderCatalog { + /// The URI of this catalog. + #[serde(rename = "@id", default, skip_serializing_if = "Option::is_none")] + pub about: Option, + + /// dcterms:title — human-readable name of this catalog. + #[serde( + rename = "dcterms:title", + default, + skip_serializing_if = "Option::is_none" + )] + pub title: Option, + + /// dcterms:description — description of this catalog. 
+ #[serde( + rename = "dcterms:description", + default, + skip_serializing_if = "Option::is_none" + )] + pub description: Option, + + /// Nested sub-catalogs. + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub referenced_catalogs: Vec, + + /// Service providers listed in this catalog. + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub service_providers: Vec, +} + +/// An OSLC Service Provider. +/// +/// A service provider represents a project or area within an ALM tool that +/// offers OSLC services. Each provider has one or more `Service` entries +/// that describe what operations are available. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ServiceProvider { + /// The URI of this service provider. + #[serde(rename = "@id", default, skip_serializing_if = "Option::is_none")] + pub about: Option, + + /// dcterms:title — project/area name. + #[serde( + rename = "dcterms:title", + default, + skip_serializing_if = "Option::is_none" + )] + pub title: Option, + + /// Services offered by this provider. + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub services: Vec, +} + +/// An OSLC Service describing available capabilities. +/// +/// Each service groups query capabilities and creation factories for a +/// particular OSLC domain (RM, QM, CM). +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Service { + /// The OSLC domain this service belongs to (e.g., the RM, QM, or CM namespace URI). + #[serde(default, skip_serializing_if = "Option::is_none")] + pub domain: Option, + + /// Query capabilities offered by this service. + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub query_capabilities: Vec, + + /// Creation factories offered by this service. + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub creation_factories: Vec, +} + +/// An OSLC Query Capability — describes a URL that supports OSLC Query Syntax. 
+/// +/// Clients POST queries to the `query_base` URL using parameters like +/// `oslc.where` and `oslc.select` to retrieve filtered sets of resources. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct QueryCapability { + /// dcterms:title — human-readable name (e.g., "Requirement Query Capability"). + #[serde( + rename = "dcterms:title", + default, + skip_serializing_if = "Option::is_none" + )] + pub title: Option, + + /// The base URL for OSLC queries. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub query_base: Option, + + /// The RDF types of resources returned by this query capability. + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub resource_types: Vec, +} + +/// An OSLC Creation Factory — describes a URL that accepts POST to create resources. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CreationFactory { + /// dcterms:title — human-readable name. + #[serde( + rename = "dcterms:title", + default, + skip_serializing_if = "Option::is_none" + )] + pub title: Option, + + /// The URL to POST new resources to. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub creation: Option, + + /// The RDF types of resources this factory can create. + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub resource_types: Vec, +} + +// --------------------------------------------------------------------------- +// OSLC Query response +// --------------------------------------------------------------------------- + +/// An OSLC query response page. +/// +/// OSLC query results are returned as paged collections. Each page contains +/// a slice of member resources and an optional link to the next page. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct QueryResponse { + /// Total count of matching resources, if the server supports it. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub total_count: Option, + + /// URI of the next page, if more results are available. 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + pub next_page: Option, + + /// The member resources returned on this page (as raw JSON-LD values). + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub members: Vec, +} + +// --------------------------------------------------------------------------- +// Mapping functions: OSLC <-> Artifact +// --------------------------------------------------------------------------- + +/// Convert an OSLC resource into a Rivet [`Artifact`]. +/// +/// This extracts the identifier, title, description, and links from the OSLC +/// resource and maps them into the Rivet data model. OSLC link relations are +/// translated to Rivet link types based on the OSLC property name. +/// +/// # Errors +/// +/// Returns an error if the resource lacks a `dcterms:identifier` (no way to +/// assign an artifact ID). +pub fn oslc_to_artifact(resource: &OslcResource) -> Result { + let resource_type = resource.resource_type(); + let artifact_type = resource_type.to_artifact_type().to_string(); + + let id: ArtifactId = resource + .identifier() + .ok_or_else(|| Error::Adapter("OSLC resource missing dcterms:identifier".to_string()))? 
+ .to_string(); + + let title = resource.title().unwrap_or("(untitled)").to_string(); + + let (description, status, links, fields) = match resource { + OslcResource::Requirement(r) => { + let mut link_list = Vec::new(); + for link in &r.elaborated_by { + link_list.push(Link { + link_type: "elaborated-by".to_string(), + target: extract_link_target(&link.href), + }); + } + for link in &r.satisfied_by { + link_list.push(Link { + link_type: "satisfied-by".to_string(), + target: extract_link_target(&link.href), + }); + } + for link in &r.tracked_by { + link_list.push(Link { + link_type: "tracked-by".to_string(), + target: extract_link_target(&link.href), + }); + } + (r.description.clone(), None, link_list, BTreeMap::new()) + } + OslcResource::TestCase(r) => { + let mut link_list = Vec::new(); + for link in &r.validates_requirement { + link_list.push(Link { + link_type: "validates".to_string(), + target: extract_link_target(&link.href), + }); + } + (r.description.clone(), None, link_list, BTreeMap::new()) + } + OslcResource::TestResult(r) => { + let mut link_list = Vec::new(); + if let Some(link) = &r.reports_on_test_case { + link_list.push(Link { + link_type: "reports-on".to_string(), + target: extract_link_target(&link.href), + }); + } + (None, r.status.clone(), link_list, BTreeMap::new()) + } + OslcResource::ChangeRequest(r) => { + let mut link_list = Vec::new(); + for link in &r.implements_requirement { + link_list.push(Link { + link_type: "implements".to_string(), + target: extract_link_target(&link.href), + }); + } + for link in &r.affects_requirement { + link_list.push(Link { + link_type: "affects".to_string(), + target: extract_link_target(&link.href), + }); + } + ( + r.description.clone(), + r.status.clone(), + link_list, + BTreeMap::new(), + ) + } + }; + + Ok(Artifact { + id, + artifact_type, + title, + description, + status, + tags: Vec::new(), + links, + fields, + source_file: None, + }) +} + +/// Convert a Rivet [`Artifact`] into an OSLC resource. 
+/// +/// The artifact's `artifact_type` is used to determine the OSLC resource type. +/// Links are mapped to OSLC link properties where the link type name matches +/// a known OSLC relation; unrecognized link types are ignored. +/// +/// # Errors +/// +/// Returns an error if the artifact type cannot be mapped to a known OSLC +/// resource type. +pub fn artifact_to_oslc(artifact: &Artifact) -> Result { + let resource_type = + OslcResourceType::from_artifact_type(&artifact.artifact_type).ok_or_else(|| { + Error::Adapter(format!( + "cannot map artifact type '{}' to an OSLC resource type", + artifact.artifact_type + )) + })?; + + match resource_type { + OslcResourceType::Requirement => { + let mut req = OslcRequirement { + about: None, + rdf_type: vec![OSLC_RM_REQUIREMENT.to_string()], + identifier: Some(artifact.id.clone()), + title: Some(artifact.title.clone()), + description: artifact.description.clone(), + elaborated_by: Vec::new(), + satisfied_by: Vec::new(), + tracked_by: Vec::new(), + extra: BTreeMap::new(), + }; + for link in &artifact.links { + let oslc_link = OslcLink::new(&link.target); + match link.link_type.as_str() { + "elaborated-by" => req.elaborated_by.push(oslc_link), + "satisfied-by" => req.satisfied_by.push(oslc_link), + "tracked-by" => req.tracked_by.push(oslc_link), + _ => {} // Ignore unmapped link types + } + } + Ok(OslcResource::Requirement(req)) + } + OslcResourceType::TestCase => { + let mut tc = OslcTestCase { + about: None, + rdf_type: vec![OSLC_QM_TEST_CASE.to_string()], + identifier: Some(artifact.id.clone()), + title: Some(artifact.title.clone()), + description: artifact.description.clone(), + validates_requirement: Vec::new(), + extra: BTreeMap::new(), + }; + for link in &artifact.links { + if link.link_type == "validates" { + tc.validates_requirement.push(OslcLink::new(&link.target)); + } + } + Ok(OslcResource::TestCase(tc)) + } + OslcResourceType::TestResult => { + let mut tr = OslcTestResult { + about: None, + rdf_type: 
vec![OSLC_QM_TEST_RESULT.to_string()], + identifier: Some(artifact.id.clone()), + title: Some(artifact.title.clone()), + status: artifact.status.clone(), + reports_on_test_case: None, + extra: BTreeMap::new(), + }; + for link in &artifact.links { + if link.link_type == "reports-on" { + tr.reports_on_test_case = Some(OslcLink::new(&link.target)); + break; // Only one reports-on link makes sense + } + } + Ok(OslcResource::TestResult(tr)) + } + OslcResourceType::ChangeRequest => { + let mut cr = OslcChangeRequest { + about: None, + rdf_type: vec![OSLC_CM_CHANGE_REQUEST.to_string()], + identifier: Some(artifact.id.clone()), + title: Some(artifact.title.clone()), + description: artifact.description.clone(), + status: artifact.status.clone(), + implements_requirement: Vec::new(), + affects_requirement: Vec::new(), + extra: BTreeMap::new(), + }; + for link in &artifact.links { + let oslc_link = OslcLink::new(&link.target); + match link.link_type.as_str() { + "implements" => cr.implements_requirement.push(oslc_link), + "affects" => cr.affects_requirement.push(oslc_link), + _ => {} + } + } + Ok(OslcResource::ChangeRequest(cr)) + } + } +} + +/// Extract a short artifact-id-like target from an OSLC link URI. +/// +/// If the href is a full URL, this extracts the last path segment as the +/// identifier. If it is already a bare identifier, it is returned as-is. +fn extract_link_target(href: &str) -> ArtifactId { + // Try to extract the last path segment from a URL + if let Some(last) = href.rsplit('/').next() { + if !last.is_empty() { + return last.to_string(); + } + } + href.to_string() +} + +// --------------------------------------------------------------------------- +// Sync diff types +// --------------------------------------------------------------------------- + +/// Describes the difference between local and remote artifact sets. 
+/// +/// Used by `OslcSyncAdapter::diff` to determine which artifacts need to be +/// created, updated, or deleted in either direction during bidirectional sync. +#[derive(Debug, Clone, Default)] +pub struct SyncDiff { + /// Artifacts that exist remotely but not locally — should be pulled. + pub remote_only: Vec, + + /// Artifacts that exist locally but not remotely — should be pushed. + pub local_only: Vec, + + /// Artifacts that exist in both but have differences. + pub modified: Vec, + + /// Artifacts that are identical on both sides. + pub unchanged: Vec, +} + +impl SyncDiff { + /// Returns `true` if there are no differences between local and remote. + pub fn is_empty(&self) -> bool { + self.remote_only.is_empty() && self.local_only.is_empty() && self.modified.is_empty() + } + + /// Total number of artifacts that differ between local and remote. + pub fn diff_count(&self) -> usize { + self.remote_only.len() + self.local_only.len() + self.modified.len() + } +} + +/// Compute the synchronization diff between a local and remote set of artifacts. +/// +/// Comparison is based on artifact IDs. Two artifacts with the same ID are +/// considered "modified" if their titles or descriptions differ. 
+pub fn compute_diff(local: &[Artifact], remote: &[Artifact]) -> SyncDiff { + let local_map: BTreeMap<&str, &Artifact> = local.iter().map(|a| (a.id.as_str(), a)).collect(); + let remote_map: BTreeMap<&str, &Artifact> = remote.iter().map(|a| (a.id.as_str(), a)).collect(); + + let mut diff = SyncDiff::default(); + + // Find remote-only and modified/unchanged + for (id, remote_artifact) in &remote_map { + if let Some(local_artifact) = local_map.get(id) { + if artifacts_differ(local_artifact, remote_artifact) { + diff.modified.push((*id).to_string()); + } else { + diff.unchanged.push((*id).to_string()); + } + } else { + diff.remote_only.push((*id).to_string()); + } + } + + // Find local-only + for id in local_map.keys() { + if !remote_map.contains_key(id) { + diff.local_only.push((*id).to_string()); + } + } + + diff +} + +/// Check whether two artifacts with the same ID have meaningful differences. +fn artifacts_differ(a: &Artifact, b: &Artifact) -> bool { + a.title != b.title + || a.description != b.description + || a.status != b.status + || a.artifact_type != b.artifact_type +} + +// --------------------------------------------------------------------------- +// OSLC Client (requires HTTP — gated on reqwest) +// --------------------------------------------------------------------------- + +/// Configuration for connecting to an OSLC service provider. +#[derive(Debug, Clone)] +pub struct OslcClientConfig { + /// Base URL of the OSLC service provider catalog (e.g., + /// `https://polarion.example.com/oslc/services/catalog`). + pub base_url: String, + + /// Optional username for basic authentication. + pub username: Option, + + /// Optional password for basic authentication. + pub password: Option, + + /// Optional OAuth2 bearer token for authentication. + pub bearer_token: Option, + + /// Content type to request and send. Defaults to `application/ld+json`. + pub content_type: String, +} + +impl OslcClientConfig { + /// Create a new configuration with just a base URL. 
+ /// Uses JSON-LD as the default content type. + pub fn new(base_url: impl Into) -> Self { + Self { + base_url: base_url.into(), + username: None, + password: None, + bearer_token: None, + content_type: "application/ld+json".to_string(), + } + } + + /// Set basic authentication credentials. + pub fn with_basic_auth(mut self, username: String, password: String) -> Self { + self.username = Some(username); + self.password = Some(password); + self + } + + /// Set an OAuth2 bearer token. + pub fn with_bearer_token(mut self, token: String) -> Self { + self.bearer_token = Some(token); + self + } +} + +/// An OSLC HTTP client for communicating with ALM tools. +/// +/// The `OslcClient` encapsulates HTTP communication with OSLC-compliant +/// servers. It handles: +/// +/// - **Discovery** — fetching the Service Provider Catalog to learn what +/// services and query capabilities are available. +/// - **Query** — issuing OSLC queries with `oslc.where` and `oslc.select` +/// parameters. +/// - **CRUD** — reading, creating, and updating individual OSLC resources +/// using standard HTTP methods (GET, POST, PUT). +/// +/// The client requires the `reqwest` crate and is only available when the +/// `oslc` feature is enabled. +#[cfg(feature = "oslc")] +pub struct OslcClient { + http: reqwest::Client, + config: OslcClientConfig, +} + +#[cfg(feature = "oslc")] +impl OslcClient { + /// Create a new OSLC client with the given configuration. + pub fn new(config: OslcClientConfig) -> Result { + let http = reqwest::Client::builder() + .user_agent("rivet-oslc/0.1") + .build() + .map_err(|e| Error::Adapter(format!("failed to create HTTP client: {e}")))?; + + Ok(Self { http, config }) + } + + /// Discover the OSLC Service Provider Catalog at the configured base URL. + /// + /// This is the first step in the OSLC workflow: the client fetches the + /// catalog to learn which service providers (projects) are available and + /// what query/creation capabilities they offer. 
+ pub async fn discover(&self) -> Result { + let response = self + .build_get_request(&self.config.base_url) + .send() + .await + .map_err(|e| Error::Adapter(format!("catalog discovery failed: {e}")))?; + + Self::check_response_status(&response)?; + + response + .json::() + .await + .map_err(|e| Error::Adapter(format!("failed to parse catalog: {e}"))) + } + + /// Execute an OSLC query against a query capability URL. + /// + /// # Parameters + /// + /// - `query_base` — the query capability URL obtained from discovery. + /// - `where_clause` — an OSLC `oslc.where` expression (e.g., + /// `dcterms:identifier="REQ-001"`). Pass an empty string to fetch all. + /// - `select` — an OSLC `oslc.select` expression to control which + /// properties are returned (e.g., `dcterms:title,dcterms:description`). + /// Pass an empty string for server defaults. + pub async fn query( + &self, + query_base: &str, + where_clause: &str, + select: &str, + ) -> Result { + let mut url = query_base.to_string(); + let mut params = Vec::new(); + + if !where_clause.is_empty() { + params.push(format!("oslc.where={}", urlencoding::encode(where_clause))); + } + if !select.is_empty() { + params.push(format!("oslc.select={}", urlencoding::encode(select))); + } + + if !params.is_empty() { + let sep = if url.contains('?') { '&' } else { '?' }; + url = format!("{}{}{}", url, sep, params.join("&")); + } + + let response = self + .build_get_request(&url) + .send() + .await + .map_err(|e| Error::Adapter(format!("OSLC query failed: {e}")))?; + + Self::check_response_status(&response)?; + + response + .json::() + .await + .map_err(|e| Error::Adapter(format!("failed to parse query response: {e}"))) + } + + /// Fetch a single OSLC resource by its URI as a raw JSON-LD value. + /// + /// This is used to retrieve the full representation of a resource after + /// discovering its URI through a query or link traversal. 
+ pub async fn get_resource(&self, url: &str) -> Result { + let response = self + .build_get_request(url) + .send() + .await + .map_err(|e| Error::Adapter(format!("GET resource failed: {e}")))?; + + Self::check_response_status(&response)?; + + response + .json::() + .await + .map_err(|e| Error::Adapter(format!("failed to parse resource: {e}"))) + } + + /// Create a new OSLC resource by POSTing to a creation factory URL. + /// + /// # Parameters + /// + /// - `factory_url` — the creation factory URL obtained from discovery. + /// - `resource` — the JSON-LD body to POST. + /// + /// Returns the server response body (typically the created resource with + /// its assigned URI). + pub async fn create_resource( + &self, + factory_url: &str, + resource: &serde_json::Value, + ) -> Result { + let response = self + .build_post_request(factory_url, resource) + .send() + .await + .map_err(|e| Error::Adapter(format!("POST resource failed: {e}")))?; + + Self::check_response_status(&response)?; + + response + .json::() + .await + .map_err(|e| Error::Adapter(format!("failed to parse created resource: {e}"))) + } + + /// Update an existing OSLC resource by PUTting to its URI. + /// + /// # Parameters + /// + /// - `resource_url` — the URI of the resource to update. + /// - `resource` — the updated JSON-LD body. + /// + /// Returns the server response body. + pub async fn update_resource( + &self, + resource_url: &str, + resource: &serde_json::Value, + ) -> Result { + let response = self + .build_put_request(resource_url, resource) + .send() + .await + .map_err(|e| Error::Adapter(format!("PUT resource failed: {e}")))?; + + Self::check_response_status(&response)?; + + response + .json::() + .await + .map_err(|e| Error::Adapter(format!("failed to parse updated resource: {e}"))) + } + + /// Build a GET request with appropriate headers and auth. 
+ fn build_get_request(&self, url: &str) -> reqwest::RequestBuilder { + let mut req = self + .http + .get(url) + .header("Accept", &self.config.content_type) + .header("OSLC-Core-Version", "2.0"); + + req = self.apply_auth(req); + req + } + + /// Build a POST request with appropriate headers, auth, and body. + fn build_post_request(&self, url: &str, body: &serde_json::Value) -> reqwest::RequestBuilder { + let mut req = self + .http + .post(url) + .header("Accept", &self.config.content_type) + .header("Content-Type", &self.config.content_type) + .header("OSLC-Core-Version", "2.0") + .json(body); + + req = self.apply_auth(req); + req + } + + /// Build a PUT request with appropriate headers, auth, and body. + fn build_put_request(&self, url: &str, body: &serde_json::Value) -> reqwest::RequestBuilder { + let mut req = self + .http + .put(url) + .header("Accept", &self.config.content_type) + .header("Content-Type", &self.config.content_type) + .header("OSLC-Core-Version", "2.0") + .json(body); + + req = self.apply_auth(req); + req + } + + /// Apply authentication credentials to a request builder. + fn apply_auth(&self, req: reqwest::RequestBuilder) -> reqwest::RequestBuilder { + if let Some(token) = &self.config.bearer_token { + req.bearer_auth(token) + } else if let (Some(user), Some(pass)) = (&self.config.username, &self.config.password) { + req.basic_auth(user, Some(pass)) + } else { + req + } + } + + /// Check the HTTP response status and return an error for non-success codes. 
+ fn check_response_status(response: &reqwest::Response) -> Result<(), Error> { + if response.status().is_success() { + Ok(()) + } else { + Err(Error::Adapter(format!( + "OSLC server returned HTTP {}: {}", + response.status().as_u16(), + response.status().canonical_reason().unwrap_or("Unknown"), + ))) + } + } +} + +// --------------------------------------------------------------------------- +// OSLC Sync Adapter +// --------------------------------------------------------------------------- + +/// Trait for bidirectional artifact synchronization with an external system. +/// +/// Implementors handle the pull/push/diff lifecycle for syncing Rivet +/// artifacts with a remote ALM tool over OSLC. +pub trait SyncAdapter { + /// Pull artifacts from a remote OSLC service. + /// + /// Fetches all resources matching the configured query from the given + /// service URL and converts them to Rivet artifacts. + fn pull( + &self, + service_url: &str, + ) -> impl std::future::Future, Error>> + Send; + + /// Push local artifacts to a remote OSLC service. + /// + /// Converts each artifact to an OSLC resource and creates or updates + /// it on the remote server. + fn push( + &self, + service_url: &str, + artifacts: &[Artifact], + ) -> impl std::future::Future> + Send; + + /// Compute the difference between local and remote artifact sets. + fn diff(&self, local: &[Artifact], remote: &[Artifact]) -> SyncDiff; +} + +/// An OSLC-based sync adapter backed by an [`OslcClient`]. +/// +/// This adapter implements bidirectional synchronization between a local +/// Rivet project and a remote ALM tool (Polarion, DOORS, codebeamer) via +/// the OSLC protocol. +/// +/// # Sync Workflow +/// +/// 1. **Pull**: `discover()` the catalog, find the query capability for the +/// desired resource type, `query()` for all matching resources, convert +/// each to an `Artifact` via `oslc_to_artifact()`. +/// +/// 2. 
**Push**: For each local artifact, convert to an OSLC resource via +/// `artifact_to_oslc()`, then `create_resource()` or `update_resource()` +/// on the remote server. +/// +/// 3. **Diff**: Compare local and remote artifact sets by ID to identify +/// what needs to be created, updated, or deleted in each direction. +#[cfg(feature = "oslc")] +pub struct OslcSyncAdapter { + client: OslcClient, +} + +#[cfg(feature = "oslc")] +impl OslcSyncAdapter { + /// Create a new sync adapter wrapping an OSLC client. + pub fn new(client: OslcClient) -> Self { + Self { client } + } + + /// Create a new sync adapter from a client configuration. + pub fn from_config(config: OslcClientConfig) -> Result { + let client = OslcClient::new(config)?; + Ok(Self { client }) + } + + /// Access the underlying OSLC client. + pub fn client(&self) -> &OslcClient { + &self.client + } +} + +#[cfg(feature = "oslc")] +impl SyncAdapter for OslcSyncAdapter { + /// Pull artifacts from the remote OSLC service. + /// + /// Queries the given service URL for all available resources and converts + /// them to Rivet artifacts. Uses an empty `oslc.where` clause to fetch + /// all resources, with full property selection. + async fn pull(&self, service_url: &str) -> Result, Error> { + let query_response = self.client.query(service_url, "", "").await?; + + let mut artifacts = Vec::new(); + for member_value in &query_response.members { + // Try to determine the resource type from the JSON-LD @type field + let resource = parse_member_resource(member_value)?; + let artifact = oslc_to_artifact(&resource)?; + artifacts.push(artifact); + } + + Ok(artifacts) + } + + /// Push local artifacts to the remote OSLC service. + /// + /// For each artifact, converts it to an OSLC resource and POSTs it to + /// the service URL (used as a creation factory). Existing resources would + /// need to be updated via PUT to their individual URIs — a full + /// implementation would first diff to decide create vs. update. 
+ async fn push(&self, service_url: &str, artifacts: &[Artifact]) -> Result<(), Error> { + for artifact in artifacts { + let oslc_resource = artifact_to_oslc(artifact)?; + let json_value = serde_json::to_value(&oslc_resource) + .map_err(|e| Error::Adapter(format!("failed to serialize OSLC resource: {e}")))?; + + self.client + .create_resource(service_url, &json_value) + .await?; + } + Ok(()) + } + + /// Compute the diff between local and remote artifact sets. + fn diff(&self, local: &[Artifact], remote: &[Artifact]) -> SyncDiff { + compute_diff(local, remote) + } +} + +// --------------------------------------------------------------------------- +// Helper: parse a JSON-LD member into a typed OslcResource +// --------------------------------------------------------------------------- + +/// Attempt to parse a raw JSON-LD value into a typed [`OslcResource`]. +/// +/// Inspects the `@type` array to determine which domain type to deserialize +/// into. Falls back to `Requirement` if no recognized type is found. 
+fn parse_member_resource(value: &serde_json::Value) -> Result { + // Look at the @type field to determine the resource type + let rdf_types = value + .get("@type") + .and_then(|t| t.as_array()) + .map(|arr| arr.iter().filter_map(|v| v.as_str()).collect::>()) + .unwrap_or_default(); + + // Also check for a single string @type + let single_type = value.get("@type").and_then(|t| t.as_str()); + + let all_types: Vec<&str> = if rdf_types.is_empty() { + single_type.into_iter().collect() + } else { + rdf_types + }; + + // Match on known types + for rdf_type in &all_types { + if let Some(resource_type) = OslcResourceType::from_rdf_type(rdf_type) { + return match resource_type { + OslcResourceType::Requirement => { + let req: OslcRequirement = serde_json::from_value(value.clone()) + .map_err(|e| Error::Adapter(format!("failed to parse Requirement: {e}")))?; + Ok(OslcResource::Requirement(req)) + } + OslcResourceType::TestCase => { + let tc: OslcTestCase = serde_json::from_value(value.clone()) + .map_err(|e| Error::Adapter(format!("failed to parse TestCase: {e}")))?; + Ok(OslcResource::TestCase(tc)) + } + OslcResourceType::TestResult => { + let tr: OslcTestResult = serde_json::from_value(value.clone()) + .map_err(|e| Error::Adapter(format!("failed to parse TestResult: {e}")))?; + Ok(OslcResource::TestResult(tr)) + } + OslcResourceType::ChangeRequest => { + let cr: OslcChangeRequest = + serde_json::from_value(value.clone()).map_err(|e| { + Error::Adapter(format!("failed to parse ChangeRequest: {e}")) + })?; + Ok(OslcResource::ChangeRequest(cr)) + } + }; + } + } + + // Fallback: try to parse as a Requirement (the most common type) + let req: OslcRequirement = serde_json::from_value(value.clone()).map_err(|e| { + Error::Adapter(format!("failed to parse OSLC resource (unknown type): {e}")) + })?; + Ok(OslcResource::Requirement(req)) +} + +// --------------------------------------------------------------------------- +// Tests +// 
--------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_resource_type_rdf_roundtrip() { + let types = [ + OslcResourceType::Requirement, + OslcResourceType::TestCase, + OslcResourceType::TestResult, + OslcResourceType::ChangeRequest, + ]; + + for rt in &types { + let uri = rt.rdf_type(); + let parsed = + OslcResourceType::from_rdf_type(uri).expect("should parse back from RDF type URI"); + assert_eq!(&parsed, rt); + } + } + + #[test] + fn test_resource_type_artifact_type_roundtrip() { + let types = [ + OslcResourceType::Requirement, + OslcResourceType::TestCase, + OslcResourceType::TestResult, + OslcResourceType::ChangeRequest, + ]; + + for rt in &types { + let artifact_type = rt.to_artifact_type(); + let parsed = OslcResourceType::from_artifact_type(artifact_type) + .expect("should parse back from artifact type"); + assert_eq!(&parsed, rt); + } + } + + #[test] + fn test_extract_link_target_url() { + assert_eq!( + extract_link_target("https://example.com/oslc/resources/REQ-001"), + "REQ-001" + ); + } + + #[test] + fn test_extract_link_target_bare_id() { + assert_eq!(extract_link_target("REQ-001"), "REQ-001"); + } + + #[test] + fn test_oslc_to_artifact_requirement() { + let req = OslcRequirement { + about: Some("https://example.com/req/1".to_string()), + rdf_type: vec![OSLC_RM_REQUIREMENT.to_string()], + identifier: Some("REQ-001".to_string()), + title: Some("Safety Requirement".to_string()), + description: Some("The system shall be safe.".to_string()), + elaborated_by: vec![OslcLink::new("https://example.com/req/2")], + satisfied_by: Vec::new(), + tracked_by: Vec::new(), + extra: BTreeMap::new(), + }; + + let resource = OslcResource::Requirement(req); + let artifact = oslc_to_artifact(&resource).expect("conversion should succeed"); + + assert_eq!(artifact.id, "REQ-001"); + assert_eq!(artifact.artifact_type, "requirement"); + assert_eq!(artifact.title, "Safety Requirement"); + 
assert_eq!( + artifact.description.as_deref(), + Some("The system shall be safe.") + ); + assert_eq!(artifact.links.len(), 1); + assert_eq!(artifact.links[0].link_type, "elaborated-by"); + assert_eq!(artifact.links[0].target, "2"); + } + + #[test] + fn test_oslc_to_artifact_missing_identifier() { + let req = OslcRequirement { + about: None, + rdf_type: vec![OSLC_RM_REQUIREMENT.to_string()], + identifier: None, + title: Some("No ID".to_string()), + description: None, + elaborated_by: Vec::new(), + satisfied_by: Vec::new(), + tracked_by: Vec::new(), + extra: BTreeMap::new(), + }; + + let resource = OslcResource::Requirement(req); + let result = oslc_to_artifact(&resource); + assert!(result.is_err()); + } + + #[test] + fn test_artifact_to_oslc_requirement() { + let artifact = Artifact { + id: "REQ-001".to_string(), + artifact_type: "requirement".to_string(), + title: "Safety Requirement".to_string(), + description: Some("Must be safe.".to_string()), + status: None, + tags: Vec::new(), + links: vec![Link { + link_type: "satisfied-by".to_string(), + target: "IMPL-001".to_string(), + }], + fields: BTreeMap::new(), + source_file: None, + }; + + let resource = artifact_to_oslc(&artifact).expect("conversion should succeed"); + assert_eq!(resource.resource_type(), OslcResourceType::Requirement); + assert_eq!(resource.identifier(), Some("REQ-001")); + assert_eq!(resource.title(), Some("Safety Requirement")); + + if let OslcResource::Requirement(req) = &resource { + assert_eq!(req.satisfied_by.len(), 1); + assert_eq!(req.satisfied_by[0].href, "IMPL-001"); + } else { + panic!("expected Requirement variant"); + } + } + + #[test] + fn test_artifact_to_oslc_test_case() { + let artifact = Artifact { + id: "TC-001".to_string(), + artifact_type: "test-case".to_string(), + title: "Safety Test".to_string(), + description: None, + status: None, + tags: Vec::new(), + links: vec![Link { + link_type: "validates".to_string(), + target: "REQ-001".to_string(), + }], + fields: BTreeMap::new(), 
+ source_file: None, + }; + + let resource = artifact_to_oslc(&artifact).expect("conversion should succeed"); + assert_eq!(resource.resource_type(), OslcResourceType::TestCase); + + if let OslcResource::TestCase(tc) = &resource { + assert_eq!(tc.validates_requirement.len(), 1); + assert_eq!(tc.validates_requirement[0].href, "REQ-001"); + } else { + panic!("expected TestCase variant"); + } + } + + #[test] + fn test_artifact_to_oslc_test_result() { + let artifact = Artifact { + id: "TR-001".to_string(), + artifact_type: "test-result".to_string(), + title: "Safety Test Result".to_string(), + description: None, + status: Some("passed".to_string()), + tags: Vec::new(), + links: vec![Link { + link_type: "reports-on".to_string(), + target: "TC-001".to_string(), + }], + fields: BTreeMap::new(), + source_file: None, + }; + + let resource = artifact_to_oslc(&artifact).expect("conversion should succeed"); + assert_eq!(resource.resource_type(), OslcResourceType::TestResult); + + if let OslcResource::TestResult(tr) = &resource { + assert_eq!( + tr.reports_on_test_case.as_ref().map(|l| l.href.as_str()), + Some("TC-001") + ); + assert_eq!(tr.status.as_deref(), Some("passed")); + } else { + panic!("expected TestResult variant"); + } + } + + #[test] + fn test_artifact_to_oslc_change_request() { + let artifact = Artifact { + id: "CR-001".to_string(), + artifact_type: "change-request".to_string(), + title: "Fix safety issue".to_string(), + description: Some("There is a safety defect.".to_string()), + status: Some("open".to_string()), + tags: Vec::new(), + links: vec![ + Link { + link_type: "implements".to_string(), + target: "REQ-001".to_string(), + }, + Link { + link_type: "affects".to_string(), + target: "REQ-002".to_string(), + }, + ], + fields: BTreeMap::new(), + source_file: None, + }; + + let resource = artifact_to_oslc(&artifact).expect("conversion should succeed"); + assert_eq!(resource.resource_type(), OslcResourceType::ChangeRequest); + + if let 
OslcResource::ChangeRequest(cr) = &resource { + assert_eq!(cr.implements_requirement.len(), 1); + assert_eq!(cr.affects_requirement.len(), 1); + assert_eq!(cr.status.as_deref(), Some("open")); + } else { + panic!("expected ChangeRequest variant"); + } + } + + #[test] + fn test_artifact_to_oslc_unknown_type() { + let artifact = Artifact { + id: "X-001".to_string(), + artifact_type: "unknown-thing".to_string(), + title: "Unknown".to_string(), + description: None, + status: None, + tags: Vec::new(), + links: Vec::new(), + fields: BTreeMap::new(), + source_file: None, + }; + + let result = artifact_to_oslc(&artifact); + assert!(result.is_err()); + } + + #[test] + fn test_compute_diff_empty() { + let diff = compute_diff(&[], &[]); + assert!(diff.is_empty()); + assert_eq!(diff.diff_count(), 0); + } + + #[test] + fn test_compute_diff_local_only() { + let local = vec![Artifact { + id: "REQ-001".to_string(), + artifact_type: "requirement".to_string(), + title: "Req 1".to_string(), + description: None, + status: None, + tags: Vec::new(), + links: Vec::new(), + fields: BTreeMap::new(), + source_file: None, + }]; + + let diff = compute_diff(&local, &[]); + assert_eq!(diff.local_only, vec!["REQ-001"]); + assert!(diff.remote_only.is_empty()); + assert!(diff.modified.is_empty()); + } + + #[test] + fn test_compute_diff_remote_only() { + let remote = vec![Artifact { + id: "REQ-002".to_string(), + artifact_type: "requirement".to_string(), + title: "Req 2".to_string(), + description: None, + status: None, + tags: Vec::new(), + links: Vec::new(), + fields: BTreeMap::new(), + source_file: None, + }]; + + let diff = compute_diff(&[], &remote); + assert_eq!(diff.remote_only, vec!["REQ-002"]); + assert!(diff.local_only.is_empty()); + } + + #[test] + fn test_compute_diff_modified() { + let local = vec![Artifact { + id: "REQ-001".to_string(), + artifact_type: "requirement".to_string(), + title: "Req 1 (old)".to_string(), + description: None, + status: None, + tags: Vec::new(), + links: 
Vec::new(), + fields: BTreeMap::new(), + source_file: None, + }]; + + let remote = vec![Artifact { + id: "REQ-001".to_string(), + artifact_type: "requirement".to_string(), + title: "Req 1 (new)".to_string(), + description: None, + status: None, + tags: Vec::new(), + links: Vec::new(), + fields: BTreeMap::new(), + source_file: None, + }]; + + let diff = compute_diff(&local, &remote); + assert_eq!(diff.modified, vec!["REQ-001"]); + assert!(diff.local_only.is_empty()); + assert!(diff.remote_only.is_empty()); + } + + #[test] + fn test_compute_diff_unchanged() { + let local = vec![Artifact { + id: "REQ-001".to_string(), + artifact_type: "requirement".to_string(), + title: "Req 1".to_string(), + description: None, + status: None, + tags: Vec::new(), + links: Vec::new(), + fields: BTreeMap::new(), + source_file: None, + }]; + + let remote = vec![Artifact { + id: "REQ-001".to_string(), + artifact_type: "requirement".to_string(), + title: "Req 1".to_string(), + description: None, + status: None, + tags: Vec::new(), + links: Vec::new(), + fields: BTreeMap::new(), + source_file: None, + }]; + + let diff = compute_diff(&local, &remote); + assert!(diff.is_empty()); + assert_eq!(diff.unchanged, vec!["REQ-001"]); + } + + #[test] + fn test_parse_member_resource_requirement() { + let json = serde_json::json!({ + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "REQ-100", + "dcterms:title": "A test requirement" + }); + + let resource = parse_member_resource(&json).expect("should parse"); + assert_eq!(resource.resource_type(), OslcResourceType::Requirement); + assert_eq!(resource.identifier(), Some("REQ-100")); + } + + #[test] + fn test_parse_member_resource_test_case() { + let json = serde_json::json!({ + "@type": ["http://open-services.net/ns/qm#TestCase"], + "dcterms:identifier": "TC-100", + "dcterms:title": "A test case" + }); + + let resource = parse_member_resource(&json).expect("should parse"); + assert_eq!(resource.resource_type(), 
OslcResourceType::TestCase); + } + + #[test] + fn test_parse_member_resource_change_request() { + let json = serde_json::json!({ + "@type": ["http://open-services.net/ns/cm#ChangeRequest"], + "dcterms:identifier": "CR-100", + "dcterms:title": "A change request", + "oslc_cm:status": "open" + }); + + let resource = parse_member_resource(&json).expect("should parse"); + assert_eq!(resource.resource_type(), OslcResourceType::ChangeRequest); + } + + #[test] + fn test_oslc_link_new() { + let link = OslcLink::new("https://example.com/resource/1"); + assert_eq!(link.href, "https://example.com/resource/1"); + } + + #[test] + fn test_oslc_client_config_new() { + let config = OslcClientConfig::new("https://example.com/oslc"); + assert_eq!(config.base_url, "https://example.com/oslc"); + assert_eq!(config.content_type, "application/ld+json"); + assert!(config.username.is_none()); + assert!(config.password.is_none()); + assert!(config.bearer_token.is_none()); + } + + #[test] + fn test_oslc_client_config_with_basic_auth() { + let config = OslcClientConfig::new("https://example.com/oslc") + .with_basic_auth("user".to_string(), "pass".to_string()); + assert_eq!(config.username.as_deref(), Some("user")); + assert_eq!(config.password.as_deref(), Some("pass")); + } + + #[test] + fn test_oslc_client_config_with_bearer() { + let config = OslcClientConfig::new("https://example.com/oslc") + .with_bearer_token("my-token".to_string()); + assert_eq!(config.bearer_token.as_deref(), Some("my-token")); + } + + #[test] + fn test_sync_diff_is_empty() { + let empty = SyncDiff::default(); + assert!(empty.is_empty()); + + let nonempty = SyncDiff { + remote_only: vec!["REQ-001".to_string()], + ..Default::default() + }; + assert!(!nonempty.is_empty()); + } + + #[test] + fn test_resource_type_display() { + assert_eq!( + format!("{}", OslcResourceType::Requirement), + OSLC_RM_REQUIREMENT + ); + } + + #[test] + fn test_artifact_type_aliases() { + // Test that common aliases map correctly + assert_eq!( + 
OslcResourceType::from_artifact_type("req"), + Some(OslcResourceType::Requirement) + ); + assert_eq!( + OslcResourceType::from_artifact_type("SWREQ"), + Some(OslcResourceType::Requirement) + ); + assert_eq!( + OslcResourceType::from_artifact_type("defect"), + Some(OslcResourceType::ChangeRequest) + ); + assert_eq!( + OslcResourceType::from_artifact_type("bug"), + Some(OslcResourceType::ChangeRequest) + ); + assert_eq!( + OslcResourceType::from_artifact_type("something-unknown"), + None + ); + } + + #[test] + fn test_requirement_serialization_roundtrip() { + let req = OslcRequirement { + about: Some("https://example.com/req/1".to_string()), + rdf_type: vec![OSLC_RM_REQUIREMENT.to_string()], + identifier: Some("REQ-001".to_string()), + title: Some("Safety Requirement".to_string()), + description: Some("The system shall be safe.".to_string()), + elaborated_by: vec![OslcLink::new("https://example.com/req/2")], + satisfied_by: Vec::new(), + tracked_by: Vec::new(), + extra: BTreeMap::new(), + }; + + let json = serde_json::to_string(&req).expect("serialization should succeed"); + let parsed: OslcRequirement = + serde_json::from_str(&json).expect("deserialization should succeed"); + + assert_eq!(parsed.identifier, req.identifier); + assert_eq!(parsed.title, req.title); + assert_eq!(parsed.elaborated_by.len(), 1); + } +} diff --git a/rivet-core/src/reqif.rs b/rivet-core/src/reqif.rs new file mode 100644 index 0000000..430f663 --- /dev/null +++ b/rivet-core/src/reqif.rs @@ -0,0 +1,925 @@ +//! ReqIF 1.2 XML import/export adapter. +//! +//! Implements the OMG Requirements Interchange Format (ReqIF) version 1.2, +//! namespace `http://www.omg.org/spec/ReqIF/20110401/reqif.xsd`. +//! +//! Mapping strategy: +//! +//! | Rivet concept | ReqIF element | +//! |-------------------|-----------------------------------| +//! | Artifact | SPEC-OBJECT | +//! | Artifact.id | SPEC-OBJECT.IDENTIFIER | +//! | Artifact.title | SPEC-OBJECT.LONG-NAME | +//! 
| Artifact.description | SPEC-OBJECT.DESC | +//! | Artifact.artifact_type | SPEC-OBJECT-TYPE.LONG-NAME | +//! | Artifact.status | ATTRIBUTE-VALUE-STRING ("status") | +//! | Artifact.tags | ATTRIBUTE-VALUE-STRING ("tags") | +//! | Artifact.fields | ATTRIBUTE-VALUE-STRING per field | +//! | Link | SPEC-RELATION | +//! | Link.link_type | SPEC-RELATION-TYPE.LONG-NAME | + +use std::collections::{BTreeMap, HashMap}; + +use quick_xml::de::from_str as xml_from_str; +use quick_xml::se::to_string as xml_to_string; +use serde::{Deserialize, Serialize}; + +use crate::adapter::{Adapter, AdapterConfig, AdapterSource}; +use crate::error::Error; +use crate::model::{Artifact, Link}; + +// ── ReqIF XML structures ──────────────────────────────────────────────── +// +// These mirror the ReqIF 1.2 XSD just enough for lossless round-tripping +// of Rivet artifacts. Fields not relevant to Rivet are accepted on read +// (via serde defaults) and omitted on write. + +/// Root element: ``. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "REQ-IF")] +pub struct ReqIfRoot { + #[serde(rename = "@xmlns", default = "default_namespace")] + pub xmlns: String, + + #[serde(rename = "THE-HEADER")] + pub the_header: TheHeader, + + #[serde(rename = "CORE-CONTENT")] + pub core_content: CoreContent, +} + +fn default_namespace() -> String { + REQIF_NAMESPACE.to_string() +} + +/// `` wrapping ``. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "THE-HEADER")] +pub struct TheHeader { + #[serde(rename = "REQ-IF-HEADER")] + pub req_if_header: ReqIfHeader, +} + +/// ``. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "REQ-IF-HEADER")] +pub struct ReqIfHeader { + #[serde(rename = "@IDENTIFIER")] + pub identifier: String, + + #[serde(rename = "COMMENT", default, skip_serializing_if = "Option::is_none")] + pub comment: Option, + + #[serde( + rename = "CREATION-TIME", + default, + skip_serializing_if = "Option::is_none" + )] + pub creation_time: Option, + + #[serde( + rename = "REPOSITORY-ID", + default, + skip_serializing_if = "Option::is_none" + )] + pub repository_id: Option, + + #[serde( + rename = "REQ-IF-TOOL-ID", + default, + skip_serializing_if = "Option::is_none" + )] + pub req_if_tool_id: Option, + + #[serde( + rename = "REQ-IF-VERSION", + default = "default_reqif_version", + skip_serializing_if = "Option::is_none" + )] + pub req_if_version: Option, + + #[serde( + rename = "SOURCE-TOOL-ID", + default, + skip_serializing_if = "Option::is_none" + )] + pub source_tool_id: Option, + + #[serde(rename = "TITLE", default, skip_serializing_if = "Option::is_none")] + pub title: Option, +} + +fn default_reqif_version() -> Option { + Some("1.2".into()) +} + +/// `` wrapping ``. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "CORE-CONTENT")] +pub struct CoreContent { + #[serde(rename = "REQ-IF-CONTENT")] + pub req_if_content: ReqIfContent, +} + +/// `` — the meat of a ReqIF document. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "REQ-IF-CONTENT")] +pub struct ReqIfContent { + #[serde(rename = "DATATYPES", default)] + pub datatypes: Datatypes, + + #[serde(rename = "SPEC-TYPES", default)] + pub spec_types: SpecTypes, + + #[serde(rename = "SPEC-OBJECTS", default)] + pub spec_objects: SpecObjects, + + #[serde(rename = "SPEC-RELATIONS", default)] + pub spec_relations: SpecRelations, +} + +// ── DATATYPES ─────────────────────────────────────────────────────────── + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename = "DATATYPES")] +pub struct Datatypes { + #[serde(rename = "DATATYPE-DEFINITION-STRING", default)] + pub string_types: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "DATATYPE-DEFINITION-STRING")] +pub struct DatatypeDefinitionString { + #[serde(rename = "@IDENTIFIER")] + pub identifier: String, + + #[serde( + rename = "@LONG-NAME", + default, + skip_serializing_if = "Option::is_none" + )] + pub long_name: Option, + + #[serde( + rename = "@MAX-LENGTH", + default, + skip_serializing_if = "Option::is_none" + )] + pub max_length: Option, +} + +// ── SPEC-TYPES ────────────────────────────────────────────────────────── + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename = "SPEC-TYPES")] +pub struct SpecTypes { + #[serde(rename = "SPEC-OBJECT-TYPE", default)] + pub object_types: Vec, + + #[serde(rename = "SPEC-RELATION-TYPE", default)] + pub relation_types: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "SPEC-OBJECT-TYPE")] +pub struct SpecObjectType { + #[serde(rename = "@IDENTIFIER")] + pub identifier: String, + + #[serde( + rename = "@LONG-NAME", + default, + skip_serializing_if = "Option::is_none" + )] + pub long_name: Option, + + #[serde( + rename = "SPEC-ATTRIBUTES", + default, + skip_serializing_if = "Option::is_none" + )] + pub spec_attributes: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] 
+#[serde(rename = "SPEC-RELATION-TYPE")] +pub struct SpecRelationType { + #[serde(rename = "@IDENTIFIER")] + pub identifier: String, + + #[serde( + rename = "@LONG-NAME", + default, + skip_serializing_if = "Option::is_none" + )] + pub long_name: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "SPEC-ATTRIBUTES")] +pub struct SpecAttributes { + #[serde(rename = "ATTRIBUTE-DEFINITION-STRING", default)] + pub string_attrs: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "ATTRIBUTE-DEFINITION-STRING")] +pub struct AttributeDefinitionString { + #[serde(rename = "@IDENTIFIER")] + pub identifier: String, + + #[serde( + rename = "@LONG-NAME", + default, + skip_serializing_if = "Option::is_none" + )] + pub long_name: Option, + + #[serde(rename = "TYPE", default, skip_serializing_if = "Option::is_none")] + pub datatype_ref: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "TYPE")] +pub struct DatatypeRef { + #[serde(rename = "DATATYPE-DEFINITION-STRING-REF")] + pub datatype_ref: String, +} + +// ── SPEC-OBJECTS ──────────────────────────────────────────────────────── + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename = "SPEC-OBJECTS")] +pub struct SpecObjects { + #[serde(rename = "SPEC-OBJECT", default)] + pub objects: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "SPEC-OBJECT")] +pub struct SpecObject { + #[serde(rename = "@IDENTIFIER")] + pub identifier: String, + + #[serde( + rename = "@LONG-NAME", + default, + skip_serializing_if = "Option::is_none" + )] + pub long_name: Option, + + #[serde(rename = "@DESC", default, skip_serializing_if = "Option::is_none")] + pub desc: Option, + + #[serde(rename = "TYPE", default, skip_serializing_if = "Option::is_none")] + pub object_type_ref: Option, + + #[serde(rename = "VALUES", default, skip_serializing_if = "Option::is_none")] + pub values: Option, +} + +#[derive(Debug, Clone, 
Serialize, Deserialize)] +#[serde(rename = "TYPE")] +pub struct SpecObjectTypeRef { + #[serde(rename = "SPEC-OBJECT-TYPE-REF")] + pub spec_object_type_ref: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "VALUES")] +pub struct Values { + #[serde(rename = "ATTRIBUTE-VALUE-STRING", default)] + pub string_values: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "ATTRIBUTE-VALUE-STRING")] +pub struct AttributeValueString { + #[serde(rename = "@THE-VALUE")] + pub the_value: String, + + #[serde(rename = "DEFINITION")] + pub definition: AttrDefinitionRef, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "DEFINITION")] +pub struct AttrDefinitionRef { + #[serde(rename = "ATTRIBUTE-DEFINITION-STRING-REF")] + pub attr_def_ref: String, +} + +// ── SPEC-RELATIONS ────────────────────────────────────────────────────── + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename = "SPEC-RELATIONS")] +pub struct SpecRelations { + #[serde(rename = "SPEC-RELATION", default)] + pub relations: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "SPEC-RELATION")] +pub struct SpecRelation { + #[serde(rename = "@IDENTIFIER")] + pub identifier: String, + + #[serde(rename = "TYPE", default, skip_serializing_if = "Option::is_none")] + pub relation_type_ref: Option, + + #[serde(rename = "SOURCE")] + pub source: SpecRelationEnd, + + #[serde(rename = "TARGET")] + pub target: SpecRelationEnd, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename = "TYPE")] +pub struct SpecRelationTypeRef { + #[serde(rename = "SPEC-RELATION-TYPE-REF")] + pub spec_relation_type_ref: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SpecRelationEnd { + #[serde(rename = "SPEC-OBJECT-REF")] + pub spec_object_ref: String, +} + +// ── Constants ─────────────────────────────────────────────────────────── + +pub const REQIF_NAMESPACE: &str = 
"http://www.omg.org/spec/ReqIF/20110401/reqif.xsd"; + +const DATATYPE_STRING_ID: &str = "DT-STRING"; +const ATTR_DEF_STATUS: &str = "ATTR-STATUS"; +const ATTR_DEF_TAGS: &str = "ATTR-TAGS"; +const ATTR_DEF_ARTIFACT_TYPE: &str = "ATTR-ARTIFACT-TYPE"; + +// ── Adapter ───────────────────────────────────────────────────────────── + +pub struct ReqIfAdapter { + supported: Vec, +} + +impl ReqIfAdapter { + pub fn new() -> Self { + Self { + supported: vec![], // accepts all types + } + } +} + +impl Default for ReqIfAdapter { + fn default() -> Self { + Self::new() + } +} + +impl Adapter for ReqIfAdapter { + fn id(&self) -> &str { + "reqif" + } + + fn name(&self) -> &str { + "ReqIF 1.2 XML" + } + + fn supported_types(&self) -> &[String] { + &self.supported + } + + fn import( + &self, + source: &AdapterSource, + _config: &AdapterConfig, + ) -> Result, Error> { + let xml_str = match source { + AdapterSource::Bytes(bytes) => std::str::from_utf8(bytes) + .map_err(|e| Error::Adapter(format!("invalid UTF-8: {e}")))? 
+ .to_string(), + AdapterSource::Path(path) => std::fs::read_to_string(path) + .map_err(|e| Error::Io(format!("{}: {e}", path.display())))?, + AdapterSource::Directory(dir) => { + return import_reqif_directory(dir); + } + }; + parse_reqif(&xml_str) + } + + fn export(&self, artifacts: &[Artifact], _config: &AdapterConfig) -> Result, Error> { + let reqif = build_reqif(artifacts); + serialize_reqif(&reqif) + } +} + +// ── Import ────────────────────────────────────────────────────────────── + +fn import_reqif_directory(dir: &std::path::Path) -> Result, Error> { + let mut artifacts = Vec::new(); + let entries = + std::fs::read_dir(dir).map_err(|e| Error::Io(format!("{}: {e}", dir.display())))?; + + for entry in entries { + let entry = entry.map_err(|e| Error::Io(e.to_string()))?; + let path = entry.path(); + if path + .extension() + .is_some_and(|ext| ext == "reqif" || ext == "xml") + { + let content = std::fs::read_to_string(&path) + .map_err(|e| Error::Io(format!("{}: {e}", path.display())))?; + match parse_reqif(&content) { + Ok(arts) => artifacts.extend(arts), + Err(e) => log::warn!("skipping {}: {e}", path.display()), + } + } else if path.is_dir() { + artifacts.extend(import_reqif_directory(&path)?); + } + } + + Ok(artifacts) +} + +/// Parse a ReqIF XML string into Rivet artifacts. +pub fn parse_reqif(xml: &str) -> Result, Error> { + let root: ReqIfRoot = + xml_from_str(xml).map_err(|e| Error::Adapter(format!("ReqIF XML parse error: {e}")))?; + + let content = &root.core_content.req_if_content; + + // Build lookup tables for types. 
+ let object_type_names: HashMap<&str, &str> = content + .spec_types + .object_types + .iter() + .map(|t| { + ( + t.identifier.as_str(), + t.long_name.as_deref().unwrap_or(&t.identifier), + ) + }) + .collect(); + + let relation_type_names: HashMap<&str, &str> = content + .spec_types + .relation_types + .iter() + .map(|t| { + ( + t.identifier.as_str(), + t.long_name.as_deref().unwrap_or(&t.identifier), + ) + }) + .collect(); + + // Build lookup: attr-def id -> long-name. + let mut attr_def_names: HashMap<&str, &str> = HashMap::new(); + for ot in &content.spec_types.object_types { + if let Some(attrs) = &ot.spec_attributes { + for ad in &attrs.string_attrs { + let name = ad.long_name.as_deref().unwrap_or(&ad.identifier); + attr_def_names.insert(ad.identifier.as_str(), name); + } + } + } + + // Parse SPEC-OBJECTS into Artifacts. + let mut artifacts: Vec = Vec::new(); + for obj in &content.spec_objects.objects { + let artifact_type = obj + .object_type_ref + .as_ref() + .and_then(|r| { + object_type_names + .get(r.spec_object_type_ref.as_str()) + .copied() + }) + .unwrap_or("unknown") + .to_string(); + + let mut status: Option = None; + let mut tags: Vec = Vec::new(); + let mut fields: BTreeMap = BTreeMap::new(); + let mut override_artifact_type: Option = None; + + if let Some(values) = &obj.values { + for av in &values.string_values { + let attr_name = attr_def_names + .get(av.definition.attr_def_ref.as_str()) + .copied() + .unwrap_or(&av.definition.attr_def_ref); + + match attr_name { + "status" => { + if !av.the_value.is_empty() { + status = Some(av.the_value.clone()); + } + } + "tags" => { + tags = av + .the_value + .split(',') + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect(); + } + "artifact-type" => { + if !av.the_value.is_empty() { + override_artifact_type = Some(av.the_value.clone()); + } + } + _ => { + fields.insert( + attr_name.to_string(), + serde_yaml::Value::String(av.the_value.clone()), + ); + } + } + } + } + + let artifact = 
Artifact { + id: obj.identifier.clone(), + artifact_type: override_artifact_type.unwrap_or(artifact_type), + title: obj.long_name.clone().unwrap_or_default(), + description: obj.desc.clone(), + status, + tags, + links: vec![], // filled in below from SPEC-RELATIONS + fields, + source_file: None, + }; + artifacts.push(artifact); + } + + // Build id -> index map using owned strings to avoid borrow conflicts. + let artifact_ids: HashMap = artifacts + .iter() + .enumerate() + .map(|(i, a)| (a.id.clone(), i)) + .collect(); + + // Parse SPEC-RELATIONS into Links on source artifacts. + for rel in &content.spec_relations.relations { + let link_type = rel + .relation_type_ref + .as_ref() + .and_then(|r| { + relation_type_names + .get(r.spec_relation_type_ref.as_str()) + .copied() + }) + .unwrap_or("traces-to") + .to_string(); + + let source_id = &rel.source.spec_object_ref; + let target_id = &rel.target.spec_object_ref; + + if let Some(&idx) = artifact_ids.get(source_id) { + artifacts[idx].links.push(Link { + link_type, + target: target_id.clone(), + }); + } + } + + Ok(artifacts) +} + +// ── Export ─────────────────────────────────────────────────────────────── + +/// Build a ReqIF document from Rivet artifacts. +pub fn build_reqif(artifacts: &[Artifact]) -> ReqIfRoot { + // Collect unique artifact types and link types. + let mut artifact_types: Vec = Vec::new(); + let mut link_types: Vec = Vec::new(); + + for a in artifacts { + if !artifact_types.contains(&a.artifact_type) { + artifact_types.push(a.artifact_type.clone()); + } + for l in &a.links { + if !link_types.contains(&l.link_type) { + link_types.push(l.link_type.clone()); + } + } + } + + // Collect unique extra field names across all artifacts. + let mut field_names: Vec = Vec::new(); + for a in artifacts { + for key in a.fields.keys() { + if !field_names.contains(key) { + field_names.push(key.clone()); + } + } + } + + // Build DATATYPE-DEFINITION-STRING. 
+ let datatypes = Datatypes { + string_types: vec![DatatypeDefinitionString { + identifier: DATATYPE_STRING_ID.into(), + long_name: Some("String".into()), + max_length: Some(65535), + }], + }; + + // Build SPEC-OBJECT-TYPEs — one per artifact type, each with standard + // attribute definitions for status, tags, artifact-type, plus any extra fields. + let object_types: Vec = artifact_types + .iter() + .map(|at| { + let type_id = format!("SOT-{at}"); + + let mut string_attrs = vec![ + AttributeDefinitionString { + identifier: ATTR_DEF_STATUS.into(), + long_name: Some("status".into()), + datatype_ref: Some(DatatypeRef { + datatype_ref: DATATYPE_STRING_ID.into(), + }), + }, + AttributeDefinitionString { + identifier: ATTR_DEF_TAGS.into(), + long_name: Some("tags".into()), + datatype_ref: Some(DatatypeRef { + datatype_ref: DATATYPE_STRING_ID.into(), + }), + }, + AttributeDefinitionString { + identifier: ATTR_DEF_ARTIFACT_TYPE.into(), + long_name: Some("artifact-type".into()), + datatype_ref: Some(DatatypeRef { + datatype_ref: DATATYPE_STRING_ID.into(), + }), + }, + ]; + + for fname in &field_names { + string_attrs.push(AttributeDefinitionString { + identifier: format!("ATTR-{fname}"), + long_name: Some(fname.clone()), + datatype_ref: Some(DatatypeRef { + datatype_ref: DATATYPE_STRING_ID.into(), + }), + }); + } + + SpecObjectType { + identifier: type_id, + long_name: Some(at.clone()), + spec_attributes: Some(SpecAttributes { string_attrs }), + } + }) + .collect(); + + // Build SPEC-RELATION-TYPEs. + let relation_types: Vec = link_types + .iter() + .map(|lt| { + let type_id = format!("SRT-{lt}"); + SpecRelationType { + identifier: type_id, + long_name: Some(lt.clone()), + } + }) + .collect(); + + // Build SPEC-OBJECTs. 
+ let objects: Vec = artifacts + .iter() + .map(|a| { + let type_ref_id = format!("SOT-{}", a.artifact_type); + + let mut string_values = vec![ + AttributeValueString { + the_value: a.status.clone().unwrap_or_default(), + definition: AttrDefinitionRef { + attr_def_ref: ATTR_DEF_STATUS.into(), + }, + }, + AttributeValueString { + the_value: a.tags.join(", "), + definition: AttrDefinitionRef { + attr_def_ref: ATTR_DEF_TAGS.into(), + }, + }, + AttributeValueString { + the_value: a.artifact_type.clone(), + definition: AttrDefinitionRef { + attr_def_ref: ATTR_DEF_ARTIFACT_TYPE.into(), + }, + }, + ]; + + for (key, value) in &a.fields { + let val_str = match value { + serde_yaml::Value::String(s) => s.clone(), + other => format!("{other:?}"), + }; + string_values.push(AttributeValueString { + the_value: val_str, + definition: AttrDefinitionRef { + attr_def_ref: format!("ATTR-{key}"), + }, + }); + } + + SpecObject { + identifier: a.id.clone(), + long_name: Some(a.title.clone()), + desc: a.description.clone(), + object_type_ref: Some(SpecObjectTypeRef { + spec_object_type_ref: type_ref_id, + }), + values: Some(Values { string_values }), + } + }) + .collect(); + + // Build SPEC-RELATIONs. 
+ let mut relations: Vec = Vec::new(); + let mut rel_counter = 0u64; + for a in artifacts { + for link in &a.links { + rel_counter += 1; + let type_ref_id = format!("SRT-{}", link.link_type); + relations.push(SpecRelation { + identifier: format!("REL-{rel_counter}"), + relation_type_ref: Some(SpecRelationTypeRef { + spec_relation_type_ref: type_ref_id, + }), + source: SpecRelationEnd { + spec_object_ref: a.id.clone(), + }, + target: SpecRelationEnd { + spec_object_ref: link.target.clone(), + }, + }); + } + } + + ReqIfRoot { + xmlns: REQIF_NAMESPACE.into(), + the_header: TheHeader { + req_if_header: ReqIfHeader { + identifier: "rivet-export".into(), + comment: Some("Generated by Rivet SDLC tool".into()), + creation_time: None, + repository_id: None, + req_if_tool_id: Some("rivet".into()), + req_if_version: Some("1.2".into()), + source_tool_id: Some("rivet".into()), + title: Some("Rivet ReqIF Export".into()), + }, + }, + core_content: CoreContent { + req_if_content: ReqIfContent { + datatypes, + spec_types: SpecTypes { + object_types, + relation_types, + }, + spec_objects: SpecObjects { objects }, + spec_relations: SpecRelations { relations }, + }, + }, + } +} + +/// Serialize a ReqIF document to XML bytes. +pub fn serialize_reqif(root: &ReqIfRoot) -> Result, Error> { + let xml_body = xml_to_string(root) + .map_err(|e| Error::Adapter(format!("ReqIF XML serialize error: {e}")))?; + + // Prepend the XML declaration that quick-xml's serializer omits. 
+ let mut output = String::with_capacity(xml_body.len() + 50); + output.push_str("\n"); + output.push_str(&xml_body); + + Ok(output.into_bytes()) +} + +#[cfg(test)] +mod tests { + use super::*; + + fn sample_artifacts() -> Vec { + vec![ + Artifact { + id: "REQ-001".into(), + artifact_type: "requirement".into(), + title: "Memory isolation".into(), + description: Some("The system shall enforce memory isolation.".into()), + status: Some("approved".into()), + tags: vec!["safety".into(), "core".into()], + links: vec![], + fields: { + let mut f = BTreeMap::new(); + f.insert("priority".into(), serde_yaml::Value::String("must".into())); + f + }, + source_file: None, + }, + Artifact { + id: "TC-001".into(), + artifact_type: "test-case".into(), + title: "Test memory isolation".into(), + description: None, + status: Some("draft".into()), + tags: vec![], + links: vec![Link { + link_type: "verifies".into(), + target: "REQ-001".into(), + }], + fields: BTreeMap::new(), + source_file: None, + }, + ] + } + + #[test] + #[cfg_attr(miri, ignore)] // quick-xml uses unsafe/SIMD internals that Miri cannot interpret + fn test_export_produces_valid_xml() { + let arts = sample_artifacts(); + let adapter = ReqIfAdapter::new(); + let config = AdapterConfig::default(); + let bytes = adapter.export(&arts, &config).unwrap(); + let xml = std::str::from_utf8(&bytes).unwrap(); + + assert!(xml.starts_with("")); + assert!(xml.contains("REQ-IF")); + assert!(xml.contains("THE-HEADER")); + assert!(xml.contains("SPEC-OBJECTS")); + assert!(xml.contains("SPEC-RELATIONS")); + assert!(xml.contains("SPEC-OBJECT-TYPE")); + assert!(xml.contains("SPEC-RELATION-TYPE")); + assert!(xml.contains(REQIF_NAMESPACE)); + } + + #[test] + #[cfg_attr(miri, ignore)] // quick-xml uses unsafe/SIMD internals that Miri cannot interpret + fn test_roundtrip() { + let original = sample_artifacts(); + let adapter = ReqIfAdapter::new(); + let config = AdapterConfig::default(); + + let bytes = adapter.export(&original, 
&config).unwrap(); + let reimported = adapter + .import(&AdapterSource::Bytes(bytes), &config) + .unwrap(); + + assert_eq!(reimported.len(), original.len()); + + for (orig, re) in original.iter().zip(reimported.iter()) { + assert_eq!(orig.id, re.id, "id mismatch"); + assert_eq!( + orig.artifact_type, re.artifact_type, + "artifact_type mismatch" + ); + assert_eq!(orig.title, re.title, "title mismatch"); + assert_eq!(orig.description, re.description, "description mismatch"); + assert_eq!(orig.status, re.status, "status mismatch"); + assert_eq!(orig.tags, re.tags, "tags mismatch"); + assert_eq!(orig.links.len(), re.links.len(), "links len mismatch"); + for (ol, rl) in orig.links.iter().zip(re.links.iter()) { + assert_eq!(ol.link_type, rl.link_type, "link_type mismatch"); + assert_eq!(ol.target, rl.target, "link target mismatch"); + } + assert_eq!(orig.fields, re.fields, "fields mismatch"); + } + } + + #[test] + #[cfg_attr(miri, ignore)] // quick-xml uses unsafe/SIMD internals that Miri cannot interpret + fn test_parse_minimal_reqif() { + let xml = r#" + + + + + + + + + + + + + SOT-req + + + + + +"#; + + let arts = parse_reqif(xml).unwrap(); + assert_eq!(arts.len(), 1); + assert_eq!(arts[0].id, "R-1"); + assert_eq!(arts[0].title, "First req"); + assert_eq!(arts[0].description, Some("A description".into())); + assert_eq!(arts[0].artifact_type, "requirement"); + } +} diff --git a/rivet-core/src/validate.rs b/rivet-core/src/validate.rs index 44842fe..e64d882 100644 --- a/rivet-core/src/validate.rs +++ b/rivet-core/src/validate.rs @@ -3,7 +3,7 @@ use crate::schema::{Cardinality, Schema, Severity}; use crate::store::Store; /// A single validation diagnostic. 
-#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct Diagnostic { pub severity: Severity, pub artifact_id: Option, diff --git a/rivet-core/src/wasm_runtime.rs b/rivet-core/src/wasm_runtime.rs new file mode 100644 index 0000000..6ea69ea --- /dev/null +++ b/rivet-core/src/wasm_runtime.rs @@ -0,0 +1,565 @@ +//! WASM Component Model adapter runtime. +//! +//! This module provides the ability to load and execute custom adapters +//! compiled as WebAssembly components. Each WASM adapter implements the +//! `pulseengine:rivet/adapter` WIT interface defined in `wit/adapter.wit`. +//! +//! # Architecture +//! +//! ```text +//! ┌──────────────┐ ┌─────────────────────┐ +//! │ Rivet Host │──────▶│ WasmAdapterRuntime │ +//! │ (rivet-cli) │ │ (wasmtime Engine) │ +//! └──────────────┘ └──────┬──────────────┘ +//! │ instantiate +//! ┌──────▼──────────────┐ +//! │ WasmAdapter │ +//! │ (Component instance)│ +//! │ impl Adapter trait │ +//! └──────────────────────┘ +//! ``` +//! +//! The [`WasmAdapterRuntime`] manages a shared `wasmtime::Engine` with +//! configurable resource limits. Individual [`WasmAdapter`] instances +//! wrap a compiled component and implement [`crate::adapter::Adapter`]. + +use std::path::{Path, PathBuf}; + +use wasmtime::component::{Component, Linker}; +use wasmtime::{Config, Engine, Store}; + +use crate::adapter::{Adapter, AdapterConfig, AdapterSource}; +use crate::error::Error; +use crate::model::Artifact; + +// --------------------------------------------------------------------------- +// Configuration +// --------------------------------------------------------------------------- + +/// Resource limits for the WASM runtime. +#[derive(Debug, Clone)] +pub struct WasmRuntimeConfig { + /// Maximum linear memory (bytes). `None` means unlimited. + pub max_memory_bytes: Option, + /// Fuel limit for metering execution. `None` disables fuel metering. + pub fuel: Option, + /// Enable WASI preview-2 support for the guest. 
+ pub wasi: bool, +} + +impl Default for WasmRuntimeConfig { + fn default() -> Self { + Self { + max_memory_bytes: Some(256 * 1024 * 1024), // 256 MiB + fuel: Some(1_000_000_000), // 1 billion ops + wasi: true, + } + } +} + +// --------------------------------------------------------------------------- +// Error types +// --------------------------------------------------------------------------- + +/// Errors specific to WASM adapter loading and execution. +#[derive(Debug, thiserror::Error)] +pub enum WasmError { + #[error("failed to create WASM engine: {0}")] + EngineCreation(String), + + #[error("failed to read component file '{path}': {source}")] + FileRead { + path: PathBuf, + source: std::io::Error, + }, + + #[error("failed to compile WASM component '{path}': {reason}")] + Compilation { path: PathBuf, reason: String }, + + #[error("failed to instantiate WASM component: {0}")] + Instantiation(String), + + #[error("WASM guest returned an error: {0}")] + Guest(String), + + #[error("type conversion error: {0}")] + Conversion(String), +} + +impl From for Error { + fn from(e: WasmError) -> Self { + Error::Adapter(e.to_string()) + } +} + +// --------------------------------------------------------------------------- +// Host state +// --------------------------------------------------------------------------- + +/// Per-instance host state passed into the wasmtime `Store`. +struct HostState { + /// WASI context for filesystem / stdio / clock access. + wasi: wasmtime_wasi::WasiCtx, + /// Resource table required by wasmtime-wasi. + table: wasmtime::component::ResourceTable, + /// Optional memory limiter for resource constraints. + limiter: Option, +} + +// Implement the WasiView trait so wasmtime-wasi can access its state. 
+impl wasmtime_wasi::WasiView for HostState { + fn ctx(&mut self) -> wasmtime_wasi::WasiCtxView<'_> { + wasmtime_wasi::WasiCtxView { + ctx: &mut self.wasi, + table: &mut self.table, + } + } +} + +// --------------------------------------------------------------------------- +// Runtime +// --------------------------------------------------------------------------- + +/// Shared WASM runtime that manages the engine and can load adapters. +/// +/// Create one `WasmAdapterRuntime` per application and use it to load +/// multiple adapter components. +pub struct WasmAdapterRuntime { + engine: Engine, + config: WasmRuntimeConfig, +} + +impl WasmAdapterRuntime { + /// Create a new runtime with the given configuration. + pub fn new(config: WasmRuntimeConfig) -> Result { + let mut engine_config = Config::new(); + engine_config.wasm_component_model(true); + + if config.fuel.is_some() { + engine_config.consume_fuel(true); + } + + let engine = + Engine::new(&engine_config).map_err(|e| WasmError::EngineCreation(e.to_string()))?; + + Ok(Self { engine, config }) + } + + /// Create a runtime with default configuration. + pub fn with_defaults() -> Result { + Self::new(WasmRuntimeConfig::default()) + } + + /// Load a WASM component from a file path and return a [`WasmAdapter`]. 
+ pub fn load_adapter(&self, path: &Path) -> Result { + let bytes = std::fs::read(path).map_err(|e| WasmError::FileRead { + path: path.to_path_buf(), + source: e, + })?; + + let component = + Component::from_binary(&self.engine, &bytes).map_err(|e| WasmError::Compilation { + path: path.to_path_buf(), + reason: e.to_string(), + })?; + + Ok(WasmAdapter { + engine: self.engine.clone(), + component, + runtime_config: self.config.clone(), + path: path.to_path_buf(), + }) + } +} + +// --------------------------------------------------------------------------- +// WasmAdapter +// --------------------------------------------------------------------------- + +/// A single WASM adapter component that implements the `Adapter` trait. +/// +/// Each `WasmAdapter` holds a compiled [`Component`] and creates fresh +/// `Store` instances per call to ensure isolation between invocations. +pub struct WasmAdapter { + engine: Engine, + component: Component, + runtime_config: WasmRuntimeConfig, + path: PathBuf, +} + +impl std::fmt::Debug for WasmAdapter { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("WasmAdapter") + .field("path", &self.path) + .field("runtime_config", &self.runtime_config) + .finish_non_exhaustive() + } +} + +impl WasmAdapter { + /// Create a fresh wasmtime [`Store`] with WASI and resource limits. + fn create_store(&self) -> Result, WasmError> { + let wasi = wasmtime_wasi::WasiCtxBuilder::new() + .inherit_stderr() + .build(); + + let limiter = self + .runtime_config + .max_memory_bytes + .map(|max| MemoryLimiter { max_memory: max }); + + let state = HostState { + wasi, + table: wasmtime::component::ResourceTable::new(), + limiter, + }; + + let mut store = Store::new(&self.engine, state); + + // Apply fuel limit. + if let Some(fuel) = self.runtime_config.fuel { + store + .set_fuel(fuel) + .map_err(|e| WasmError::Instantiation(e.to_string()))?; + } + + // Apply memory limit. 
+ if self.runtime_config.max_memory_bytes.is_some() { + store.limiter(|state| state.limiter.as_mut().unwrap()); + } + + Ok(store) + } + + /// Create a linker with WASI bindings added. + fn create_linker(&self) -> Result, WasmError> { + let mut linker = Linker::new(&self.engine); + wasmtime_wasi::p2::add_to_linker_sync(&mut linker) + .map_err(|e| WasmError::Instantiation(e.to_string()))?; + Ok(linker) + } + + /// Call the guest `id` function. + #[allow(dead_code)] + fn call_id(&self) -> Result { + let mut store = self.create_store()?; + let linker = self.create_linker()?; + let instance = linker + .instantiate(&mut store, &self.component) + .map_err(|e| WasmError::Instantiation(e.to_string()))?; + + // TODO: Use generated bindings from `wasmtime::component::bindgen!` + // once the WIT is finalized. For now, look up the function by name. + let func = instance + .get_func(&mut store, "id") + .ok_or_else(|| WasmError::Guest("adapter does not export 'id' function".into()))?; + + let mut results = [wasmtime::component::Val::String("".into())]; + func.call(&mut store, &[], &mut results) + .map_err(|e| WasmError::Guest(e.to_string()))?; + + match &results[0] { + wasmtime::component::Val::String(s) => Ok(s.to_string()), + other => Err(WasmError::Conversion(format!( + "expected string from id(), got {:?}", + other + ))), + } + } + + /// Call the guest `name` function. 
+ #[allow(dead_code)] + fn call_name(&self) -> Result { + let mut store = self.create_store()?; + let linker = self.create_linker()?; + let instance = linker + .instantiate(&mut store, &self.component) + .map_err(|e| WasmError::Instantiation(e.to_string()))?; + + let func = instance + .get_func(&mut store, "name") + .ok_or_else(|| WasmError::Guest("adapter does not export 'name' function".into()))?; + + let mut results = [wasmtime::component::Val::String("".into())]; + func.call(&mut store, &[], &mut results) + .map_err(|e| WasmError::Guest(e.to_string()))?; + + match &results[0] { + wasmtime::component::Val::String(s) => Ok(s.to_string()), + other => Err(WasmError::Conversion(format!( + "expected string from name(), got {:?}", + other + ))), + } + } + + /// Call the guest `supported-types` function. + #[allow(dead_code)] + fn call_supported_types(&self) -> Result, WasmError> { + let mut store = self.create_store()?; + let linker = self.create_linker()?; + let instance = linker + .instantiate(&mut store, &self.component) + .map_err(|e| WasmError::Instantiation(e.to_string()))?; + + let func = instance + .get_func(&mut store, "supported-types") + .ok_or_else(|| { + WasmError::Guest("adapter does not export 'supported-types' function".into()) + })?; + + // TODO: Proper deserialization of list result via generated bindings. + // For now, return an empty list as a placeholder. + let _ = func; + log::debug!("supported-types: using placeholder (empty list)"); + Ok(vec![]) + } + + /// Call the guest `import` function. + /// + /// This reads source data into bytes, sends them to the WASM guest, and + /// converts the returned artifacts back into the host model. 
+ fn call_import( + &self, + source: &AdapterSource, + config: &AdapterConfig, + ) -> Result, WasmError> { + let source_bytes = read_source_bytes(source) + .map_err(|e| WasmError::Guest(format!("failed to read adapter source: {e}")))?; + + let mut store = self.create_store()?; + let linker = self.create_linker()?; + let instance = linker + .instantiate(&mut store, &self.component) + .map_err(|e| WasmError::Instantiation(e.to_string()))?; + + let func = instance + .get_func(&mut store, "import") + .ok_or_else(|| WasmError::Guest("adapter does not export 'import' function".into()))?; + + // Build config entries as component values. + let config_entries: Vec<(String, String)> = config + .entries + .iter() + .map(|(k, v)| (k.clone(), v.clone())) + .collect(); + + // TODO: Build proper component-model values for the function arguments + // and parse the result, adapter-error> return type. + // This requires either `wasmtime::component::bindgen!` macro or manual + // Val construction matching the WIT types. + // + // Placeholder: log the call and return an error indicating this path + // is not yet fully wired up. + let _ = (func, source_bytes, config_entries); + Err(WasmError::Guest( + "WASM adapter import is not yet fully implemented — \ + the component was loaded and validated, but host-guest \ + data marshalling requires generated bindings" + .into(), + )) + } + + /// Call the guest `export` function. 
+ fn call_export( + &self, + artifacts: &[Artifact], + config: &AdapterConfig, + ) -> Result, WasmError> { + let mut store = self.create_store()?; + let linker = self.create_linker()?; + let instance = linker + .instantiate(&mut store, &self.component) + .map_err(|e| WasmError::Instantiation(e.to_string()))?; + + let func = instance + .get_func(&mut store, "export") + .ok_or_else(|| WasmError::Guest("adapter does not export 'export' function".into()))?; + + // TODO: Convert host Artifact list to component-model values, + // invoke the function, and parse result, adapter-error>. + let _ = (func, artifacts, config); + Err(WasmError::Guest( + "WASM adapter export is not yet fully implemented — \ + the component was loaded and validated, but host-guest \ + data marshalling requires generated bindings" + .into(), + )) + } +} + +// --------------------------------------------------------------------------- +// Adapter trait implementation +// --------------------------------------------------------------------------- + +impl Adapter for WasmAdapter { + fn id(&self) -> &str { + // The Adapter trait returns `&str`, but we need to call into WASM + // each time. We use a leaked Box to produce a stable &str. + // In production this would be cached at construction time. + // + // For now, return the file stem as a fallback identifier so the + // adapter is usable even before full WASM calls are wired up. + // TODO: call self.call_id() and cache the result during construction. + let stem = self + .path + .file_stem() + .and_then(|s| s.to_str()) + .unwrap_or("wasm-adapter"); + // SAFETY: We leak a small string once per adapter load. In practice + // adapters are loaded once at startup, so this is acceptable. + Box::leak(stem.to_string().into_boxed_str()) + } + + fn name(&self) -> &str { + // Same strategy as id() — use path-based fallback. 
+ let display = format!("WASM adapter ({})", self.path.display()); + Box::leak(display.into_boxed_str()) + } + + fn supported_types(&self) -> &[String] { + // TODO: Cache result of call_supported_types() during construction. + // Returning a static empty slice for now. + &[] + } + + fn import( + &self, + source: &AdapterSource, + config: &AdapterConfig, + ) -> Result, Error> { + self.call_import(source, config).map_err(Error::from) + } + + fn export(&self, artifacts: &[Artifact], config: &AdapterConfig) -> Result, Error> { + self.call_export(artifacts, config).map_err(Error::from) + } +} + +// --------------------------------------------------------------------------- +// Resource limiter +// --------------------------------------------------------------------------- + +/// Simple memory limiter for the WASM store. +struct MemoryLimiter { + max_memory: usize, +} + +impl wasmtime::ResourceLimiter for MemoryLimiter { + fn memory_growing( + &mut self, + _current: usize, + desired: usize, + _maximum: Option, + ) -> wasmtime::Result { + Ok(desired <= self.max_memory) + } + + fn table_growing( + &mut self, + _current: usize, + _desired: usize, + _maximum: Option, + ) -> wasmtime::Result { + Ok(true) + } +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/// Read source data into a byte vector, regardless of the source variant. +fn read_source_bytes(source: &AdapterSource) -> Result, Error> { + match source { + AdapterSource::Bytes(bytes) => Ok(bytes.clone()), + AdapterSource::Path(path) => { + std::fs::read(path).map_err(|e| Error::Io(format!("{}: {}", path.display(), e))) + } + AdapterSource::Directory(dir) => { + // For directory sources, concatenate all files. + // A real implementation would pass file listings to the guest. 
+ let mut combined = Vec::new(); + let entries = std::fs::read_dir(dir) + .map_err(|e| Error::Io(format!("{}: {}", dir.display(), e)))?; + for entry in entries { + let entry = entry.map_err(|e| Error::Io(e.to_string()))?; + let path = entry.path(); + if path.is_file() { + let bytes = std::fs::read(&path) + .map_err(|e| Error::Io(format!("{}: {}", path.display(), e)))?; + combined.extend(bytes); + } + } + Ok(combined) + } + } +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn default_config_has_sane_limits() { + let config = WasmRuntimeConfig::default(); + assert_eq!(config.max_memory_bytes, Some(256 * 1024 * 1024)); + assert_eq!(config.fuel, Some(1_000_000_000)); + assert!(config.wasi); + } + + #[test] + fn runtime_creation_succeeds() { + let runtime = WasmAdapterRuntime::with_defaults(); + assert!(runtime.is_ok(), "runtime creation should succeed"); + } + + #[test] + fn load_nonexistent_file_returns_error() { + let runtime = WasmAdapterRuntime::with_defaults().unwrap(); + let result = runtime.load_adapter(Path::new("/nonexistent/adapter.wasm")); + assert!(result.is_err()); + match result.unwrap_err() { + WasmError::FileRead { path, .. } => { + assert_eq!(path, Path::new("/nonexistent/adapter.wasm")); + } + other => panic!("expected FileRead error, got: {other}"), + } + } + + #[test] + fn load_invalid_wasm_returns_compilation_error() { + let runtime = WasmAdapterRuntime::with_defaults().unwrap(); + // Write garbage bytes to a temp file + let dir = std::env::temp_dir().join("rivet-wasm-test"); + std::fs::create_dir_all(&dir).unwrap(); + let path = dir.join("bad.wasm"); + std::fs::write(&path, b"not a wasm component").unwrap(); + + let result = runtime.load_adapter(&path); + assert!(result.is_err()); + match result.unwrap_err() { + WasmError::Compilation { path: p, .. 
} => { + assert_eq!(p, path); + } + other => panic!("expected Compilation error, got: {other}"), + } + + // Clean up + let _ = std::fs::remove_dir_all(&dir); + } + + #[test] + fn wasm_error_converts_to_core_error() { + let wasm_err = WasmError::Guest("test error".into()); + let core_err: Error = wasm_err.into(); + match core_err { + Error::Adapter(msg) => assert!(msg.contains("test error")), + other => panic!("expected Adapter error, got: {other:?}"), + } + } +} diff --git a/rivet-core/tests/integration.rs b/rivet-core/tests/integration.rs index 5484274..ca93bc4 100644 --- a/rivet-core/tests/integration.rs +++ b/rivet-core/tests/integration.rs @@ -7,11 +7,13 @@ use std::collections::BTreeMap; use std::path::PathBuf; use rivet_core::adapter::{Adapter, AdapterConfig, AdapterSource}; +use rivet_core::diff::{ArtifactDiff, DiagnosticDiff}; use rivet_core::formats::generic::GenericYamlAdapter; use rivet_core::links::LinkGraph; use rivet_core::matrix::{self, Direction}; use rivet_core::model::{Artifact, Link}; use rivet_core::query::{self, Query}; +use rivet_core::reqif::ReqIfAdapter; use rivet_core::schema::{Schema, Severity}; use rivet_core::store::Store; use rivet_core::validate; @@ -223,13 +225,13 @@ fn test_schema_merge_preserves_types() { "sw-req", "sw-arch-component", "sw-detail-design", - "unit-test", - "integration-test", - "sw-qual-test", - "sys-integration-test", - "sys-qual-test", - "test-execution", - "test-verdict", + "unit-verification", + "sw-integration-verification", + "sw-verification", + "sys-integration-verification", + "sys-verification", + "verification-execution", + "verification-verdict", ]; for t in &aspice_types { assert!( @@ -269,6 +271,48 @@ fn test_schema_merge_preserves_types() { assert_eq!(schema.inverse_of("result-of"), Some("has-result")); } +// ── Cybersecurity schema merge ─────────────────────────────────────────── + +/// The cybersecurity schema loads and merges with common + aspice. 
+#[test] +fn test_cybersecurity_schema_merge() { + let schema = load_schema_files(&["common", "aspice", "cybersecurity"]); + + // Cybersecurity types + let sec_types = [ + "asset", + "threat-scenario", + "risk-assessment", + "cybersecurity-goal", + "cybersecurity-req", + "cybersecurity-design", + "cybersecurity-implementation", + "cybersecurity-verification", + ]; + for t in &sec_types { + assert!( + schema.artifact_type(t).is_some(), + "merged schema must contain cybersecurity type '{t}'" + ); + } + + // Cybersecurity link types + assert!(schema.link_type("threatens").is_some()); + assert!(schema.link_type("assesses").is_some()); + + // Inverse mappings + assert_eq!(schema.inverse_of("threatens"), Some("threatened-by")); + assert_eq!(schema.inverse_of("assesses"), Some("assessed-by")); + + // ASPICE types still present + assert!(schema.artifact_type("sw-req").is_some()); + assert!(schema.artifact_type("unit-verification").is_some()); + + // Common link types still present + assert!(schema.link_type("mitigates").is_some()); + assert!(schema.link_type("verifies").is_some()); +} + // ── Traceability matrix ───────────────────────────────────────────────── /// Build a store with known artifacts and links, compute matrix, verify coverage. @@ -644,3 +688,403 @@ fn test_store_upsert_type_change() { assert_eq!(store.by_type("hazard").len(), 1); assert_eq!(store.len(), 1); } + +// ── ReqIF roundtrip ───────────────────────────────────────────────────── + +/// Create artifacts with links and fields, export to ReqIF XML, reimport, +/// verify that all data survives the round-trip. 
+#[test] +fn test_reqif_roundtrip() { + let original = vec![ + make_artifact_full( + "REQ-001", + "requirement", + "Memory isolation requirement", + Some("approved"), + &["safety", "core"], + vec![], + { + let mut f = BTreeMap::new(); + f.insert("priority".into(), serde_yaml::Value::String("must".into())); + f + }, + ), + make_artifact_full( + "REQ-002", + "requirement", + "Access control", + Some("draft"), + &["security"], + vec![Link { + link_type: "derives-from".into(), + target: "REQ-001".into(), + }], + BTreeMap::new(), + ), + make_artifact_full( + "TC-001", + "test-case", + "Verify memory isolation", + None, + &[], + vec![Link { + link_type: "verifies".into(), + target: "REQ-001".into(), + }], + BTreeMap::new(), + ), + ]; + + let adapter = ReqIfAdapter::new(); + let config = AdapterConfig::default(); + + // Export + let xml_bytes = adapter.export(&original, &config).expect("export to ReqIF"); + let xml_str = std::str::from_utf8(&xml_bytes).expect("valid utf-8"); + + // Verify XML structure + assert!( + xml_str.contains(""), + "must have XML declaration" + ); + assert!(xml_str.contains("REQ-IF"), "must have REQ-IF root element"); + assert!( + xml_str.contains("THE-HEADER"), + "must have THE-HEADER element" + ); + assert!( + xml_str.contains("SPEC-OBJECTS"), + "must have SPEC-OBJECTS element" + ); + assert!( + xml_str.contains("SPEC-RELATIONS"), + "must have SPEC-RELATIONS element" + ); + assert!( + xml_str.contains("SPEC-OBJECT-TYPE"), + "must have SPEC-OBJECT-TYPE" + ); + assert!( + xml_str.contains("SPEC-RELATION-TYPE"), + "must have SPEC-RELATION-TYPE" + ); + assert!( + xml_str.contains("http://www.omg.org/spec/ReqIF/20110401/reqif.xsd"), + "must use ReqIF 1.2 namespace" + ); + assert!( + xml_str.contains("DATATYPE-DEFINITION-STRING"), + "must have DATATYPES" + ); + + // Reimport + let reimported = adapter + .import(&AdapterSource::Bytes(xml_bytes), &config) + .expect("reimport from ReqIF"); + + assert_eq!( + reimported.len(), + original.len(), + "artifact 
count must match" + ); + + for (orig, re) in original.iter().zip(reimported.iter()) { + assert_eq!(orig.id, re.id, "id mismatch for {}", orig.id); + assert_eq!( + orig.artifact_type, re.artifact_type, + "artifact_type mismatch for {}", + orig.id + ); + assert_eq!(orig.title, re.title, "title mismatch for {}", orig.id); + assert_eq!( + orig.description, re.description, + "description mismatch for {}", + orig.id + ); + assert_eq!(orig.status, re.status, "status mismatch for {}", orig.id); + assert_eq!(orig.tags, re.tags, "tags mismatch for {}", orig.id); + assert_eq!( + orig.links.len(), + re.links.len(), + "links count mismatch for {}", + orig.id + ); + for (ol, rl) in orig.links.iter().zip(re.links.iter()) { + assert_eq!(ol.link_type, rl.link_type, "link_type mismatch"); + assert_eq!(ol.target, rl.target, "link target mismatch"); + } + assert_eq!(orig.fields, re.fields, "fields mismatch for {}", orig.id); + } +} + +/// Verify that ReqIF-exported artifacts can be loaded into a Store and +/// participate in link-graph analysis. +#[test] +fn test_reqif_store_integration() { + let artifacts = vec![ + make_artifact_full( + "SYS-001", + "system-req", + "System requirement", + Some("approved"), + &[], + vec![], + BTreeMap::new(), + ), + make_artifact_full( + "SW-001", + "sw-req", + "Software requirement", + Some("approved"), + &[], + vec![Link { + link_type: "derives-from".into(), + target: "SYS-001".into(), + }], + BTreeMap::new(), + ), + ]; + + let adapter = ReqIfAdapter::new(); + let config = AdapterConfig::default(); + + // Export then reimport via ReqIF. + let xml_bytes = adapter.export(&artifacts, &config).expect("export"); + let reimported = adapter + .import(&AdapterSource::Bytes(xml_bytes), &config) + .expect("reimport"); + + // Load into a store. 
+ let mut store = Store::new(); + for a in reimported { + store.upsert(a); + } + + assert_eq!(store.len(), 2); + assert!(store.contains("SYS-001")); + assert!(store.contains("SW-001")); + + let sw = store.get("SW-001").unwrap(); + assert_eq!(sw.links.len(), 1); + assert_eq!(sw.links[0].link_type, "derives-from"); + assert_eq!(sw.links[0].target, "SYS-001"); +} + +// ── Diff: identical stores ────────────────────────────────────────────── + +/// Two identical stores should produce an empty diff. +#[test] +fn test_diff_identical_stores() { + let mut base = Store::new(); + base.insert(make_artifact("D-1", "loss", "Loss one")) + .unwrap(); + base.insert(make_artifact("D-2", "hazard", "Hazard one")) + .unwrap(); + + let mut head = Store::new(); + head.insert(make_artifact("D-1", "loss", "Loss one")) + .unwrap(); + head.insert(make_artifact("D-2", "hazard", "Hazard one")) + .unwrap(); + + let diff = ArtifactDiff::compute(&base, &head); + assert!(diff.is_empty(), "identical stores must produce empty diff"); + assert_eq!(diff.unchanged, 2); + assert_eq!( + diff.summary(), + "0 added, 0 removed, 0 modified, 2 unchanged" + ); +} + +// ── Diff: added artifact ──────────────────────────────────────────────── + +/// An artifact present in head but not in base should appear as added. 
+#[test] +fn test_diff_added_artifact() { + let mut base = Store::new(); + base.insert(make_artifact("D-1", "loss", "Loss one")) + .unwrap(); + + let mut head = Store::new(); + head.insert(make_artifact("D-1", "loss", "Loss one")) + .unwrap(); + head.insert(make_artifact("D-2", "hazard", "Hazard new")) + .unwrap(); + + let diff = ArtifactDiff::compute(&base, &head); + assert!(!diff.is_empty()); + assert_eq!(diff.added, vec!["D-2".to_string()]); + assert!(diff.removed.is_empty()); + assert!(diff.modified.is_empty()); + assert_eq!(diff.unchanged, 1); +} + +// ── Diff: removed artifact ────────────────────────────────────────────── + +/// An artifact present in base but not in head should appear as removed. +#[test] +fn test_diff_removed_artifact() { + let mut base = Store::new(); + base.insert(make_artifact("D-1", "loss", "Loss one")) + .unwrap(); + base.insert(make_artifact("D-2", "hazard", "Hazard one")) + .unwrap(); + + let mut head = Store::new(); + head.insert(make_artifact("D-1", "loss", "Loss one")) + .unwrap(); + + let diff = ArtifactDiff::compute(&base, &head); + assert!(!diff.is_empty()); + assert!(diff.added.is_empty()); + assert_eq!(diff.removed, vec!["D-2".to_string()]); + assert!(diff.modified.is_empty()); + assert_eq!(diff.unchanged, 1); +} + +// ── Diff: modified artifact (title, status, links, fields) ────────────── + +/// Artifacts that exist in both stores but differ structurally should appear +/// as modified with all changed fields recorded. 
+#[test] +fn test_diff_modified_artifact() { + let mut base = Store::new(); + base.insert(make_artifact_full( + "M-1", + "requirement", + "Old title", + Some("draft"), + &["safety"], + vec![Link { + link_type: "satisfies".into(), + target: "M-2".into(), + }], + { + let mut f = BTreeMap::new(); + f.insert( + "priority".into(), + serde_yaml::Value::String("should".into()), + ); + f + }, + )) + .unwrap(); + + let mut head = Store::new(); + head.insert(make_artifact_full( + "M-1", + "requirement", + "New title", + Some("approved"), + &["safety", "core"], + vec![ + Link { + link_type: "satisfies".into(), + target: "M-2".into(), + }, + Link { + link_type: "derives-from".into(), + target: "M-3".into(), + }, + ], + { + let mut f = BTreeMap::new(); + f.insert("priority".into(), serde_yaml::Value::String("must".into())); + f + }, + )) + .unwrap(); + + let diff = ArtifactDiff::compute(&base, &head); + assert!(!diff.is_empty()); + assert!(diff.added.is_empty()); + assert!(diff.removed.is_empty()); + assert_eq!(diff.modified.len(), 1); + + let change = &diff.modified[0]; + assert_eq!(change.id, "M-1"); + + // Title changed + assert_eq!( + change.title_changed, + Some(("Old title".into(), "New title".into())) + ); + + // Status changed + assert_eq!( + change.status_changed, + Some((Some("draft".into()), Some("approved".into()))) + ); + + // Tags: "core" added, nothing removed + assert_eq!(change.tags_added, vec!["core".to_string()]); + assert!(change.tags_removed.is_empty()); + + // Links: derives-from -> M-3 added, nothing removed + assert_eq!(change.links_added.len(), 1); + assert_eq!(change.links_added[0].link_type, "derives-from"); + assert_eq!(change.links_added[0].target, "M-3"); + assert!(change.links_removed.is_empty()); + + // Fields: priority changed + assert_eq!(change.fields_changed, vec!["priority".to_string()]); + + // Description unchanged (both have one via make_artifact_full) + assert!(!change.description_changed); +} + +// ── Diff: diagnostic changes 
──────────────────────────────────────────── + +/// Diagnostics that appear only in head are "new"; those only in base are +/// "resolved". +#[test] +fn test_diff_diagnostic_changes() { + let base_diags = vec![ + validate::Diagnostic { + severity: Severity::Error, + artifact_id: Some("X-1".into()), + rule: "broken-link".into(), + message: "link target missing".into(), + }, + validate::Diagnostic { + severity: Severity::Warning, + artifact_id: Some("X-2".into()), + rule: "allowed-values".into(), + message: "bad value".into(), + }, + ]; + + let head_diags = vec![ + // The error on X-1 is resolved (not present in head) + // A new error appears on X-3 + validate::Diagnostic { + severity: Severity::Error, + artifact_id: Some("X-3".into()), + rule: "required-field".into(), + message: "missing field".into(), + }, + // The warning on X-2 persists + validate::Diagnostic { + severity: Severity::Warning, + artifact_id: Some("X-2".into()), + rule: "allowed-values".into(), + message: "bad value".into(), + }, + ]; + + let ddiff = DiagnosticDiff::compute(&base_diags, &head_diags); + + assert_eq!(ddiff.new_errors.len(), 1); + assert_eq!(ddiff.new_errors[0].artifact_id.as_deref(), Some("X-3")); + + assert_eq!(ddiff.resolved_errors.len(), 1); + assert_eq!(ddiff.resolved_errors[0].artifact_id.as_deref(), Some("X-1")); + + assert!(ddiff.new_warnings.is_empty()); + assert!(ddiff.resolved_warnings.is_empty()); + + assert_eq!( + ddiff.summary(), + "1 new errors, 1 resolved errors, 0 new warnings, 0 resolved warnings" + ); +} diff --git a/rivet-core/tests/oslc_integration.rs b/rivet-core/tests/oslc_integration.rs new file mode 100644 index 0000000..46ca018 --- /dev/null +++ b/rivet-core/tests/oslc_integration.rs @@ -0,0 +1,872 @@ +//! Integration tests for the OSLC client module. +//! +//! These tests use `wiremock` to spin up a local mock HTTP server that +//! simulates an OSLC-compliant ALM tool. Each test exercises a different +//! 
aspect of the OSLC protocol: catalog discovery, query, CRUD operations, +//! pull via the sync adapter, and error handling. + +#![cfg(feature = "oslc")] + +use serde_json::json; +use wiremock::matchers::{header, method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +use rivet_core::oslc::{OslcClient, OslcClientConfig, OslcSyncAdapter, SyncAdapter}; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/// Build an `OslcClient` pointed at the given mock server base URL. +fn client_for(base_url: &str) -> OslcClient { + let config = OslcClientConfig::new(base_url); + OslcClient::new(config).expect("client creation should succeed") +} + +/// A realistic OSLC Service Provider Catalog in JSON-LD. +fn catalog_json(base: &str) -> serde_json::Value { + json!({ + "@id": format!("{base}/catalog"), + "dcterms:title": "Rivet Test Catalog", + "dcterms:description": "Mock OSLC catalog for integration testing", + "service_providers": [ + { + "@id": format!("{base}/sp/project-alpha"), + "dcterms:title": "Project Alpha", + "services": [ + { + "domain": "http://open-services.net/ns/rm#", + "query_capabilities": [ + { + "dcterms:title": "Requirement Query Capability", + "query_base": format!("{base}/rm/query"), + "resource_types": [ + "http://open-services.net/ns/rm#Requirement" + ] + } + ], + "creation_factories": [ + { + "dcterms:title": "Requirement Creation Factory", + "creation": format!("{base}/rm/create"), + "resource_types": [ + "http://open-services.net/ns/rm#Requirement" + ] + } + ] + }, + { + "domain": "http://open-services.net/ns/qm#", + "query_capabilities": [ + { + "dcterms:title": "TestCase Query Capability", + "query_base": format!("{base}/qm/query"), + "resource_types": [ + "http://open-services.net/ns/qm#TestCase" + ] + } + ], + "creation_factories": [] + } + ] + } + ] + }) +} + +/// A single OSLC Requirement resource 
in JSON-LD. +fn requirement_json(base: &str, id: &str, title: &str, desc: &str) -> serde_json::Value { + json!({ + "@context": { + "dcterms": "http://purl.org/dc/terms/", + "oslc": "http://open-services.net/ns/core#", + "oslc_rm": "http://open-services.net/ns/rm#" + }, + "@id": format!("{base}/rm/resources/{id}"), + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": id, + "dcterms:title": title, + "dcterms:description": desc, + "oslc_rm:elaboratedBy": [ + { "@id": format!("{base}/rm/resources/ELAB-001") } + ], + "oslc_rm:satisfiedBy": [], + "oslc_rm:trackedBy": [] + }) +} + +/// A query response containing multiple requirement members. +fn query_response_json(base: &str) -> serde_json::Value { + json!({ + "total_count": 2, + "next_page": null, + "members": [ + { + "@context": { + "dcterms": "http://purl.org/dc/terms/", + "oslc_rm": "http://open-services.net/ns/rm#" + }, + "@id": format!("{base}/rm/resources/REQ-001"), + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "REQ-001", + "dcterms:title": "Braking System Safety", + "dcterms:description": "The braking system shall ensure safe deceleration under all conditions." + }, + { + "@context": { + "dcterms": "http://purl.org/dc/terms/", + "oslc_rm": "http://open-services.net/ns/rm#" + }, + "@id": format!("{base}/rm/resources/REQ-002"), + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "REQ-002", + "dcterms:title": "Steering Redundancy", + "dcterms:description": "The steering system shall provide redundant control paths." 
+ } + ] + }) +} + +// --------------------------------------------------------------------------- +// Discovery: Service Provider Catalog +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_discover_service_provider_catalog() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + Mock::given(method("GET")) + .and(path("/")) + .and(header("Accept", "application/ld+json")) + .and(header("OSLC-Core-Version", "2.0")) + .respond_with(ResponseTemplate::new(200).set_body_json(catalog_json(&base))) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let catalog = client.discover().await.expect("discovery should succeed"); + + assert_eq!(catalog.title.as_deref(), Some("Rivet Test Catalog")); + assert_eq!(catalog.service_providers.len(), 1); + + let sp = &catalog.service_providers[0]; + assert_eq!(sp.title.as_deref(), Some("Project Alpha")); + assert_eq!(sp.services.len(), 2); + + // RM service + let rm_svc = &sp.services[0]; + assert_eq!( + rm_svc.domain.as_deref(), + Some("http://open-services.net/ns/rm#") + ); + assert_eq!(rm_svc.query_capabilities.len(), 1); + assert_eq!(rm_svc.creation_factories.len(), 1); + assert!( + rm_svc.query_capabilities[0] + .query_base + .as_ref() + .unwrap() + .ends_with("/rm/query") + ); + + // QM service + let qm_svc = &sp.services[1]; + assert_eq!( + qm_svc.domain.as_deref(), + Some("http://open-services.net/ns/qm#") + ); + assert_eq!(qm_svc.query_capabilities.len(), 1); +} + +// --------------------------------------------------------------------------- +// Query: list requirements +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_query_requirements() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + Mock::given(method("GET")) + .and(path("/rm/query")) + .and(header("Accept", "application/ld+json")) + 
.respond_with(ResponseTemplate::new(200).set_body_json(query_response_json(&base))) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let query_url = format!("{base}/rm/query"); + let response = client + .query(&query_url, "", "") + .await + .expect("query should succeed"); + + assert_eq!(response.total_count, Some(2)); + assert!(response.next_page.is_none()); + assert_eq!(response.members.len(), 2); + + // Verify first member has expected identifier + let first = &response.members[0]; + assert_eq!(first["dcterms:identifier"].as_str(), Some("REQ-001")); + assert_eq!( + first["dcterms:title"].as_str(), + Some("Braking System Safety") + ); +} + +#[tokio::test] +async fn test_query_with_where_and_select() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + // Expect the query to include oslc.where and oslc.select parameters + Mock::given(method("GET")) + .and(path("/rm/query")) + .and(query_param("oslc.where", "dcterms:identifier=\"REQ-001\"")) + .and(query_param( + "oslc.select", + "dcterms:title,dcterms:description", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "total_count": 1, + "members": [ + { + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "REQ-001", + "dcterms:title": "Braking System Safety", + "dcterms:description": "Safe braking under all conditions." 
+ } + ] + }))) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let query_url = format!("{base}/rm/query"); + let response = client + .query( + &query_url, + "dcterms:identifier=\"REQ-001\"", + "dcterms:title,dcterms:description", + ) + .await + .expect("filtered query should succeed"); + + assert_eq!(response.total_count, Some(1)); + assert_eq!(response.members.len(), 1); + assert_eq!( + response.members[0]["dcterms:identifier"].as_str(), + Some("REQ-001") + ); +} + +// --------------------------------------------------------------------------- +// GET single resource +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_get_single_resource() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + let req_json = requirement_json( + &base, + "REQ-042", + "Thermal Protection", + "The system shall withstand temperatures up to 85C.", + ); + + Mock::given(method("GET")) + .and(path("/rm/resources/REQ-042")) + .and(header("Accept", "application/ld+json")) + .and(header("OSLC-Core-Version", "2.0")) + .respond_with(ResponseTemplate::new(200).set_body_json(req_json)) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let resource_url = format!("{base}/rm/resources/REQ-042"); + let value = client + .get_resource(&resource_url) + .await + .expect("GET resource should succeed"); + + assert_eq!(value["dcterms:identifier"].as_str(), Some("REQ-042")); + assert_eq!(value["dcterms:title"].as_str(), Some("Thermal Protection")); + assert_eq!( + value["@type"][0].as_str(), + Some("http://open-services.net/ns/rm#Requirement") + ); + // Verify the elaboratedBy link is present + let elab = &value["oslc_rm:elaboratedBy"]; + assert!(elab.is_array()); + assert_eq!( + elab[0]["@id"].as_str().unwrap(), + format!("{base}/rm/resources/ELAB-001") + ); +} + +// --------------------------------------------------------------------------- +// 
CREATE resource (POST) +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_create_resource() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + let new_req = json!({ + "@context": { + "dcterms": "http://purl.org/dc/terms/", + "oslc_rm": "http://open-services.net/ns/rm#" + }, + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "REQ-NEW-001", + "dcterms:title": "New Requirement via OSLC", + "dcterms:description": "Created through the OSLC creation factory." + }); + + let created_response = json!({ + "@context": { + "dcterms": "http://purl.org/dc/terms/", + "oslc_rm": "http://open-services.net/ns/rm#" + }, + "@id": format!("{base}/rm/resources/REQ-NEW-001"), + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "REQ-NEW-001", + "dcterms:title": "New Requirement via OSLC", + "dcterms:description": "Created through the OSLC creation factory." 
+ }); + + Mock::given(method("POST")) + .and(path("/rm/create")) + .and(header("Content-Type", "application/ld+json")) + .and(header("OSLC-Core-Version", "2.0")) + .respond_with( + ResponseTemplate::new(201) + .append_header("Location", format!("{base}/rm/resources/REQ-NEW-001")) + .set_body_json(created_response), + ) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let factory_url = format!("{base}/rm/create"); + let result = client + .create_resource(&factory_url, &new_req) + .await + .expect("POST create should succeed"); + + assert_eq!(result["dcterms:identifier"].as_str(), Some("REQ-NEW-001")); + assert_eq!( + result["@id"].as_str(), + Some(format!("{base}/rm/resources/REQ-NEW-001").as_str()) + ); +} + +// --------------------------------------------------------------------------- +// UPDATE resource (PUT) +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_update_resource() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + let updated_req = json!({ + "@context": { + "dcterms": "http://purl.org/dc/terms/", + "oslc_rm": "http://open-services.net/ns/rm#" + }, + "@id": format!("{base}/rm/resources/REQ-001"), + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "REQ-001", + "dcterms:title": "Braking System Safety (Revised)", + "dcterms:description": "Updated description with tighter constraints." 
+ }); + + let response_body = updated_req.clone(); + + Mock::given(method("PUT")) + .and(path("/rm/resources/REQ-001")) + .and(header("Content-Type", "application/ld+json")) + .and(header("OSLC-Core-Version", "2.0")) + .respond_with(ResponseTemplate::new(200).set_body_json(response_body)) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let resource_url = format!("{base}/rm/resources/REQ-001"); + let result = client + .update_resource(&resource_url, &updated_req) + .await + .expect("PUT update should succeed"); + + assert_eq!( + result["dcterms:title"].as_str(), + Some("Braking System Safety (Revised)") + ); + assert_eq!( + result["dcterms:description"].as_str(), + Some("Updated description with tighter constraints.") + ); +} + +// --------------------------------------------------------------------------- +// Pull via SyncAdapter +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_pull_converts_to_artifacts() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + Mock::given(method("GET")) + .and(path("/rm/query")) + .respond_with(ResponseTemplate::new(200).set_body_json(query_response_json(&base))) + .expect(1) + .mount(&mock_server) + .await; + + let config = OslcClientConfig::new(&base); + let adapter = OslcSyncAdapter::from_config(config).expect("adapter creation should succeed"); + + let query_url = format!("{base}/rm/query"); + let artifacts = adapter.pull(&query_url).await.expect("pull should succeed"); + + assert_eq!(artifacts.len(), 2); + + // First artifact + assert_eq!(artifacts[0].id, "REQ-001"); + assert_eq!(artifacts[0].artifact_type, "requirement"); + assert_eq!(artifacts[0].title, "Braking System Safety"); + assert_eq!( + artifacts[0].description.as_deref(), + Some("The braking system shall ensure safe deceleration under all conditions.") + ); + + // Second artifact + assert_eq!(artifacts[1].id, "REQ-002"); + 
assert_eq!(artifacts[1].artifact_type, "requirement"); + assert_eq!(artifacts[1].title, "Steering Redundancy"); +} + +#[tokio::test] +async fn test_pull_with_mixed_resource_types() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + let mixed_response = json!({ + "total_count": 3, + "members": [ + { + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "REQ-010", + "dcterms:title": "Sensor Accuracy" + }, + { + "@type": ["http://open-services.net/ns/qm#TestCase"], + "dcterms:identifier": "TC-010", + "dcterms:title": "Sensor Accuracy Verification", + "oslc_qm:validatesRequirement": [ + { "@id": format!("{base}/rm/resources/REQ-010") } + ] + }, + { + "@type": ["http://open-services.net/ns/cm#ChangeRequest"], + "dcterms:identifier": "CR-010", + "dcterms:title": "Calibrate sensor threshold", + "dcterms:description": "Adjust the threshold for the proximity sensor.", + "oslc_cm:status": "in-progress", + "oslc_cm:implementsRequirement": [ + { "@id": format!("{base}/rm/resources/REQ-010") } + ] + } + ] + }); + + Mock::given(method("GET")) + .and(path("/mixed/query")) + .respond_with(ResponseTemplate::new(200).set_body_json(mixed_response)) + .expect(1) + .mount(&mock_server) + .await; + + let config = OslcClientConfig::new(&base); + let adapter = OslcSyncAdapter::from_config(config).expect("adapter creation should succeed"); + + let query_url = format!("{base}/mixed/query"); + let artifacts = adapter.pull(&query_url).await.expect("pull should succeed"); + + assert_eq!(artifacts.len(), 3); + + // Requirement + assert_eq!(artifacts[0].artifact_type, "requirement"); + assert_eq!(artifacts[0].id, "REQ-010"); + + // TestCase + assert_eq!(artifacts[1].artifact_type, "test-case"); + assert_eq!(artifacts[1].id, "TC-010"); + assert_eq!(artifacts[1].links.len(), 1); + assert_eq!(artifacts[1].links[0].link_type, "validates"); + assert_eq!(artifacts[1].links[0].target, "REQ-010"); + + // ChangeRequest + 
assert_eq!(artifacts[2].artifact_type, "change-request"); + assert_eq!(artifacts[2].id, "CR-010"); + assert_eq!(artifacts[2].status.as_deref(), Some("in-progress")); + assert_eq!(artifacts[2].links.len(), 1); + assert_eq!(artifacts[2].links[0].link_type, "implements"); +} + +// --------------------------------------------------------------------------- +// Error handling: HTTP errors +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_error_404_not_found() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + Mock::given(method("GET")) + .and(path("/rm/resources/NONEXISTENT")) + .respond_with(ResponseTemplate::new(404)) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let url = format!("{base}/rm/resources/NONEXISTENT"); + let result = client.get_resource(&url).await; + + assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!( + err_msg.contains("404"), + "error message should mention 404: {err_msg}" + ); +} + +#[tokio::test] +async fn test_error_500_internal_server_error() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + Mock::given(method("GET")) + .and(path("/rm/query")) + .respond_with(ResponseTemplate::new(500)) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let query_url = format!("{base}/rm/query"); + let result = client.query(&query_url, "", "").await; + + assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!( + err_msg.contains("500"), + "error message should mention 500: {err_msg}" + ); +} + +#[tokio::test] +async fn test_error_malformed_json_response() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + Mock::given(method("GET")) + .and(path("/rm/resources/BAD")) + .respond_with( + ResponseTemplate::new(200) + .set_body_string("this is not valid JSON {{{") + 
.append_header("Content-Type", "application/ld+json"), + ) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let url = format!("{base}/rm/resources/BAD"); + let result = client.get_resource(&url).await; + + assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!( + err_msg.contains("failed to parse"), + "error message should mention parse failure: {err_msg}" + ); +} + +#[tokio::test] +async fn test_error_malformed_catalog() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + Mock::given(method("GET")) + .and(path("/")) + .respond_with( + ResponseTemplate::new(200) + .set_body_string("{not valid json at all") + .append_header("Content-Type", "application/ld+json"), + ) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let result = client.discover().await; + + assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!( + err_msg.contains("failed to parse catalog"), + "error should mention catalog parse failure: {err_msg}" + ); +} + +#[tokio::test] +async fn test_error_create_returns_server_error() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + Mock::given(method("POST")) + .and(path("/rm/create")) + .respond_with(ResponseTemplate::new(500)) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let factory_url = format!("{base}/rm/create"); + let body = json!({ + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "REQ-FAIL", + "dcterms:title": "Should fail" + }); + + let result = client.create_resource(&factory_url, &body).await; + assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!( + err_msg.contains("500"), + "error message should mention 500: {err_msg}" + ); +} + +#[tokio::test] +async fn test_error_update_returns_404() { + let mock_server = MockServer::start().await; + let 
base = mock_server.uri(); + + Mock::given(method("PUT")) + .and(path("/rm/resources/GONE")) + .respond_with(ResponseTemplate::new(404)) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let url = format!("{base}/rm/resources/GONE"); + let body = json!({ + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "GONE", + "dcterms:title": "Deleted resource" + }); + + let result = client.update_resource(&url, &body).await; + assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!( + err_msg.contains("404"), + "error message should mention 404: {err_msg}" + ); +} + +// --------------------------------------------------------------------------- +// Authentication configuration +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_basic_auth_is_sent() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + // The Authorization header for basic auth "user:pass" is "Basic dXNlcjpwYXNz" + Mock::given(method("GET")) + .and(path("/")) + .and(header("Authorization", "Basic dXNlcjpwYXNz")) + .respond_with(ResponseTemplate::new(200).set_body_json(catalog_json(&base))) + .expect(1) + .mount(&mock_server) + .await; + + let config = + OslcClientConfig::new(&base).with_basic_auth("user".to_string(), "pass".to_string()); + let client = OslcClient::new(config).expect("client creation should succeed"); + + let result = client.discover().await; + assert!(result.is_ok(), "basic-auth request should succeed"); +} + +#[tokio::test] +async fn test_bearer_auth_is_sent() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + Mock::given(method("GET")) + .and(path("/")) + .and(header("Authorization", "Bearer my-secret-token")) + .respond_with(ResponseTemplate::new(200).set_body_json(catalog_json(&base))) + .expect(1) + .mount(&mock_server) + .await; + + let config = 
OslcClientConfig::new(&base).with_bearer_token("my-secret-token".to_string()); + let client = OslcClient::new(config).expect("client creation should succeed"); + + let result = client.discover().await; + assert!(result.is_ok(), "bearer-auth request should succeed"); +} + +// --------------------------------------------------------------------------- +// Pull error: member without identifier +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_pull_member_missing_identifier() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + let response = json!({ + "total_count": 1, + "members": [ + { + "@type": ["http://open-services.net/ns/rm#Requirement"], + "dcterms:title": "Requirement without identifier" + // No dcterms:identifier field + } + ] + }); + + Mock::given(method("GET")) + .and(path("/rm/query")) + .respond_with(ResponseTemplate::new(200).set_body_json(response)) + .expect(1) + .mount(&mock_server) + .await; + + let config = OslcClientConfig::new(&base); + let adapter = OslcSyncAdapter::from_config(config).expect("adapter creation should succeed"); + + let query_url = format!("{base}/rm/query"); + let result = adapter.pull(&query_url).await; + + assert!( + result.is_err(), + "pull should fail for member without identifier" + ); + let err_msg = format!("{}", result.unwrap_err()); + assert!( + err_msg.contains("identifier"), + "error should mention missing identifier: {err_msg}" + ); +} + +// --------------------------------------------------------------------------- +// Query response with pagination link +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_query_response_with_next_page() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + let paged_response = json!({ + "total_count": 100, + "next_page": format!("{base}/rm/query?page=2"), + "members": [ + { + "@type": 
["http://open-services.net/ns/rm#Requirement"], + "dcterms:identifier": "REQ-001", + "dcterms:title": "First page requirement" + } + ] + }); + + Mock::given(method("GET")) + .and(path("/rm/query")) + .respond_with(ResponseTemplate::new(200).set_body_json(paged_response)) + .expect(1) + .mount(&mock_server) + .await; + + let client = client_for(&base); + let query_url = format!("{base}/rm/query"); + let response = client + .query(&query_url, "", "") + .await + .expect("query should succeed"); + + assert_eq!(response.total_count, Some(100)); + assert!(response.next_page.is_some()); + assert_eq!( + response.next_page.as_deref(), + Some(format!("{base}/rm/query?page=2").as_str()) + ); + assert_eq!(response.members.len(), 1); +} + +// --------------------------------------------------------------------------- +// Resource type mapping sanity (exercised via pull path) +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn test_pull_test_result_with_status() { + let mock_server = MockServer::start().await; + let base = mock_server.uri(); + + let response = json!({ + "total_count": 1, + "members": [ + { + "@type": ["http://open-services.net/ns/qm#TestResult"], + "dcterms:identifier": "TR-001", + "dcterms:title": "Braking Test Result", + "oslc_qm:status": "passed", + "oslc_qm:reportsOnTestCase": { + "@id": format!("{base}/qm/testcases/TC-001") + } + } + ] + }); + + Mock::given(method("GET")) + .and(path("/qm/results")) + .respond_with(ResponseTemplate::new(200).set_body_json(response)) + .expect(1) + .mount(&mock_server) + .await; + + let config = OslcClientConfig::new(&base); + let adapter = OslcSyncAdapter::from_config(config).expect("adapter creation should succeed"); + + let query_url = format!("{base}/qm/results"); + let artifacts = adapter.pull(&query_url).await.expect("pull should succeed"); + + assert_eq!(artifacts.len(), 1); + assert_eq!(artifacts[0].id, "TR-001"); + assert_eq!(artifacts[0].artifact_type, 
"test-result"); + assert_eq!(artifacts[0].status.as_deref(), Some("passed")); + assert_eq!(artifacts[0].links.len(), 1); + assert_eq!(artifacts[0].links[0].link_type, "reports-on"); + assert_eq!(artifacts[0].links[0].target, "TC-001"); +} diff --git a/rivet-core/tests/proptest_core.rs b/rivet-core/tests/proptest_core.rs index 13c3295..3eddcb2 100644 --- a/rivet-core/tests/proptest_core.rs +++ b/rivet-core/tests/proptest_core.rs @@ -142,7 +142,7 @@ proptest! { fn prop_schema_merge_idempotent() { let schemas_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../schemas"); - for schema_name in &["common", "stpa", "aspice", "dev"] { + for schema_name in &["common", "stpa", "aspice", "cybersecurity", "dev"] { let path = schemas_dir.join(format!("{schema_name}.yaml")); if !path.exists() { continue; diff --git a/schemas/aspice.yaml b/schemas/aspice.yaml index aa12af2..6c3b7ca 100644 --- a/schemas/aspice.yaml +++ b/schemas/aspice.yaml @@ -4,33 +4,39 @@ # Based on Automotive SPICE PAM v4.0. 
# # Left side of V (specification/design): -# SYS.1 Stakeholder requirements -# SYS.2 System requirements -# SYS.3 System architecture -# SWE.1 Software requirements -# SWE.2 Software architecture -# SWE.3 Software detailed design / unit construction +# SYS.1 Stakeholder Requirements Analysis +# SYS.2 System Requirements Analysis +# SYS.3 System Architectural Design +# SWE.1 Software Requirements Analysis +# SWE.2 Software Architectural Design +# SWE.3 Software Detailed Design and Unit Construction # -# Right side of V (verification/testing): -# SWE.4 Unit verification -# SWE.5 Integration verification -# SWE.6 Software qualification testing -# SYS.4 System integration testing -# SYS.5 System qualification testing +# Right side of V (verification): +# SWE.4 Software Unit Verification +# SWE.5 Software Component Verification and Integration Verification +# SWE.6 Software Verification +# SYS.4 System Integration and Integration Verification +# SYS.5 System Verification # -# Test execution results (per-version evidence): -# test-execution — a run against a specific version -# test-verdict — pass/fail verdict for a single test case +# Verification execution results (per-version evidence): +# verification-execution — a run against a specific version +# verification-verdict — pass/fail verdict for a single measure +# +# ASPICE 4.0 key terminology changes: +# - "test" is now "verification measure" (broader: includes review, +# static analysis, formal verification, simulation, not just testing) +# - SWE.5 scope expanded to include component verification +# - SWE.6/SYS.5 renamed from "qualification test" to "verification" # # This schema is reusable across any automotive project. schema: name: aspice - version: "0.1.0" + version: "0.2.0" namespace: "http://pulseengine.dev/ns/aspice#" extends: [common] description: > - Automotive SPICE V-model artifact types and traceability rules. + Automotive SPICE v4.0 V-model artifact types and traceability rules. 
# ────────────────────────────────────────────────────────────────────────── # Artifact types @@ -64,6 +70,10 @@ artifact-types: - name: priority type: string required: false + - name: verification-criteria + type: text + required: false + description: Criteria for verifying this requirement (ASPICE 4.0 — verification measures) link-fields: - name: derived-from link-type: derives-from @@ -103,6 +113,10 @@ artifact-types: - name: priority type: string required: false + - name: verification-criteria + type: text + required: false + description: Criteria for verifying this requirement (ASPICE 4.0 — verification measures) link-fields: - name: derived-from link-type: derives-from @@ -159,16 +173,28 @@ artifact-types: required: false cardinality: zero-or-many - # ── Test specifications (right side of V) ─────────────────────────────── + # ── Verification specifications (right side of V) ────────────────────── + # + # ASPICE 4.0: "verification measure" replaces "test case". + # Methods now include static analysis, formal verification, simulation, + # review, inspection — not just automated/manual testing. 
- - name: unit-test - description: Unit verification test specification (SWE.4) + - name: unit-verification + description: Unit verification measure (SWE.4 — Software Unit Verification) aspice-process: SWE.4 fields: - name: method type: string required: false - allowed-values: [automated, manual, review, analysis] + allowed-values: + - automated-test + - manual-test + - review + - static-analysis + - formal-verification + - simulation + - inspection + - walkthrough - name: preconditions type: list required: false @@ -182,14 +208,24 @@ artifact-types: required: true cardinality: one-or-many - - name: integration-test - description: Software integration test specification (SWE.5) + - name: sw-integration-verification + description: > + Software component and integration verification measure + (SWE.5 — Software Component Verification and Integration Verification) aspice-process: SWE.5 fields: - name: method type: string required: false - allowed-values: [automated, manual, review, analysis] + allowed-values: + - automated-test + - manual-test + - review + - static-analysis + - formal-verification + - simulation + - inspection + - walkthrough - name: preconditions type: list required: false @@ -203,14 +239,24 @@ artifact-types: required: true cardinality: one-or-many - - name: sw-qual-test - description: Software qualification test specification (SWE.6) + - name: sw-verification + description: > + Software verification measure against SW requirements + (SWE.6 — Software Verification) aspice-process: SWE.6 fields: - name: method type: string required: false - allowed-values: [automated, manual, review, analysis] + allowed-values: + - automated-test + - manual-test + - review + - static-analysis + - formal-verification + - simulation + - inspection + - walkthrough - name: priority type: string required: false @@ -227,14 +273,24 @@ artifact-types: required: true cardinality: one-or-many - - name: sys-integration-test - description: System integration test specification 
(SYS.4) + - name: sys-integration-verification + description: > + System integration and integration verification measure + (SYS.4 — System Integration and Integration Verification) aspice-process: SYS.4 fields: - name: method type: string required: false - allowed-values: [automated, manual, review, analysis] + allowed-values: + - automated-test + - manual-test + - review + - static-analysis + - formal-verification + - simulation + - inspection + - walkthrough - name: preconditions type: list required: false @@ -248,14 +304,24 @@ artifact-types: required: true cardinality: one-or-many - - name: sys-qual-test - description: System qualification test specification (SYS.5) + - name: sys-verification + description: > + System verification measure against system requirements + (SYS.5 — System Verification) aspice-process: SYS.5 fields: - name: method type: string required: false - allowed-values: [automated, manual, review, analysis] + allowed-values: + - automated-test + - manual-test + - review + - static-analysis + - formal-verification + - simulation + - inspection + - walkthrough - name: preconditions type: list required: false @@ -269,10 +335,10 @@ artifact-types: required: true cardinality: one-or-many - # ── Test execution results (per-version evidence) ─────────────────────── + # ── Verification execution results (per-version evidence) ────────────── - - name: test-execution - description: A test execution run against a specific version + - name: verification-execution + description: A verification execution run against a specific version fields: - name: version type: string @@ -287,15 +353,15 @@ artifact-types: - name: executor type: string required: false - description: Who or what ran the tests (CI system, person) + description: Who or what ran the verification (CI system, person) - name: environment type: structured required: false description: OS, toolchain, runtime versions link-fields: [] - - name: test-verdict - description: Pass/fail verdict for a 
single test case in an execution run + - name: verification-verdict + description: Pass/fail verdict for a single verification measure in an execution run fields: - name: verdict type: string @@ -316,14 +382,19 @@ artifact-types: type: text required: false link-fields: - - name: test + - name: measure link-type: result-of - target-types: [unit-test, integration-test, sw-qual-test, sys-integration-test, sys-qual-test] + target-types: + - unit-verification + - sw-integration-verification + - sw-verification + - sys-integration-verification + - sys-verification required: true cardinality: exactly-one - name: execution link-type: part-of-execution - target-types: [test-execution] + target-types: [verification-execution] required: true cardinality: exactly-one @@ -333,21 +404,26 @@ artifact-types: link-types: - name: result-of inverse: has-result - description: Test verdict is the result of executing a test specification - source-types: [test-verdict] - target-types: [unit-test, integration-test, sw-qual-test, sys-integration-test, sys-qual-test] + description: Verification verdict is the result of executing a verification measure + source-types: [verification-verdict] + target-types: + - unit-verification + - sw-integration-verification + - sw-verification + - sys-integration-verification + - sys-verification - name: part-of-execution inverse: contains-verdict - description: Test verdict belongs to a test execution run - source-types: [test-verdict] - target-types: [test-execution] + description: Verification verdict belongs to a verification execution run + source-types: [verification-verdict] + target-types: [verification-execution] # ────────────────────────────────────────────────────────────────────────── # ASPICE traceability rules # # These encode the bidirectional traceability requirements from the -# Automotive SPICE PAM. `trace validate` checks these automatically. +# Automotive SPICE PAM v4.0. `rivet validate` checks these automatically. 
# ────────────────────────────────────────────────────────────────────────── traceability-rules: # Forward traceability (left side of V, top-down) @@ -381,44 +457,44 @@ traceability-rules: # Reverse traceability (right side of V, bottom-up verification) - name: swe4-verifies-swe3 - description: Every unit test must verify a detailed design element - source-type: unit-test + description: Every unit verification measure must verify a detailed design element + source-type: unit-verification required-link: verifies target-types: [sw-detail-design] severity: error - name: swe6-verifies-swe1 - description: Every SW qualification test must verify a SW requirement - source-type: sw-qual-test + description: Every SW verification measure must verify a SW requirement + source-type: sw-verification required-link: verifies target-types: [sw-req] severity: error - name: sys5-verifies-sys2 - description: Every system qualification test must verify a system requirement - source-type: sys-qual-test + description: Every system verification measure must verify a system requirement + source-type: sys-verification required-link: verifies target-types: [system-req] severity: error # Coverage rules (every requirement should be verified) - - name: swe1-has-test - description: Every SW requirement should be verified by at least one test + - name: swe1-has-verification + description: Every SW requirement should be verified by at least one verification measure source-type: sw-req required-backlink: verifies - from-types: [sw-qual-test, unit-test, integration-test] + from-types: [sw-verification, unit-verification, sw-integration-verification] severity: warning - - name: sys2-has-test - description: Every system requirement should be verified by at least one test + - name: sys2-has-verification + description: Every system requirement should be verified by at least one verification measure source-type: system-req required-backlink: verifies - from-types: [sys-qual-test, sys-integration-test] + 
from-types: [sys-verification, sys-integration-verification] severity: warning - - name: swe3-has-test - description: Every detailed design element should be verified by at least one unit test + - name: swe3-has-verification + description: Every detailed design element should be verified by at least one unit verification measure source-type: sw-detail-design required-backlink: verifies - from-types: [unit-test] + from-types: [unit-verification] severity: warning diff --git a/schemas/cybersecurity.yaml b/schemas/cybersecurity.yaml new file mode 100644 index 0000000..30ec772 --- /dev/null +++ b/schemas/cybersecurity.yaml @@ -0,0 +1,317 @@ +# Cybersecurity schema — ASPICE 4.0 SEC.1-4 / ISO 21434 +# +# Defines artifact types and traceability rules for automotive cybersecurity +# engineering aligned with Automotive SPICE v4.0 cybersecurity plug-in and +# ISO/SAE 21434 "Road vehicles — Cybersecurity engineering". +# +# Processes covered: +# SEC.1 Cybersecurity Requirements Analysis +# SEC.2 Cybersecurity Design +# SEC.3 Cybersecurity Implementation +# SEC.4 Cybersecurity Verification +# +# Supporting activities: +# MAN.7 TARA (Threat Analysis and Risk Assessment) +# +# This schema extends common and is designed to compose with the aspice schema. + +schema: + name: cybersecurity + version: "0.1.0" + namespace: "http://pulseengine.dev/ns/cybersecurity#" + extends: [common] + description: > + Automotive cybersecurity artifact types aligned with ASPICE v4.0 + SEC.1-4 and ISO/SAE 21434. + +# ────────────────────────────────────────────────────────────────────────── +# Artifact types +# ────────────────────────────────────────────────────────────────────────── +artifact-types: + + # ── TARA (MAN.7) ────────────────────────────────────────────────────── + + - name: asset + description: > + An item of value requiring protection — data, function, or component. + Identified during TARA (ISO 21434 clause 8). 
+ fields: + - name: asset-type + type: string + required: false + allowed-values: [data, function, component, interface, key-material] + - name: cybersecurity-properties + type: list + required: false + description: "Cybersecurity properties to protect: confidentiality, integrity, availability, authenticity" + link-fields: [] + + - name: threat-scenario + description: > + A potential attack scenario against an asset. + Part of TARA (ISO 21434 clause 8). + fields: + - name: attack-vector + type: string + required: false + allowed-values: [network, physical, local, adjacent] + description: CVSS-style attack vector + - name: attack-feasibility + type: string + required: false + allowed-values: [high, medium, low, very-low] + - name: impact + type: string + required: false + allowed-values: [severe, major, moderate, negligible] + link-fields: + - name: targets + link-type: threatens + target-types: [asset] + required: true + cardinality: one-or-many + + - name: risk-assessment + description: > + Combined risk level from threat feasibility and impact. + Determines whether a cybersecurity goal is needed (ISO 21434 clause 8). + fields: + - name: risk-level + type: string + required: true + allowed-values: [unacceptable, conditional, acceptable] + - name: risk-treatment + type: string + required: false + allowed-values: [mitigate, avoid, transfer, accept] + link-fields: + - name: threat + link-type: assesses + target-types: [threat-scenario] + required: true + cardinality: exactly-one + + # ── SEC.1: Cybersecurity Requirements Analysis ──────────────────────── + + - name: cybersecurity-goal + description: > + A top-level cybersecurity requirement derived from TARA results. + Equivalent to a cybersecurity goal in ISO 21434 clause 9.
+ aspice-process: SEC.1 + fields: + - name: cal + type: string + required: false + allowed-values: ["1", "2", "3", "4"] + description: Cybersecurity Assurance Level (CAL 1-4) + - name: verification-criteria + type: text + required: false + link-fields: + - name: mitigates + link-type: mitigates + target-types: [threat-scenario] + required: true + cardinality: one-or-many + + - name: cybersecurity-req + description: > + A detailed cybersecurity requirement derived from cybersecurity goals. + SEC.1 outcome (ISO 21434 clause 9). + aspice-process: SEC.1 + fields: + - name: req-type + type: string + required: false + allowed-values: [authentication, authorization, encryption, integrity, availability, logging] + - name: priority + type: string + required: false + - name: verification-criteria + type: text + required: false + link-fields: + - name: derived-from + link-type: derives-from + target-types: [cybersecurity-goal] + required: true + cardinality: one-or-many + + # ── SEC.2: Cybersecurity Design ─────────────────────────────────────── + + - name: cybersecurity-design + description: > + Architecture or design element addressing cybersecurity requirements. + SEC.2 outcome — security mechanisms, protocols, algorithms. + aspice-process: SEC.2 + fields: + - name: mechanism + type: string + required: false + description: Security mechanism (e.g. TLS, SecOC, HSM, secure boot) + - name: algorithm + type: string + required: false + description: Cryptographic algorithm or protocol + link-fields: + - name: satisfies + link-type: satisfies + target-types: [cybersecurity-req] + required: true + cardinality: one-or-many + + # ── SEC.3: Cybersecurity Implementation ─────────────────────────────── + + - name: cybersecurity-implementation + description: > + Implementation artifact for a cybersecurity design element. + SEC.3 outcome — code, configuration, key management. 
+ aspice-process: SEC.3 + fields: + - name: unit + type: string + required: false + description: Source file or module + - name: implementation-type + type: string + required: false + allowed-values: [code, configuration, key-provisioning, policy] + link-fields: + - name: implements + link-type: implements + target-types: [cybersecurity-design] + required: true + cardinality: one-or-many + + # ── SEC.4: Cybersecurity Verification ───────────────────────────────── + + - name: cybersecurity-verification + description: > + Verification measure for cybersecurity requirements. + SEC.4 outcome — includes penetration testing, fuzzing, code review, + static analysis, vulnerability scanning. + aspice-process: SEC.4 + fields: + - name: method + type: string + required: false + allowed-values: + - penetration-test + - fuzz-test + - code-review + - static-analysis + - vulnerability-scan + - automated-test + - manual-test + - formal-verification + - name: preconditions + type: list + required: false + - name: steps + type: structured + required: false + link-fields: + - name: verifies + link-type: verifies + target-types: [cybersecurity-req, cybersecurity-design, cybersecurity-implementation] + required: true + cardinality: one-or-many + +# ────────────────────────────────────────────────────────────────────────── +# Cybersecurity-specific link types +# ────────────────────────────────────────────────────────────────────────── +link-types: + - name: threatens + inverse: threatened-by + description: Threat scenario threatens an asset + source-types: [threat-scenario] + target-types: [asset] + + - name: assesses + inverse: assessed-by + description: Risk assessment evaluates a threat scenario + source-types: [risk-assessment] + target-types: [threat-scenario] + +# ────────────────────────────────────────────────────────────────────────── +# Cybersecurity traceability rules +# +# These encode the traceability requirements from ISO 21434 and +# ASPICE v4.0 SEC processes. 
+# ────────────────────────────────────────────────────────────────────────── +traceability-rules: + # TARA completeness + - name: threat-has-asset + description: Every threat scenario must target at least one asset + source-type: threat-scenario + required-link: threatens + target-types: [asset] + severity: error + + - name: risk-has-threat + description: Every risk assessment must assess a threat scenario + source-type: risk-assessment + required-link: assesses + target-types: [threat-scenario] + severity: error + + # SEC.1 traceability + - name: goal-mitigates-threat + description: Every cybersecurity goal must mitigate at least one threat + source-type: cybersecurity-goal + required-link: mitigates + target-types: [threat-scenario] + severity: error + + - name: req-derives-from-goal + description: Every cybersecurity requirement must derive from a cybersecurity goal + source-type: cybersecurity-req + required-link: derives-from + target-types: [cybersecurity-goal] + severity: error + + # SEC.2 traceability + - name: design-satisfies-req + description: Every cybersecurity design must satisfy at least one cybersecurity requirement + source-type: cybersecurity-design + required-link: satisfies + target-types: [cybersecurity-req] + severity: error + + # SEC.3 traceability + - name: impl-implements-design + description: Every cybersecurity implementation must implement a design element + source-type: cybersecurity-implementation + required-link: implements + target-types: [cybersecurity-design] + severity: error + + # SEC.4 traceability + - name: verification-verifies-sec + description: Every cybersecurity verification must verify a requirement, design, or implementation + source-type: cybersecurity-verification + required-link: verifies + target-types: [cybersecurity-req, cybersecurity-design, cybersecurity-implementation] + severity: error + + # Coverage rules + - name: sec-req-has-verification + description: Every cybersecurity requirement should be verified + 
source-type: cybersecurity-req + required-backlink: verifies + from-types: [cybersecurity-verification] + severity: warning + + - name: sec-req-has-design + description: Every cybersecurity requirement should be satisfied by a design element + source-type: cybersecurity-req + required-backlink: satisfies + from-types: [cybersecurity-design] + severity: warning + + - name: threat-has-goal + description: Every threat scenario should be mitigated by a cybersecurity goal (the rule checks all threats, not only those assessed unacceptable) + source-type: threat-scenario + required-backlink: mitigates + from-types: [cybersecurity-goal] + severity: warning diff --git a/schemas/stpa.yaml b/schemas/stpa.yaml index c0ce11f..6ce1f68 100644 --- a/schemas/stpa.yaml +++ b/schemas/stpa.yaml @@ -207,11 +207,14 @@ artifact-types: required: false allowed-values: - controller-failure + - inadequate-control-algorithm + - inadequate-process-model - inadequate-feedback - process-model-flaw - coordination-failure - actuator-failure - sensor-failure + - control-path - name: causal-factors type: list required: false @@ -220,8 +223,11 @@ artifact-types: - name: uca link-type: caused-by-uca target-types: [uca] - required: true - cardinality: one-or-many + required: false + cardinality: zero-or-many + description: > + UCA(s) this scenario explains. Type-a scenarios (why does the UCA + occur?) require this link; type-b control-path scenarios may omit it. - name: hazards link-type: leads-to-hazard target-types: [hazard, sub-hazard]