From e8d0c853662b37f1a198134c806ef19ca2a52806 Mon Sep 17 00:00:00 2001 From: Tsavo Knott Date: Tue, 24 Feb 2026 14:39:48 -0500 Subject: [PATCH 1/3] fix: cross-platform hashing, CRLF handling, and autodoc AccumulatorSink - Replace shell-based computeFileHash in template_manifest.dart with pure-Dart crypto (fixes Windows where shasum/sha256sum don't exist) - Normalize CRLF to LF in _preserveUserSections to prevent silent data loss on Windows - Fix autodoc AccumulatorSink import error from recent crypto migration Fixes #11, fixes #8 Co-Authored-By: Claude Opus 4.6 --- lib/src/cli/commands/autodoc_command.dart | 15 +++++++-------- lib/src/cli/utils/template_manifest.dart | 18 +++++++----------- lib/src/cli/utils/workflow_generator.dart | 5 +++++ 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/lib/src/cli/commands/autodoc_command.dart b/lib/src/cli/commands/autodoc_command.dart index c179abf..492810c 100644 --- a/lib/src/cli/commands/autodoc_command.dart +++ b/lib/src/cli/commands/autodoc_command.dart @@ -1,5 +1,6 @@ import 'dart:convert'; import 'dart:io'; +import 'dart:typed_data'; import 'package:args/command_runner.dart'; import 'package:crypto/crypto.dart'; @@ -482,26 +483,24 @@ Write the corrected file to the same path: $absOutputFile filePaths.sort(); - final sink = AccumulatorSink(); - final input = sha256.startChunkedConversion(sink); + final builder = BytesBuilder(copy: false); for (final path in filePaths) { // Include the path name in the digest so renames affect the hash. 
- input.add(utf8.encode(path)); - input.add(const [0]); + builder.add(utf8.encode(path)); + builder.addByte(0); if (!path.startsWith('missing_dir:')) { try { - input.add(File(path).readAsBytesSync()); + builder.add(File(path).readAsBytesSync()); } catch (e) { Logger.warn('Could not read $path for module hash: $e'); } } - input.add(const [0]); + builder.addByte(0); } - input.close(); - return sink.events.single.toString(); + return sha256.convert(builder.takeBytes()).toString(); } } diff --git a/lib/src/cli/utils/template_manifest.dart b/lib/src/cli/utils/template_manifest.dart index a333d94..7640cf4 100644 --- a/lib/src/cli/utils/template_manifest.dart +++ b/lib/src/cli/utils/template_manifest.dart @@ -1,6 +1,8 @@ import 'dart:convert'; import 'dart:io'; +import 'package:crypto/crypto.dart'; + import '../../triage/utils/run_context.dart'; /// Represents one template entry from manifest.json. @@ -96,18 +98,12 @@ class TemplateVersionTracker { } /// Compute SHA256 hash of a file's contents. +/// +/// Uses pure Dart [sha256] from `package:crypto` so it works on all platforms +/// (macOS, Linux, Windows) without shelling out to external tools. 
String computeFileHash(String filePath) { final file = File(filePath); if (!file.existsSync()) return ''; - // Try shasum (macOS) first, then sha256sum (Linux) - final macResult = Process.runSync('sh', ['-c', 'shasum -a 256 "$filePath" | cut -d" " -f1']); - if (macResult.exitCode == 0) { - final hash = (macResult.stdout as String).trim(); - if (hash.isNotEmpty) return hash; - } - final linuxResult = Process.runSync('sh', ['-c', 'sha256sum "$filePath" | cut -d" " -f1']); - if (linuxResult.exitCode == 0) { - return (linuxResult.stdout as String).trim(); - } - return ''; + final bytes = file.readAsBytesSync(); + return sha256.convert(bytes).toString(); } diff --git a/lib/src/cli/utils/workflow_generator.dart b/lib/src/cli/utils/workflow_generator.dart index 3616011..03ff570 100644 --- a/lib/src/cli/utils/workflow_generator.dart +++ b/lib/src/cli/utils/workflow_generator.dart @@ -202,6 +202,11 @@ class WorkflowGenerator { /// `# --- BEGIN USER: ---` /// `# --- END USER: ---` String _preserveUserSections(String rendered, String existing) { + // Normalize CRLF → LF so the regex matches regardless of line-ending style + // (Windows checkouts with core.autocrlf=true produce \r\n). 
+ existing = existing.replaceAll('\r\n', '\n'); + rendered = rendered.replaceAll('\r\n', '\n'); + final sectionPattern = RegExp(r'# --- BEGIN USER: (\S+) ---\n(.*?)# --- END USER: \1 ---', dotAll: true); // Extract user content from existing file From 56b3379797375d913ae18c8cc0cd2b0a4b17a657 Mon Sep 17 00:00:00 2001 From: Tsavo Knott Date: Tue, 24 Feb 2026 14:39:56 -0500 Subject: [PATCH 2/3] fix: CI template security hardening and improvements - Add ::add-mask:: token masking before git config in all 3 skeleton locations and both ci.yaml locations (prevents PAT leaks in logs) - Add fetch-depth: 1 to all analyze/test checkout steps (saves 50-150s) - Replace /tmp/ with $RUNNER_TEMP for Windows compatibility - Add conditional artifact upload on test failure for diagnostics - Fix analysis cache key drift: add runner.arch to multi_platform block - Add sync-marker comments to all duplicated step sequences Fixes #10, fixes #13, partially addresses #12 Co-Authored-By: Claude Opus 4.6 --- .github/workflows/ci.yaml | 40 ++++--- templates/github/workflows/ci.skeleton.yaml | 112 ++++++++++++++------ 2 files changed, 107 insertions(+), 45 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 56a1e31..4a87757 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -85,17 +85,18 @@ jobs: - uses: actions/checkout@v6.0.2 with: ref: ${{ needs.auto-format.outputs.sha }} + fetch-depth: 1 persist-credentials: false - name: Configure Git for HTTPS with Token shell: bash - env: - GH_PAT: ${{ secrets.TSAVO_AT_PIECES_PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }} run: | - git config --global url."https://x-access-token:${GH_PAT}@github.com/".insteadOf "git@github.com:" - git config --global url."https://x-access-token:${GH_PAT}@github.com/".insteadOf "ssh://git@github.com/" - git config --global url."https://x-access-token:${GH_PAT}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" - git config --global 
url."https://x-access-token:${GH_PAT}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" + TOKEN="${{ secrets.TSAVO_AT_PIECES_PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }}" + echo "::add-mask::${TOKEN}" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "git@github.com:" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "ssh://git@github.com/" + git config --global url."https://x-access-token:${TOKEN}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" + git config --global url."https://x-access-token:${TOKEN}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" - uses: dart-lang/setup-dart@v1.7.1 with: @@ -114,8 +115,8 @@ jobs: - name: Analyze run: | - dart analyze 2>&1 | tee /tmp/analysis.txt - if grep -q "^ error -" /tmp/analysis.txt; then + dart analyze 2>&1 | tee "$RUNNER_TEMP/analysis.txt" + if grep -q "^ error -" "$RUNNER_TEMP/analysis.txt"; then echo "::error::Analysis errors found" exit 1 fi @@ -132,17 +133,18 @@ jobs: - uses: actions/checkout@v6.0.2 with: ref: ${{ needs.auto-format.outputs.sha }} + fetch-depth: 1 persist-credentials: false - name: Configure Git for HTTPS with Token shell: bash - env: - GH_PAT: ${{ secrets.TSAVO_AT_PIECES_PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }} run: | - git config --global url."https://x-access-token:${GH_PAT}@github.com/".insteadOf "git@github.com:" - git config --global url."https://x-access-token:${GH_PAT}@github.com/".insteadOf "ssh://git@github.com/" - git config --global url."https://x-access-token:${GH_PAT}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" - git config --global url."https://x-access-token:${GH_PAT}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" + TOKEN="${{ secrets.TSAVO_AT_PIECES_PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }}" + echo "::add-mask::${TOKEN}" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "git@github.com:" 
+ git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "ssh://git@github.com/" + git config --global url."https://x-access-token:${TOKEN}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" + git config --global url."https://x-access-token:${TOKEN}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" - uses: dart-lang/setup-dart@v1.7.1 with: @@ -165,6 +167,16 @@ jobs: - name: Test run: dart test + - name: Upload test artifacts on failure + if: failure() + uses: actions/upload-artifact@v4 + with: + name: test-artifacts-${{ matrix.platform_id }} + path: | + test/integration/fixtures/bin/ + **/test-results/ + retention-days: 7 + # --- BEGIN USER: post-test --- # --- END USER: post-test --- diff --git a/templates/github/workflows/ci.skeleton.yaml b/templates/github/workflows/ci.skeleton.yaml index 0c9c94c..8018d88 100644 --- a/templates/github/workflows/ci.skeleton.yaml +++ b/templates/github/workflows/ci.skeleton.yaml @@ -92,30 +92,35 @@ jobs: <%/secrets_list%> <%/has_secrets%> steps: + # ── shared:checkout ── keep in sync with multi_platform ── - uses: actions/checkout@v6.0.2 with: <%#format_check%> ref: ${{ needs.auto-format.outputs.sha }} <%/format_check%> + fetch-depth: 1 persist-credentials: false <%#lfs%> lfs: true <%/lfs%> + # ── shared:git-config ── keep in sync with multi_platform ── - name: Configure Git for HTTPS with Token shell: bash - env: - GH_PAT: ${{ secrets.<%pat_secret%> || secrets.GITHUB_TOKEN }} run: | - git config --global url."https://x-access-token:${GH_PAT}@github.com/".insteadOf "git@github.com:" - git config --global url."https://x-access-token:${GH_PAT}@github.com/".insteadOf "ssh://git@github.com/" - git config --global url."https://x-access-token:${GH_PAT}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" - git config --global url."https://x-access-token:${GH_PAT}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" - + TOKEN="${{ secrets.<%pat_secret%> || 
secrets.GITHUB_TOKEN }}" + echo "::add-mask::${TOKEN}" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "git@github.com:" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "ssh://git@github.com/" + git config --global url."https://x-access-token:${TOKEN}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" + git config --global url."https://x-access-token:${TOKEN}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" + + # ── shared:dart-setup ── keep in sync with multi_platform ── - uses: dart-lang/setup-dart@v1.7.1 with: sdk: "<%dart_sdk%>" + # ── shared:pub-cache ── keep in sync with multi_platform ── - name: Cache Dart pub dependencies uses: actions/cache@v5.0.3 with: @@ -123,6 +128,7 @@ jobs: key: ${{ runner.os }}-${{ runner.arch }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }} restore-keys: ${{ runner.os }}-${{ runner.arch }}-dart-pub- + # ── shared:proto-setup ── keep in sync with multi_platform ── <%#proto%> - name: Install protoc uses: arduino/setup-protoc@v3.0.0 @@ -130,10 +136,12 @@ jobs: - run: dart pub global activate protoc_plugin 25.0.0 <%/proto%> + # ── shared:pub-get ── keep in sync with multi_platform ── - run: dart pub get env: GIT_LFS_SKIP_SMUDGE: "1" + # ── shared:analysis-cache ── keep in sync with multi_platform ── <%#analysis_cache%> - name: Cache Dart analysis uses: actions/cache@v5.0.3 @@ -143,11 +151,13 @@ jobs: restore-keys: ${{ runner.os }}-${{ runner.arch }}-dart-analysis- <%/analysis_cache%> + # ── shared:proto-verify ── keep in sync with multi_platform ── <%#proto%> - name: Verify proto files run: dart run runtime_ci_tooling:manage_cicd verify-protos <%/proto%> + # ── shared:analyze ── keep in sync with multi_platform ── <%#managed_analyze%> - name: Analyze run: dart run runtime_ci_tooling:manage_cicd analyze @@ -155,20 +165,21 @@ jobs: <%^managed_analyze%> - name: Analyze run: | - dart analyze 2>&1 | tee /tmp/analysis.txt - if grep -q "^ error -" 
/tmp/analysis.txt; then + dart analyze 2>&1 | tee "$RUNNER_TEMP/analysis.txt" + if grep -q "^ error -" "$RUNNER_TEMP/analysis.txt"; then echo "::error::Analysis errors found" exit 1 fi <%/managed_analyze%> + # ── shared:sub-packages ── keep in sync with multi_platform ── <%#sub_packages%> - name: Analyze (<%name%>) working-directory: <%path%> run: | GIT_LFS_SKIP_SMUDGE=1 dart pub get - dart analyze 2>&1 | tee /tmp/sub_analysis.txt - if grep -q "^ error -" /tmp/sub_analysis.txt; then + dart analyze 2>&1 | tee "$RUNNER_TEMP/sub_analysis.txt" + if grep -q "^ error -" "$RUNNER_TEMP/sub_analysis.txt"; then echo "::error::Errors found in <%name%>" exit 1 fi @@ -177,6 +188,7 @@ jobs: # --- BEGIN USER: pre-test --- # --- END USER: pre-test --- + # ── shared:test ── keep in sync with multi_platform ── <%#managed_test%> - name: Test run: dart run runtime_ci_tooling:manage_cicd test @@ -186,6 +198,16 @@ jobs: run: dart test <%/managed_test%> + - name: Upload test artifacts on failure + if: failure() + uses: actions/upload-artifact@v4 + with: + name: test-artifacts-<%runner%> + path: | + test/integration/fixtures/bin/ + **/test-results/ + retention-days: 7 + # --- BEGIN USER: post-test --- # --- END USER: post-test --- <%/single_platform%> @@ -201,30 +223,35 @@ jobs: <%/secrets_list%> <%/has_secrets%> steps: + # ── shared:checkout ── keep in sync with single_platform ── - uses: actions/checkout@v6.0.2 with: <%#format_check%> ref: ${{ needs.auto-format.outputs.sha }} <%/format_check%> + fetch-depth: 1 persist-credentials: false <%#lfs%> lfs: true <%/lfs%> + # ── shared:git-config ── keep in sync with single_platform ── - name: Configure Git for HTTPS with Token shell: bash - env: - GH_PAT: ${{ secrets.<%pat_secret%> || secrets.GITHUB_TOKEN }} run: | - git config --global url."https://x-access-token:${GH_PAT}@github.com/".insteadOf "git@github.com:" - git config --global url."https://x-access-token:${GH_PAT}@github.com/".insteadOf "ssh://git@github.com/" - git config --global 
url."https://x-access-token:${GH_PAT}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" - git config --global url."https://x-access-token:${GH_PAT}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" - + TOKEN="${{ secrets.<%pat_secret%> || secrets.GITHUB_TOKEN }}" + echo "::add-mask::${TOKEN}" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "git@github.com:" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "ssh://git@github.com/" + git config --global url."https://x-access-token:${TOKEN}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" + git config --global url."https://x-access-token:${TOKEN}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" + + # ── shared:dart-setup ── keep in sync with single_platform ── - uses: dart-lang/setup-dart@v1.7.1 with: sdk: "<%dart_sdk%>" + # ── shared:pub-cache ── keep in sync with single_platform ── - name: Cache Dart pub dependencies uses: actions/cache@v5.0.3 with: @@ -232,6 +259,7 @@ jobs: key: ${{ runner.os }}-${{ runner.arch }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }} restore-keys: ${{ runner.os }}-${{ runner.arch }}-dart-pub- + # ── shared:proto-setup ── keep in sync with single_platform ── <%#proto%> - name: Install protoc uses: arduino/setup-protoc@v3.0.0 @@ -239,24 +267,28 @@ jobs: - run: dart pub global activate protoc_plugin 25.0.0 <%/proto%> + # ── shared:pub-get ── keep in sync with single_platform ── - run: dart pub get env: GIT_LFS_SKIP_SMUDGE: "1" + # ── shared:analysis-cache ── keep in sync with single_platform ── <%#analysis_cache%> - name: Cache Dart analysis uses: actions/cache@v5.0.3 with: path: ~/.dartServer - key: ${{ runner.os }}-dart-analysis-${{ hashFiles('**/*.dart', '**/pubspec.yaml') }} - restore-keys: ${{ runner.os }}-dart-analysis- + key: ${{ runner.os }}-${{ runner.arch }}-dart-analysis-${{ hashFiles('**/*.dart', '**/pubspec.yaml') }} + restore-keys: ${{ runner.os }}-${{ 
runner.arch }}-dart-analysis- <%/analysis_cache%> + # ── shared:proto-verify ── keep in sync with single_platform ── <%#proto%> - name: Verify proto files run: dart run runtime_ci_tooling:manage_cicd verify-protos <%/proto%> + # ── shared:analyze ── keep in sync with single_platform ── <%#managed_analyze%> - name: Analyze run: dart run runtime_ci_tooling:manage_cicd analyze @@ -264,20 +296,21 @@ jobs: <%^managed_analyze%> - name: Analyze run: | - dart analyze 2>&1 | tee /tmp/analysis.txt - if grep -q "^ error -" /tmp/analysis.txt; then + dart analyze 2>&1 | tee "$RUNNER_TEMP/analysis.txt" + if grep -q "^ error -" "$RUNNER_TEMP/analysis.txt"; then echo "::error::Analysis errors found" exit 1 fi <%/managed_analyze%> + # ── shared:sub-packages ── keep in sync with single_platform ── <%#sub_packages%> - name: Analyze (<%name%>) working-directory: <%path%> run: | GIT_LFS_SKIP_SMUDGE=1 dart pub get - dart analyze 2>&1 | tee /tmp/sub_analysis.txt - if grep -q "^ error -" /tmp/sub_analysis.txt; then + dart analyze 2>&1 | tee "$RUNNER_TEMP/sub_analysis.txt" + if grep -q "^ error -" "$RUNNER_TEMP/sub_analysis.txt"; then echo "::error::Errors found in <%name%>" exit 1 fi @@ -298,30 +331,35 @@ jobs: <%/secrets_list%> <%/has_secrets%> steps: + # ── shared:checkout ── keep in sync with single_platform ── - uses: actions/checkout@v6.0.2 with: <%#format_check%> ref: ${{ needs.auto-format.outputs.sha }} <%/format_check%> + fetch-depth: 1 persist-credentials: false <%#lfs%> lfs: true <%/lfs%> + # ── shared:git-config ── keep in sync with single_platform ── - name: Configure Git for HTTPS with Token shell: bash - env: - GH_PAT: ${{ secrets.<%pat_secret%> || secrets.GITHUB_TOKEN }} run: | - git config --global url."https://x-access-token:${GH_PAT}@github.com/".insteadOf "git@github.com:" - git config --global url."https://x-access-token:${GH_PAT}@github.com/".insteadOf "ssh://git@github.com/" - git config --global 
url."https://x-access-token:${GH_PAT}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" - git config --global url."https://x-access-token:${GH_PAT}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" - + TOKEN="${{ secrets.<%pat_secret%> || secrets.GITHUB_TOKEN }}" + echo "::add-mask::${TOKEN}" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "git@github.com:" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "ssh://git@github.com/" + git config --global url."https://x-access-token:${TOKEN}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" + git config --global url."https://x-access-token:${TOKEN}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" + + # ── shared:dart-setup ── keep in sync with single_platform ── - uses: dart-lang/setup-dart@v1.7.1 with: sdk: "<%dart_sdk%>" + # ── shared:pub-cache ── keep in sync with single_platform ── - name: Cache Dart pub dependencies uses: actions/cache@v5.0.3 with: @@ -329,6 +367,7 @@ jobs: key: ${{ runner.os }}-${{ runner.arch }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }} restore-keys: ${{ runner.os }}-${{ runner.arch }}-dart-pub- + # ── shared:pub-get ── keep in sync with single_platform ── - run: dart pub get env: GIT_LFS_SKIP_SMUDGE: "1" @@ -336,6 +375,7 @@ jobs: # --- BEGIN USER: pre-test --- # --- END USER: pre-test --- + # ── shared:test ── keep in sync with single_platform ── <%#managed_test%> - name: Test run: dart run runtime_ci_tooling:manage_cicd test @@ -345,6 +385,16 @@ jobs: run: dart test <%/managed_test%> + - name: Upload test artifacts on failure + if: failure() + uses: actions/upload-artifact@v4 + with: + name: test-artifacts-${{ matrix.os }} + path: | + test/integration/fixtures/bin/ + **/test-results/ + retention-days: 7 + # --- BEGIN USER: post-test --- # --- END USER: post-test --- <%/multi_platform%> From e2cb4fc9ccb8931cb755b33dde785d8808a3287a Mon Sep 17 00:00:00 2001 From: 
Tsavo Knott Date: Tue, 24 Feb 2026 14:40:03 -0500 Subject: [PATCH 3/3] test: add 69-test suite for WorkflowGenerator validate() and loadCiConfig() P0 coverage for validate(): - dart_sdk: missing, wrong type, empty, whitespace, channel vs semver - features: missing, wrong type, unknown keys (typo detection), non-bool - platforms: wrong type, unknown platform, valid single/multi - sub_packages: type validation, path traversal, duplicates, unsafe chars - secrets, pat_secret, line_length, runner_overrides P0 coverage for loadCiConfig(): - File missing, no ci key, valid ci section, malformed JSON, ci not a Map Partially addresses #5 Co-Authored-By: Claude Opus 4.6 --- test/workflow_generator_test.dart | 600 ++++++++++++++++++++++++++++++ 1 file changed, 600 insertions(+) create mode 100644 test/workflow_generator_test.dart diff --git a/test/workflow_generator_test.dart b/test/workflow_generator_test.dart new file mode 100644 index 0000000..15c136d --- /dev/null +++ b/test/workflow_generator_test.dart @@ -0,0 +1,600 @@ +import 'dart:convert'; +import 'dart:io'; + +import 'package:test/test.dart'; + +import 'package:runtime_ci_tooling/src/cli/utils/workflow_generator.dart'; + +/// Helper: build a minimal valid CI config map. +Map _validConfig({ + String dartSdk = '3.9.2', + Map? features, + List? platforms, + Map? secrets, + String? pat, + dynamic lineLength, + List? subPackages, + Map? runnerOverrides, +}) { + return { + 'dart_sdk': dartSdk, + 'features': features ?? 
{'proto': false, 'lfs': false}, + if (platforms != null) 'platforms': platforms, + if (secrets != null) 'secrets': secrets, + if (pat != null) 'personal_access_token_secret': pat, + if (lineLength != null) 'line_length': lineLength, + if (subPackages != null) 'sub_packages': subPackages, + if (runnerOverrides != null) 'runner_overrides': runnerOverrides, + }; +} + +void main() { + // =========================================================================== + // P0: validate() tests + // =========================================================================== + group('WorkflowGenerator.validate()', () { + // ---- dart_sdk ---- + group('dart_sdk', () { + test('missing dart_sdk produces error', () { + final errors = WorkflowGenerator.validate({'features': {}}); + expect(errors, contains('ci.dart_sdk is required')); + }); + + test('null dart_sdk produces error', () { + final errors = WorkflowGenerator.validate({ + 'dart_sdk': null, + 'features': {}, + }); + expect(errors, contains('ci.dart_sdk is required')); + }); + + test('non-string dart_sdk produces error', () { + final errors = WorkflowGenerator.validate({ + 'dart_sdk': 42, + 'features': {}, + }); + expect(errors, anyElement(contains('must be a string'))); + }); + + test('empty-string dart_sdk produces error', () { + final errors = WorkflowGenerator.validate({ + 'dart_sdk': '', + 'features': {}, + }); + expect(errors, anyElement(contains('non-empty'))); + }); + + test('whitespace-only dart_sdk produces error', () { + final errors = WorkflowGenerator.validate({ + 'dart_sdk': ' ', + 'features': {}, + }); + // After trim the string is empty + expect(errors, anyElement(contains('non-empty'))); + }); + + test('dart_sdk with leading/trailing whitespace produces error', () { + final errors = WorkflowGenerator.validate({ + 'dart_sdk': ' 3.9.2 ', + 'features': {}, + }); + expect(errors, anyElement(contains('whitespace'))); + }); + + test('dart_sdk with trailing newline triggers whitespace error', () { + // A trailing 
\n makes trimmed != sdk, so the whitespace check fires first. + final errors = WorkflowGenerator.validate({ + 'dart_sdk': '3.9.2\n', + 'features': {}, + }); + expect(errors, anyElement(contains('whitespace'))); + }); + + test('dart_sdk with embedded tab (after trim is identity) triggers newlines/tabs error', () { + // A tab in the middle: trim() has no effect but the regex catches it. + final errors = WorkflowGenerator.validate({ + 'dart_sdk': '3.9\t.2', + 'features': {}, + }); + expect(errors, anyElement(contains('newlines/tabs'))); + }); + + test('valid semver dart_sdk passes', () { + final errors = WorkflowGenerator.validate(_validConfig(dartSdk: '3.9.2')); + expect(errors.where((e) => e.contains('dart_sdk')), isEmpty); + }); + + test('valid semver with pre-release passes', () { + final errors = WorkflowGenerator.validate(_validConfig(dartSdk: '3.10.0-beta.1')); + expect(errors.where((e) => e.contains('dart_sdk')), isEmpty); + }); + + test('channel "stable" passes', () { + final errors = WorkflowGenerator.validate(_validConfig(dartSdk: 'stable')); + expect(errors.where((e) => e.contains('dart_sdk')), isEmpty); + }); + + test('channel "beta" passes', () { + final errors = WorkflowGenerator.validate(_validConfig(dartSdk: 'beta')); + expect(errors.where((e) => e.contains('dart_sdk')), isEmpty); + }); + + test('channel "dev" passes', () { + final errors = WorkflowGenerator.validate(_validConfig(dartSdk: 'dev')); + expect(errors.where((e) => e.contains('dart_sdk')), isEmpty); + }); + + test('invalid dart_sdk like "latest" produces error', () { + final errors = WorkflowGenerator.validate(_validConfig(dartSdk: 'latest')); + expect(errors, anyElement(contains('channel'))); + }); + + test('invalid dart_sdk like "3.9" (not full semver) produces error', () { + final errors = WorkflowGenerator.validate(_validConfig(dartSdk: '3.9')); + expect(errors, anyElement(contains('channel'))); + }); + }); + + // ---- features ---- + group('features', () { + test('missing features 
produces error', () { + final errors = WorkflowGenerator.validate({'dart_sdk': '3.9.2'}); + expect(errors, contains('ci.features is required')); + }); + + test('non-map features produces error', () { + final errors = WorkflowGenerator.validate({ + 'dart_sdk': '3.9.2', + 'features': 'not_a_map', + }); + expect(errors, anyElement(contains('features must be an object'))); + }); + + test('features with non-bool value produces error', () { + final errors = WorkflowGenerator.validate({ + 'dart_sdk': '3.9.2', + 'features': {'proto': 'yes'}, + }); + expect(errors, anyElement(contains('must be a bool'))); + }); + + test('features with unknown key (typo) produces error', () { + final errors = WorkflowGenerator.validate({ + 'dart_sdk': '3.9.2', + 'features': {'prto': true}, // typo of 'proto' + }); + expect(errors, anyElement(contains('unknown key "prto"'))); + }); + + test('all known feature keys pass validation', () { + final errors = WorkflowGenerator.validate(_validConfig( + features: { + 'proto': true, + 'lfs': false, + 'format_check': true, + 'analysis_cache': false, + 'managed_analyze': true, + 'managed_test': false, + }, + )); + expect(errors.where((e) => e.contains('features')), isEmpty); + }); + + test('empty features map passes (no keys required)', () { + final errors = WorkflowGenerator.validate(_validConfig(features: {})); + expect(errors.where((e) => e.contains('features')), isEmpty); + }); + }); + + // ---- platforms ---- + group('platforms', () { + test('non-list platforms produces error', () { + final errors = WorkflowGenerator.validate(_validConfig(platforms: null)..['platforms'] = 'ubuntu'); + expect(errors, anyElement(contains('platforms must be an array'))); + }); + + test('unknown platform entry produces error', () { + final errors = WorkflowGenerator.validate(_validConfig(platforms: ['ubuntu', 'solaris'])); + expect(errors, anyElement(contains('invalid platform "solaris"'))); + }); + + test('non-string platform entry produces error', () { + final config 
= _validConfig(); + config['platforms'] = [42]; + final errors = WorkflowGenerator.validate(config); + expect(errors, anyElement(contains('invalid platform'))); + }); + + test('valid single platform passes', () { + final errors = WorkflowGenerator.validate(_validConfig(platforms: ['ubuntu'])); + expect(errors.where((e) => e.contains('platforms')), isEmpty); + }); + + test('valid multi-platform passes', () { + final errors = WorkflowGenerator.validate( + _validConfig(platforms: ['ubuntu', 'macos', 'windows']), + ); + expect(errors.where((e) => e.contains('platforms')), isEmpty); + }); + + test('omitted platforms (null) does not produce error', () { + final errors = WorkflowGenerator.validate(_validConfig()); + expect(errors.where((e) => e.contains('platforms')), isEmpty); + }); + }); + + // ---- secrets ---- + group('secrets', () { + test('non-map secrets produces error', () { + final config = _validConfig(); + config['secrets'] = 'not_a_map'; + final errors = WorkflowGenerator.validate(config); + expect(errors, anyElement(contains('secrets must be an object'))); + }); + + test('null secrets is fine (optional)', () { + final errors = WorkflowGenerator.validate(_validConfig()); + expect(errors.where((e) => e.contains('secrets')), isEmpty); + }); + + test('valid secrets map passes', () { + final errors = WorkflowGenerator.validate( + _validConfig(secrets: {'API_KEY': 'SOME_SECRET'}), + ); + expect(errors.where((e) => e.contains('secrets')), isEmpty); + }); + }); + + // ---- personal_access_token_secret ---- + group('personal_access_token_secret', () { + test('non-string pat produces error', () { + final config = _validConfig(); + config['personal_access_token_secret'] = 123; + final errors = WorkflowGenerator.validate(config); + expect(errors, anyElement(contains('personal_access_token_secret'))); + }); + + test('empty pat produces error', () { + final errors = WorkflowGenerator.validate(_validConfig(pat: '')); + expect(errors, 
anyElement(contains('personal_access_token_secret'))); + }); + + test('valid pat passes', () { + final errors = WorkflowGenerator.validate(_validConfig(pat: 'MY_PAT')); + expect(errors.where((e) => e.contains('personal_access_token_secret')), isEmpty); + }); + + test('null pat is fine (optional, defaults to GITHUB_TOKEN)', () { + final errors = WorkflowGenerator.validate(_validConfig()); + expect(errors.where((e) => e.contains('personal_access_token_secret')), isEmpty); + }); + }); + + // ---- line_length ---- + group('line_length', () { + test('non-numeric line_length produces error', () { + final errors = WorkflowGenerator.validate(_validConfig(lineLength: true)); + expect(errors, anyElement(contains('line_length'))); + }); + + test('int line_length passes', () { + final errors = WorkflowGenerator.validate(_validConfig(lineLength: 80)); + expect(errors.where((e) => e.contains('line_length')), isEmpty); + }); + + test('string line_length passes', () { + final errors = WorkflowGenerator.validate(_validConfig(lineLength: '120')); + expect(errors.where((e) => e.contains('line_length')), isEmpty); + }); + + test('null line_length is fine (optional)', () { + final errors = WorkflowGenerator.validate(_validConfig()); + expect(errors.where((e) => e.contains('line_length')), isEmpty); + }); + }); + + // ---- sub_packages (Issue #9 validation) ---- + group('sub_packages', () { + test('non-list sub_packages produces error', () { + final config = _validConfig(); + config['sub_packages'] = 'not_a_list'; + final errors = WorkflowGenerator.validate(config); + expect(errors, anyElement(contains('sub_packages must be an array'))); + }); + + test('sub_packages entry that is not a map produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: ['just_a_string']), + ); + expect(errors, anyElement(contains('sub_packages entries must be objects'))); + }); + + test('sub_packages with missing name produces error', () { + final errors = 
WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'path': 'packages/foo'}, + ]), + ); + expect(errors, anyElement(contains('name must be a non-empty string'))); + }); + + test('sub_packages with empty name produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': '', 'path': 'packages/foo'}, + ]), + ); + expect(errors, anyElement(contains('name must be a non-empty string'))); + }); + + test('sub_packages with missing path produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo'}, + ]), + ); + expect(errors, anyElement(contains('path must be a non-empty string'))); + }); + + test('sub_packages with empty path produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': ''}, + ]), + ); + expect(errors, anyElement(contains('path must be a non-empty string'))); + }); + + test('sub_packages path with directory traversal (..) 
produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': '../../../etc/passwd'}, + ]), + ); + expect(errors, anyElement(contains('must not traverse outside the repo'))); + }); + + test('sub_packages path with embedded traversal produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': 'packages/../../../etc'}, + ]), + ); + expect(errors, anyElement(contains('must not traverse outside the repo'))); + }); + + test('sub_packages absolute path produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': '/usr/local/bin'}, + ]), + ); + expect(errors, anyElement(contains('must be a relative repo path'))); + }); + + test('sub_packages path starting with ~ produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': '~/evil'}, + ]), + ); + expect(errors, anyElement(contains('must be a relative repo path'))); + }); + + test('sub_packages path with backslashes produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': r'packages\foo'}, + ]), + ); + expect(errors, anyElement(contains('forward slashes'))); + }); + + test('sub_packages path with unsupported characters produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': 'packages/foo bar'}, + ]), + ); + expect(errors, anyElement(contains('unsupported characters'))); + }); + + test('sub_packages path with leading/trailing whitespace produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': ' packages/foo '}, + ]), + ); + expect(errors, anyElement(contains('whitespace'))); + }); + + test('sub_packages path with trailing tab triggers whitespace error', () { + // Trailing \t means 
trimmed != value, so the whitespace check fires first. + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': 'packages/foo\t'}, + ]), + ); + expect(errors, anyElement(contains('whitespace'))); + }); + + test('sub_packages path with embedded tab triggers newlines/tabs error', () { + // Embedded tab: trim() is identity, so newlines/tabs check catches it. + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': 'packages/f\too'}, + ]), + ); + expect(errors, anyElement(contains('newlines/tabs'))); + }); + + test('sub_packages duplicate name produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': 'packages/foo'}, + {'name': 'foo', 'path': 'packages/bar'}, + ]), + ); + expect(errors, anyElement(contains('duplicate name "foo"'))); + }); + + test('sub_packages duplicate path (after normalization) produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'foo', 'path': 'packages/foo'}, + {'name': 'bar', 'path': 'packages/./foo'}, + ]), + ); + expect(errors, anyElement(contains('duplicate path'))); + }); + + test('valid sub_packages passes', () { + final errors = WorkflowGenerator.validate( + _validConfig(subPackages: [ + {'name': 'core', 'path': 'packages/core'}, + {'name': 'api', 'path': 'packages/api'}, + ]), + ); + expect(errors.where((e) => e.contains('sub_packages')), isEmpty); + }); + + test('null sub_packages is fine (optional)', () { + final errors = WorkflowGenerator.validate(_validConfig()); + expect(errors.where((e) => e.contains('sub_packages')), isEmpty); + }); + }); + + // ---- runner_overrides ---- + group('runner_overrides', () { + test('non-map runner_overrides produces error', () { + final config = _validConfig(); + config['runner_overrides'] = 'invalid'; + final errors = WorkflowGenerator.validate(config); + expect(errors, 
anyElement(contains('runner_overrides must be an object'))); + }); + + test('runner_overrides with invalid platform key produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(runnerOverrides: {'solaris': 'my-runner'}), + ); + expect(errors, anyElement(contains('invalid platform key "solaris"'))); + }); + + test('runner_overrides with empty string value produces error', () { + final errors = WorkflowGenerator.validate( + _validConfig(runnerOverrides: {'ubuntu': ''}), + ); + expect(errors, anyElement(contains('must be a non-empty string'))); + }); + + test('valid runner_overrides passes', () { + final errors = WorkflowGenerator.validate( + _validConfig(runnerOverrides: {'ubuntu': 'custom-runner-label'}), + ); + expect(errors.where((e) => e.contains('runner_overrides')), isEmpty); + }); + }); + + // ---- fully valid config produces no errors ---- + test('fully valid config produces no errors', () { + final errors = WorkflowGenerator.validate(_validConfig( + dartSdk: '3.9.2', + features: {'proto': true, 'lfs': false}, + platforms: ['ubuntu', 'macos'], + secrets: {'API_KEY': 'MY_SECRET'}, + pat: 'MY_PAT', + lineLength: 120, + subPackages: [ + {'name': 'core', 'path': 'packages/core'}, + ], + runnerOverrides: {'ubuntu': 'custom-runner'}, + )); + expect(errors, isEmpty); + }); + + // ---- multiple errors accumulate ---- + test('multiple errors are accumulated (not short-circuited)', () { + final errors = WorkflowGenerator.validate({ + // missing dart_sdk, missing features + }); + expect(errors.length, greaterThanOrEqualTo(2)); + expect(errors, anyElement(contains('dart_sdk'))); + expect(errors, anyElement(contains('features'))); + }); + }); + + // =========================================================================== + // P0: loadCiConfig() tests + // =========================================================================== + group('WorkflowGenerator.loadCiConfig()', () { + late Directory tempDir; + + setUp(() { + tempDir = 
Directory.systemTemp.createTempSync('wf_gen_test_'); + }); + + tearDown(() { + tempDir.deleteSync(recursive: true); + }); + + test('returns null when config.json does not exist', () { + final result = WorkflowGenerator.loadCiConfig(tempDir.path); + expect(result, isNull); + }); + + test('returns null when config.json exists but has no "ci" key', () { + final configDir = Directory('${tempDir.path}/.runtime_ci')..createSync(); + File('${configDir.path}/config.json').writeAsStringSync(json.encode({ + 'repo_name': 'test_repo', + })); + final result = WorkflowGenerator.loadCiConfig(tempDir.path); + expect(result, isNull); + }); + + test('returns the ci map when config.json has a valid "ci" section', () { + final configDir = Directory('${tempDir.path}/.runtime_ci')..createSync(); + File('${configDir.path}/config.json').writeAsStringSync(json.encode({ + 'ci': { + 'dart_sdk': '3.9.2', + 'features': {'proto': true}, + }, + })); + final result = WorkflowGenerator.loadCiConfig(tempDir.path); + expect(result, isNotNull); + expect(result, isA<Map<String, dynamic>>()); + expect(result!['dart_sdk'], equals('3.9.2')); + expect((result['features'] as Map)['proto'], isTrue); + }); + + test('throws StateError on malformed JSON', () { + final configDir = Directory('${tempDir.path}/.runtime_ci')..createSync(); + File('${configDir.path}/config.json').writeAsStringSync('{ not valid json'); + expect( + () => WorkflowGenerator.loadCiConfig(tempDir.path), + throwsA(isA<StateError>().having((e) => e.message, 'message', contains('Malformed JSON'))), + ); + }); + + test('throws StateError when "ci" is not a Map', () { + final configDir = Directory('${tempDir.path}/.runtime_ci')..createSync(); + File('${configDir.path}/config.json').writeAsStringSync(json.encode({ + 'ci': 'not_a_map', + })); + expect( + () => WorkflowGenerator.loadCiConfig(tempDir.path), + throwsA(isA<StateError>().having((e) => e.message, 'message', contains('object'))), + ); + }); + + test('throws StateError when "ci" is a list instead of a map', () { + final configDir 
= Directory('${tempDir.path}/.runtime_ci')..createSync(); + File('${configDir.path}/config.json').writeAsStringSync(json.encode({ + 'ci': [1, 2, 3], + })); + expect( + () => WorkflowGenerator.loadCiConfig(tempDir.path), + throwsA(isA<StateError>()), + ); + }); + }); +}