diff --git a/.gemini/commands/triage.toml b/.gemini/commands/triage.toml index 9913d51..dec0e41 100644 --- a/.gemini/commands/triage.toml +++ b/.gemini/commands/triage.toml @@ -3,34 +3,51 @@ description = "Triage a GitHub issue: classify, prioritize, detect duplicates. U prompt = """ Triage GitHub issue #{{args}} for the !{grep '^name:' pubspec.yaml | sed 's/name: //'} package. -## Issue Details +## Repository Context ``` -!{gh issue view {{args}} --json number,title,body,author,labels,createdAt --jq '"#\\(.number): \\(.title)\\nAuthor: @\\(.author.login)\\nLabels: \\([.labels[].name] | join(", "))\\nCreated: \\(.createdAt)\\n\\n\\(.body)"' 2>/dev/null || echo "Could not fetch issue"} +REPO: !{gh repo view --json nameWithOwner -q .nameWithOwner 2>/dev/null || echo "UNKNOWN"} +``` + +## CRITICAL SAFETY RULES — READ BEFORE ACTING + +1. **ALWAYS use `--repo ` on EVERY `gh` command.** Never rely on git remote resolution. +2. **ONLY operate on repositories owned by: `open-runtime`, `pieces-app`.** If the repo above belongs to a different org (e.g. `grpc`, `niclas-pricken`, etc.), STOP IMMEDIATELY and report: "Refusing to triage — repo belongs to an unauthorized org." +3. **Before posting any comment**, check the existing comments below for duplicates. If a triage comment already exists, do NOT post another one. +4. **Never post, edit, label, or close issues on upstream/parent repos.** This is a fork — only operate on the fork's own issues. 
+ +## Issue Details (including existing comments) +``` +!{REPO=$(gh repo view --json nameWithOwner -q .nameWithOwner 2>/dev/null); case "$REPO" in open-runtime/*|pieces-app/*) ;; *) echo "BLOCKED: repo $REPO belongs to unauthorized org — refusing to fetch"; exit 0;; esac; gh issue view {{args}} --repo "$REPO" --json number,title,body,author,labels,createdAt,comments --jq '"#\\(.number): \\(.title)\\nAuthor: @\\(.author.login)\\nLabels: \\([.labels[].name] | join(", "))\\nCreated: \\(.createdAt)\\n\\n\\(.body)\\n\\n--- EXISTING COMMENTS (\\(.comments | length)) ---\\n\\(.comments | map("[\\(.author.login) @ \\(.createdAt)]:\\n\\(.body)") | join("\\n---\\n"))"' 2>/dev/null || echo "Could not fetch issue"} ``` ## Package Structure ``` -!{tree lib/ -L 2 --dirsfirst -d} +!{tree lib/ -L 2 --dirsfirst -d 2>/dev/null || echo "No lib/ directory"} ``` ## Open Issues (for duplicate detection) ``` -!{gh issue list --state open --limit 50 --json number,title,labels --jq '.[] | "#\\(.number): \\(.title) [\\([.labels[].name] | join(", "))]"' 2>/dev/null || echo "Could not list issues"} +!{REPO=$(gh repo view --json nameWithOwner -q .nameWithOwner 2>/dev/null); case "$REPO" in open-runtime/*|pieces-app/*) ;; *) echo "BLOCKED: repo $REPO belongs to unauthorized org"; exit 0;; esac; gh issue list --repo "$REPO" --state open --limit 50 --json number,title,labels --jq '.[] | "#\\(.number): \\(.title) [\\([.labels[].name] | join(", "))]"' 2>/dev/null || echo "Could not list issues"} ``` ## Triage Tasks -1. **Type**: Classify as one of: bug, feature-request, enhancement, documentation, question -2. **Priority**: Assign P0-critical, P1-high, P2-medium, or P3-low -3. **Area**: Classify area(s): proto, ml-models, core, provisioning, grpc, crypto, googleapis, ci-cd, docs -4. **Duplicates**: Check open issues for duplicates (HIGH/MEDIUM/LOW confidence) -5. **Comment**: Draft a helpful, welcoming comment for the reporter +1. 
**Org Check**: Verify the repository belongs to `open-runtime` or `pieces-app`. If not, STOP. +2. **Duplicate Check**: Review the EXISTING COMMENTS above. If a triage comment already exists, skip commenting. +3. **Type**: Classify as one of: bug, feature-request, enhancement, documentation, question +4. **Priority**: Assign P0-critical, P1-high, P2-medium, or P3-low +5. **Area**: Classify area(s) based on the package structure above +6. **Duplicates**: Check open issues for duplicates (HIGH/MEDIUM/LOW confidence) +7. **Comment**: Draft a helpful, welcoming comment for the reporter (only if no triage comment exists yet) ## Actions -Apply your triage using gh CLI: -- `gh issue edit {{args}} --add-label ""` -- `gh issue edit {{args}} --add-label ""` -- `gh issue edit {{args}} --add-label "area/"` -- `gh issue comment {{args}} --body ""` +IMPORTANT: Replace `OWNER/REPO` below with the actual repo from "Repository Context" above. +IMPORTANT: If any existing comment above already contains triage analysis, do NOT post a duplicate. + +Apply your triage using gh CLI (ALWAYS include --repo): +- `gh issue edit {{args}} --repo OWNER/REPO --add-label ""` +- `gh issue edit {{args}} --repo OWNER/REPO --add-label ""` +- `gh issue edit {{args}} --repo OWNER/REPO --add-label "area/"` +- `gh issue comment {{args}} --repo OWNER/REPO --body ""` """ diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 23dde75..092e5bf 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,4 +1,4 @@ -# Generated by runtime_ci_tooling v0.11.3 +# Generated by runtime_ci_tooling v0.12.1 # Configured via .runtime_ci/config.json — run 'dart run runtime_ci_tooling:manage_cicd update --workflows' to regenerate. name: CI @@ -77,7 +77,7 @@ jobs: echo "::notice::Code is already formatted." 
fi - analyze-and-test: + analyze: needs: [pre-check, auto-format] if: needs.pre-check.outputs.should_run == 'true' runs-on: ubuntu-latest @@ -104,8 +104,8 @@ jobs: uses: actions/cache@v5.0.3 with: path: ~/.pub-cache - key: ${{ runner.os }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }} - restore-keys: ${{ runner.os }}-dart-pub- + key: ${{ runner.os }}-${{ runner.arch }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }} + restore-keys: ${{ runner.os }}-${{ runner.arch }}-dart-pub- - run: dart pub get env: @@ -119,6 +119,44 @@ jobs: exit 1 fi + test: + needs: [pre-check, analyze, auto-format] + if: needs.pre-check.outputs.should_run == 'true' + runs-on: ${{ matrix.runner }} + strategy: + fail-fast: false + matrix: + include: [{"platform_id":"ubuntu-x64","runner":"ubuntu-latest","os_family":"linux","arch":"x64"},{"platform_id":"ubuntu-arm64","runner":"runtime-ubuntu-24.04-arm64-208gb-64core","os_family":"linux","arch":"arm64"},{"platform_id":"macos-arm64","runner":"macos-latest","os_family":"macos","arch":"arm64"},{"platform_id":"macos-x64","runner":"macos-15-intel","os_family":"macos","arch":"x64"},{"platform_id":"windows-x64","runner":"windows-latest","os_family":"windows","arch":"x64"},{"platform_id":"windows-arm64","runner":"runtime-windows-11-arm64-208gb-64core","os_family":"windows","arch":"arm64"}] + steps: + - uses: actions/checkout@v6.0.2 + with: + ref: ${{ needs.auto-format.outputs.sha }} + persist-credentials: false + + - name: Configure Git for HTTPS with Token + shell: bash + run: | + TOKEN="${{ secrets.TSAVO_AT_PIECES_PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }}" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "git@github.com:" + git config --global url."https://x-access-token:${TOKEN}@github.com/".insteadOf "ssh://git@github.com/" + git config --global url."https://x-access-token:${TOKEN}@github.com/open-runtime/".insteadOf "git@github.com:open-runtime/" + git config --global 
url."https://x-access-token:${TOKEN}@github.com/pieces-app/".insteadOf "git@github.com:pieces-app/" + + - uses: dart-lang/setup-dart@v1.7.1 + with: + sdk: "3.9.2" + + - name: Cache Dart pub dependencies + uses: actions/cache@v5.0.3 + with: + path: ~/.pub-cache + key: ${{ runner.os }}-${{ runner.arch }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }} + restore-keys: ${{ runner.os }}-${{ runner.arch }}-dart-pub- + + - run: dart pub get + env: + GIT_LFS_SKIP_SMUDGE: "1" + # --- BEGIN USER: pre-test --- # --- END USER: pre-test --- @@ -127,3 +165,6 @@ jobs: # --- BEGIN USER: post-test --- # --- END USER: post-test --- + +# --- BEGIN USER: extra-jobs --- +# --- END USER: extra-jobs --- diff --git a/.runtime_ci/config.json b/.runtime_ci/config.json index ac89dfa..25aad8a 100644 --- a/.runtime_ci/config.json +++ b/.runtime_ci/config.json @@ -76,6 +76,11 @@ "managed_test": false }, "secrets": {}, - "sub_packages": [] + "sub_packages": [], + "platforms": ["ubuntu-x64", "ubuntu-arm64", "macos-arm64", "macos-x64", "windows-x64", "windows-arm64"], + "runner_overrides": { + "ubuntu-arm64": "runtime-ubuntu-24.04-arm64-208gb-64core", + "windows-arm64": "runtime-windows-11-arm64-208gb-64core" + } } } diff --git a/.runtime_ci/template_versions.json b/.runtime_ci/template_versions.json index e422880..2f9fd51 100644 --- a/.runtime_ci/template_versions.json +++ b/.runtime_ci/template_versions.json @@ -1,6 +1,6 @@ { - "tooling_version": "0.11.3", - "updated_at": "2026-02-24T00:59:57.805138Z", + "tooling_version": "0.12.1", + "updated_at": "2026-02-24T16:04:43.651023Z", "templates": { "gemini_settings": { "hash": "93983f49dd2f40d2ed245271854946d8916b8f0698ed2cfaf12058305baa0b08", @@ -23,9 +23,9 @@ "updated_at": "2026-02-24T00:59:57.620091Z" }, "workflow_ci": { - "hash": "b88e1af6c7579d24ce0f57a0a483c3f1d7e9c7b8ca5de2ee0fe5c0c49c18bc77", - "consumer_hash": "618010803b11f765c2359fc6769edec4178162b6d54a0061f086ee1a54278c9a", - "updated_at": "2026-02-24T00:59:57.726984Z" + "hash": 
"92c5c82c94e96022d4c7bd7372b1d04273e927223b9f8726a182871d89d1ef77", + "consumer_hash": "1f33e89a0ccffc4ec34838b4e76f9c4d6d7ab5eefb1d9731b554dfc0ed752696", + "updated_at": "2026-02-24T16:04:43.655468Z" }, "workflow_release": { "hash": "326627cf41fdeb6cd61dae2fda98599d5815a34e63e4a8af1aaa8f7ad18435d3", diff --git a/SETUP.md b/SETUP.md index fad5700..0d99b41 100644 --- a/SETUP.md +++ b/SETUP.md @@ -309,15 +309,18 @@ The CI workflow (`.github/workflows/ci.yaml`) is generated from your `ci` sectio | `features.proto` | bool | `false` | Enable protobuf generation step | | `features.lfs` | bool | `false` | Enable Git LFS checkout | | `features.format_check` | bool | `true` | Enable `dart format` check | -| `features.analysis_cache` | bool | `true` | Cache analysis results across runs | -| `features.managed_analyze` | bool | `true` | Run `dart analyze` via tooling | -| `features.managed_test` | bool | `true` | Run `dart test` via tooling | +| `features.analysis_cache` | bool | `false` | Cache analysis results across runs | +| `features.managed_analyze` | bool | `false` | Run `dart analyze` via tooling | +| `features.managed_test` | bool | `false` | Run `dart test` via tooling | +| `platforms` | list | `["ubuntu"]` | Platform matrix. If 2+ entries, CI runs `analyze` once then `test` as a matrix. Valid: `ubuntu-x64`, `ubuntu-arm64`, `macos-arm64`, `macos-x64`, `windows-x64`, `windows-arm64` (plus aliases `ubuntu`, `macos`, `windows`). | +| `runner_overrides` | object | `{}` | Override platform IDs to custom `runs-on` labels (e.g. org-managed GitHub-hosted runners). Example: `{ "ubuntu-arm64": "runtime-ubuntu-24.04-arm64-208gb-64core" }` | | `secrets` | object | `{}` | Additional secrets as `{ "ENV_NAME": "SECRET_NAME" }` | | `sub_packages` | list | `[]` | Sub-packages as `[{ "name": "...", "path": "..." 
}]` | You can add custom steps before/after tests using user-preservable sections in the generated workflow — look for `# --- BEGIN USER: pre-test ---` and -`# --- END USER: post-test ---` markers. +`# --- END USER: post-test ---` markers. To add additional jobs (including reusable workflow calls), +use `# --- BEGIN USER: extra-jobs ---` / `# --- END USER: extra-jobs ---`. ### Customize the workflows diff --git a/USAGE.md b/USAGE.md index 60e3848..bae4485 100644 --- a/USAGE.md +++ b/USAGE.md @@ -1242,8 +1242,17 @@ final exists = await commandExists('git'); **Triggers:** Push to `main`, pull requests targeting `main` **Jobs:** -1. `pre-check` -- Skip bot commits (author `github-actions[bot]` or `[skip ci]`) -2. `analyze-and-test` -- Verify protos, run analysis, run tests +1. `pre-check` — Skip bot commits (author `github-actions[bot]` or `[skip ci]`) +2. Optional `auto-format` — If `ci.features.format_check=true`, auto-format `lib/` and push `bot(format)` commit +3. **Single-platform mode** (default, `ci.platforms` missing or 1 entry): + - `analyze-and-test` — Verify protos, run analysis, run tests +4. **Multi-platform mode** (`ci.platforms` has 2+ entries): + - `analyze` — Run analysis once (Ubuntu) + - `test` — Run tests as a matrix across OS+arch (`x64` + `arm64`) + +**Platform matrix configuration:** +- `ci.platforms`: list of platform IDs (e.g. `["ubuntu-x64","ubuntu-arm64","macos-arm64","macos-x64","windows-x64","windows-arm64"]`) +- `ci.runner_overrides`: optional map to point platform IDs at custom `runs-on` labels (e.g. 
org-managed GitHub-hosted runners) **Key steps:** ```yaml diff --git a/lib/src/cli/commands/analyze_command.dart b/lib/src/cli/commands/analyze_command.dart index 22871d4..cd018ee 100644 --- a/lib/src/cli/commands/analyze_command.dart +++ b/lib/src/cli/commands/analyze_command.dart @@ -5,8 +5,9 @@ import 'package:args/command_runner.dart'; import '../../triage/utils/config.dart'; import '../utils/logger.dart'; import '../utils/repo_utils.dart'; +import '../utils/sub_package_utils.dart'; -/// Run dart analyze. +/// Run `dart analyze` on the root package and all configured sub-packages. class AnalyzeCommand extends Command { @override final String name = 'analyze'; @@ -24,6 +25,8 @@ class AnalyzeCommand extends Command { Logger.header('Running dart analyze'); + final failures = []; + // --fatal-infos is non-negatable (infos are non-fatal by default). // --[no-]fatal-warnings supports negation; disable it so only errors fail CI. final result = Process.runSync(Platform.resolvedExecutable, [ @@ -38,10 +41,74 @@ class AnalyzeCommand extends Command { if (stderr.isNotEmpty) Logger.error(stderr); if (result.exitCode != 0) { - Logger.error('Analysis failed with exit code ${result.exitCode}'); - exit(result.exitCode); + Logger.error('Root analysis failed with exit code ${result.exitCode}'); + failures.add(config.repoName); + } else { + Logger.success('Root analysis complete'); + } + + // ── Sub-package analysis ────────────────────────────────────────────── + final subPackages = SubPackageUtils.loadSubPackages(repoRoot); + SubPackageUtils.logSubPackages(subPackages); + + for (final sp in subPackages) { + final name = sp['name'] as String; + final path = sp['path'] as String; + final dir = '$repoRoot/$path'; + + Logger.header('Analyzing sub-package: $name ($path)'); + + if (!Directory(dir).existsSync()) { + Logger.warn(' Directory not found: $dir — skipping'); + continue; + } + + if (!File('$dir/pubspec.yaml').existsSync()) { + Logger.error(' No pubspec.yaml in $dir — cannot 
analyze'); + failures.add(name); + continue; + } + + // Ensure dependencies are resolved (sub-packages have independent + // pubspec.yaml files that the root `dart pub get` may not cover). + final pubGetResult = Process.runSync( + Platform.resolvedExecutable, + ['pub', 'get'], + workingDirectory: dir, + environment: {'GIT_LFS_SKIP_SMUDGE': '1'}, + ); + if (pubGetResult.exitCode != 0) { + final pubGetStderr = (pubGetResult.stderr as String).trim(); + if (pubGetStderr.isNotEmpty) Logger.error(pubGetStderr); + Logger.error(' dart pub get failed for $name (exit code ${pubGetResult.exitCode})'); + failures.add(name); + continue; + } + + final spResult = Process.runSync(Platform.resolvedExecutable, [ + 'analyze', + '--no-fatal-warnings', + ], workingDirectory: dir); + + final spStdout = (spResult.stdout as String).trim(); + if (spStdout.isNotEmpty) print(spStdout); + + final spStderr = (spResult.stderr as String).trim(); + if (spStderr.isNotEmpty) Logger.error(spStderr); + + if (spResult.exitCode != 0) { + Logger.error('Analysis failed for $name (exit code ${spResult.exitCode})'); + failures.add(name); + } else { + Logger.success('Analysis passed for $name'); + } + } + + if (failures.isNotEmpty) { + Logger.error('Analysis failed for ${failures.length} package(s): ${failures.join(', ')}'); + exit(1); } - Logger.success('Analysis complete'); + Logger.success('All analysis complete'); } } diff --git a/lib/src/cli/commands/compose_command.dart b/lib/src/cli/commands/compose_command.dart index 9c7a68b..6219298 100644 --- a/lib/src/cli/commands/compose_command.dart +++ b/lib/src/cli/commands/compose_command.dart @@ -15,6 +15,7 @@ import '../utils/prompt_resolver.dart'; import '../utils/release_utils.dart'; import '../utils/repo_utils.dart'; import '../utils/step_summary.dart'; +import '../utils/sub_package_utils.dart'; import '../utils/version_detection.dart'; const String _kGeminiProModel = 'gemini-3.1-pro-preview'; @@ -79,6 +80,16 @@ class ComposeCommand extends Command { } 
ctx.savePrompt('compose', prompt); + // Enrich prompt with sub-package context for multi-package repos + SubPackageUtils.enrichPromptWithSubPackages( + repoRoot: repoRoot, + prevTag: prevTag, + promptFilePath: ctx.artifactPath('compose', 'prompt.txt'), + buildInstructions: SubPackageUtils.buildHierarchicalChangelogInstructions, + newVersion: newVersion, + verbose: global.verbose, + ); + if (global.dryRun) { Logger.info('[DRY-RUN] Would run Gemini CLI with composer prompt (${prompt.length} chars)'); return; diff --git a/lib/src/cli/commands/create_release_command.dart b/lib/src/cli/commands/create_release_command.dart index c8ef4c2..1a64633 100644 --- a/lib/src/cli/commands/create_release_command.dart +++ b/lib/src/cli/commands/create_release_command.dart @@ -15,6 +15,7 @@ import '../utils/process_runner.dart'; import '../utils/release_utils.dart'; import '../utils/repo_utils.dart'; import '../utils/step_summary.dart'; +import '../utils/sub_package_utils.dart'; import '../utils/version_detection.dart'; /// Create a GitHub release: copy artifacts, save release notes folder, commit, @@ -89,13 +90,43 @@ class CreateReleaseCommand extends Command { } // Step 2: Bump version in pubspec.yaml + // + // The regex matches the top-level `version:` field by requiring that it + // is followed by a semver-like value (digits and dots). This avoids + // accidentally matching YAML comments (`# version: ...`) or indented + // dependency version constraints, which are prefixed with whitespace and + // use caret/range syntax (e.g. ` version: ^1.0.0`). 
final pubspecFile = File('$repoRoot/pubspec.yaml'); final pubspecContent = pubspecFile.readAsStringSync(); - pubspecFile.writeAsStringSync( - pubspecContent.replaceFirst(RegExp(r'^version: .*', multiLine: true), 'version: $newVersion'), - ); + final versionPattern = RegExp(r'^version:\s+\d', multiLine: true); + if (!versionPattern.hasMatch(pubspecContent)) { + Logger.error('No "version:" field found in pubspec.yaml — cannot bump.'); + exit(1); + } + // Replace the entire version line (including the value) using a + // separate regex so we capture the full line for replacement. + final versionLinePattern = RegExp(r'^version:\s+.*', multiLine: true); + pubspecFile.writeAsStringSync(pubspecContent.replaceFirst(versionLinePattern, 'version: $newVersion')); Logger.info('Bumped pubspec.yaml to version $newVersion'); + // Step 2b: Bump version in sub-package pubspec.yaml files + final subPackages = SubPackageUtils.loadSubPackages(repoRoot); + SubPackageUtils.logSubPackages(subPackages); + for (final pkg in subPackages) { + final subPubspec = File('$repoRoot/${pkg['path']}/pubspec.yaml'); + if (subPubspec.existsSync()) { + final content = subPubspec.readAsStringSync(); + if (!versionPattern.hasMatch(content)) { + Logger.warn('No "version:" field in ${pkg['name']}/pubspec.yaml — skipping bump'); + continue; + } + subPubspec.writeAsStringSync(content.replaceFirst(versionLinePattern, 'version: $newVersion')); + Logger.info('Bumped ${pkg['name']}/pubspec.yaml to version $newVersion'); + } else { + Logger.warn('Sub-package pubspec not found: ${pkg['path']}/pubspec.yaml'); + } + } + // Step 3: Assemble release notes folder from Stage 3 artifacts final releaseDir = Directory('$repoRoot/$kReleaseNotesDir/v$newVersion'); releaseDir.createSync(recursive: true); @@ -199,6 +230,10 @@ class CreateReleaseCommand extends Command { '$kVersionBumpsDir/', '$kRuntimeCiDir/autodoc.json', ]; + // Add sub-package pubspec.yaml files to the commit + for (final pkg in subPackages) { + 
filesToAdd.add('${pkg['path']}/pubspec.yaml'); + } if (Directory('$repoRoot/docs').existsSync()) filesToAdd.add('docs/'); if (Directory('$repoRoot/$kCicdAuditDir').existsSync()) { filesToAdd.add('$kCicdAuditDir/'); @@ -323,6 +358,7 @@ class CreateReleaseCommand extends Command { | Tag | [`$tag`](https://github.com/$effectiveRepo/tree/$tag) | | Repository | `$effectiveRepo` | | pubspec.yaml | Bumped to `$newVersion` | +${subPackages.isNotEmpty ? '| Sub-packages | ${subPackages.map((p) => '`${p['name']}`').join(', ')} bumped to `$newVersion` |' : ''} ### Links diff --git a/lib/src/cli/commands/release_notes_command.dart b/lib/src/cli/commands/release_notes_command.dart index 3dacb9f..942dd02 100644 --- a/lib/src/cli/commands/release_notes_command.dart +++ b/lib/src/cli/commands/release_notes_command.dart @@ -15,6 +15,7 @@ import '../utils/prompt_resolver.dart'; import '../utils/release_utils.dart'; import '../utils/repo_utils.dart'; import '../utils/step_summary.dart'; +import '../utils/sub_package_utils.dart'; import '../utils/version_detection.dart'; const String _kGeminiProModel = 'gemini-3.1-pro-preview'; @@ -132,6 +133,16 @@ class ReleaseNotesCommand extends Command { } ctx.savePrompt('release-notes', prompt); + // Enrich prompt with sub-package context for multi-package repos + SubPackageUtils.enrichPromptWithSubPackages( + repoRoot: repoRoot, + prevTag: prevTag, + promptFilePath: ctx.artifactPath('release-notes', 'prompt.txt'), + buildInstructions: SubPackageUtils.buildHierarchicalReleaseNotesInstructions, + newVersion: newVersion, + verbose: global.verbose, + ); + if (global.dryRun) { Logger.info('[DRY-RUN] Would run Gemini CLI for release notes (${prompt.length} chars)'); return; @@ -353,9 +364,9 @@ String _postProcessReleaseNotes( Logger.warn( 'Stripping ${fabricated.length} fabricated issue references: ${fabricated.map((n) => "#$n").join(", ")}', ); - for (final num in fabricated) { - result = result.replaceAll(RegExp(r'- \[#' + num.toString() + 
r'\]\([^)]*\)[^\n]*\n'), ''); - result = result.replaceAll('(#$num)', ''); + for (final issueNum in fabricated) { + result = result.replaceAll(RegExp(r'- \[#' + issueNum.toString() + r'\]\([^)]*\)[^\n]*\n'), ''); + result = result.replaceAll('(#$issueNum)', ''); } } } diff --git a/lib/src/cli/commands/test_command.dart b/lib/src/cli/commands/test_command.dart index bce77ab..c3902e2 100644 --- a/lib/src/cli/commands/test_command.dart +++ b/lib/src/cli/commands/test_command.dart @@ -6,8 +6,9 @@ import 'package:args/command_runner.dart'; import '../../triage/utils/config.dart'; import '../utils/logger.dart'; import '../utils/repo_utils.dart'; +import '../utils/sub_package_utils.dart'; -/// Run dart test. +/// Run `dart test` on the root package and all configured sub-packages. class TestCommand extends Command { @override final String name = 'test'; @@ -25,40 +26,117 @@ class TestCommand extends Command { Logger.header('Running dart test'); + const processTimeout = Duration(minutes: 20); + final failures = []; + // Skip gracefully if no test/ directory exists final testDir = Directory('$repoRoot/test'); if (!testDir.existsSync()) { - Logger.success('No test/ directory found — skipping tests'); - return; + Logger.success('No test/ directory found — skipping root tests'); + } else { + // Use Process.start for streaming output instead of Process.runSync. + // This ensures real-time output in CI (runSync buffers everything until + // exit, so a hanging test produces zero output). + final process = await Process.start( + Platform.resolvedExecutable, + ['test', '--exclude-tags', 'gcp,integration'], + workingDirectory: repoRoot, + mode: ProcessStartMode.inheritStdio, + ); + + // Process-level timeout: kill the test process if it exceeds 20 minutes. + // Individual test timeouts should catch hangs, but this is a safety net + // for cases where the test process itself doesn't exit (e.g., leaked + // isolates, open sockets keeping the event loop alive). 
+ final exitCode = await process.exitCode.timeout( + processTimeout, + onTimeout: () { + Logger.error('Test process exceeded ${processTimeout.inMinutes}-minute timeout — killing.'); + process.kill(ProcessSignal.sigkill); + return -1; + }, + ); + + if (exitCode != 0) { + Logger.error('Root tests failed with exit code $exitCode'); + failures.add(config.repoName); + } else { + Logger.success('Root tests passed'); + } } - // Use Process.start for streaming output instead of Process.runSync. - // This ensures real-time output in CI (runSync buffers everything until - // exit, so a hanging test produces zero output). - final process = await Process.start( - Platform.resolvedExecutable, - ['test', '--exclude-tags', 'gcp,integration'], - workingDirectory: repoRoot, - mode: ProcessStartMode.inheritStdio, - ); - - // Process-level timeout: kill the test process if it exceeds 20 minutes. - // Individual test timeouts should catch hangs, but this is a safety net - // for cases where the test process itself doesn't exit (e.g., leaked - // isolates, open sockets keeping the event loop alive). 
- const processTimeout = Duration(minutes: 20); - final exitCode = await process.exitCode.timeout( - processTimeout, - onTimeout: () { - Logger.error('Test process exceeded ${processTimeout.inMinutes}-minute timeout — killing.'); - process.kill(ProcessSignal.sigkill); - return -1; - }, - ); - - if (exitCode != 0) { - Logger.error('Tests failed with exit code $exitCode'); - exit(exitCode); + // ── Sub-package testing ─────────────────────────────────────────────── + final subPackages = SubPackageUtils.loadSubPackages(repoRoot); + SubPackageUtils.logSubPackages(subPackages); + + for (final sp in subPackages) { + final name = sp['name'] as String; + final path = sp['path'] as String; + final dir = '$repoRoot/$path'; + + Logger.header('Testing sub-package: $name ($path)'); + + if (!Directory(dir).existsSync()) { + Logger.warn(' Directory not found: $dir — skipping'); + continue; + } + + if (!File('$dir/pubspec.yaml').existsSync()) { + Logger.error(' No pubspec.yaml in $dir — cannot test'); + failures.add(name); + continue; + } + + // Skip sub-packages with no test/ directory + final spTestDir = Directory('$dir/test'); + if (!spTestDir.existsSync()) { + Logger.info(' No test/ directory in $name — skipping'); + continue; + } + + // Ensure dependencies are resolved (sub-packages have independent + // pubspec.yaml files that the root `dart pub get` may not cover). 
+ final pubGetResult = Process.runSync( + Platform.resolvedExecutable, + ['pub', 'get'], + workingDirectory: dir, + environment: {'GIT_LFS_SKIP_SMUDGE': '1'}, + ); + if (pubGetResult.exitCode != 0) { + final pubGetStderr = (pubGetResult.stderr as String).trim(); + if (pubGetStderr.isNotEmpty) Logger.error(pubGetStderr); + Logger.error(' dart pub get failed for $name (exit code ${pubGetResult.exitCode})'); + failures.add(name); + continue; + } + + final spProcess = await Process.start( + Platform.resolvedExecutable, + ['test', '--exclude-tags', 'gcp,integration'], + workingDirectory: dir, + mode: ProcessStartMode.inheritStdio, + ); + + final spExitCode = await spProcess.exitCode.timeout( + processTimeout, + onTimeout: () { + Logger.error('Test process for $name exceeded ${processTimeout.inMinutes}-minute timeout — killing.'); + spProcess.kill(ProcessSignal.sigkill); + return -1; + }, + ); + + if (spExitCode != 0) { + Logger.error('Tests failed for $name (exit code $spExitCode)'); + failures.add(name); + } else { + Logger.success('Tests passed for $name'); + } + } + + if (failures.isNotEmpty) { + Logger.error('Tests failed for ${failures.length} package(s): ${failures.join(', ')}'); + exit(1); } Logger.success('All tests passed'); diff --git a/lib/src/cli/manage_cicd.dart b/lib/src/cli/manage_cicd.dart index 4c347ff..adda721 100644 --- a/lib/src/cli/manage_cicd.dart +++ b/lib/src/cli/manage_cicd.dart @@ -1103,11 +1103,11 @@ String _postProcessReleaseNotes( if (fabricated.isNotEmpty) { _warn('Stripping ${fabricated.length} fabricated issue references: ${fabricated.map((n) => "#$n").join(", ")}'); - for (final num in fabricated) { + for (final issueNum in fabricated) { // Remove the link but keep descriptive text: "[#N](url) — desc" → "desc" - result = result.replaceAll(RegExp(r'- \[#' + num.toString() + r'\]\([^)]*\)[^\n]*\n'), ''); + result = result.replaceAll(RegExp(r'- \[#' + issueNum.toString() + r'\]\([^)]*\)[^\n]*\n'), ''); // Remove inline (#N) references - 
result = result.replaceAll('(#$num)', ''); + result = result.replaceAll('(#$issueNum)', ''); } } } diff --git a/lib/src/cli/utils/autodoc_scaffold.dart b/lib/src/cli/utils/autodoc_scaffold.dart index e39bc60..3acebd7 100644 --- a/lib/src/cli/utils/autodoc_scaffold.dart +++ b/lib/src/cli/utils/autodoc_scaffold.dart @@ -2,6 +2,17 @@ import 'dart:convert'; import 'dart:io'; import '../../triage/utils/run_context.dart'; +import 'logger.dart'; +import 'sub_package_utils.dart'; + +/// Capitalize a snake_case name into a display-friendly title. +/// +/// Splits on `_`, capitalizes the first letter of each non-empty segment, +/// and joins with spaces. Empty segments (from leading, trailing, or +/// consecutive underscores) are silently skipped. +String _titleCase(String snakeName) { + return snakeName.split('_').where((w) => w.isNotEmpty).map((w) => '${w[0].toUpperCase()}${w.substring(1)}').join(' '); +} /// Scaffold `.runtime_ci/autodoc.json` by scanning `lib/src/` for modules. /// @@ -9,6 +20,10 @@ import '../../triage/utils/run_context.dart'; /// (or no `lib/` directory was found to scan). /// /// This is the shared implementation used by both `init` and `autodoc --init`. +/// +/// When a CI config with `sub_packages` is present, modules are also scaffolded +/// for each sub-package that has a `lib/` directory. Sub-package module IDs are +/// prefixed with `-` to avoid conflicts with root modules. 
bool scaffoldAutodocJson(String repoRoot, {bool overwrite = false}) { final configDir = Directory('$repoRoot/$kRuntimeCiDir'); final autodocFile = File('$repoRoot/$kRuntimeCiDir/autodoc.json'); @@ -36,7 +51,7 @@ bool scaffoldAutodocJson(String repoRoot, {bool overwrite = false}) { for (final dir in subdirs) { final dirName = dir.path.split('/').last; - final displayName = dirName.split('_').map((w) => '${w[0].toUpperCase()}${w.substring(1)}').join(' '); + final displayName = _titleCase(dirName); modules.add({ 'id': dirName, 'name': displayName, @@ -64,7 +79,7 @@ bool scaffoldAutodocJson(String repoRoot, {bool overwrite = false}) { // No lib/src/ — use lib/ as single module modules.add({ 'id': 'core', - 'name': packageName.split('_').map((w) => '${w[0].toUpperCase()}${w.substring(1)}').join(' '), + 'name': _titleCase(packageName), 'source_paths': ['lib/'], 'lib_paths': ['lib/'], 'output_path': 'docs/', @@ -74,6 +89,11 @@ bool scaffoldAutodocJson(String repoRoot, {bool overwrite = false}) { }); } + // ═══════════════════════════════════════════════════════════════════════════ + // Sub-package module scaffolding + // ═══════════════════════════════════════════════════════════════════════════ + _scaffoldSubPackageModules(repoRoot, modules); + if (modules.isEmpty) return false; configDir.createSync(recursive: true); @@ -91,3 +111,84 @@ bool scaffoldAutodocJson(String repoRoot, {bool overwrite = false}) { autodocFile.writeAsStringSync('${const JsonEncoder.withIndent(' ').convert(autodocData)}\n'); return true; } + +/// Discover sub-packages from CI config and scaffold autodoc modules for each +/// one that has a `lib/` directory. +/// +/// Each sub-package's modules are prefixed with the sub-package name to avoid +/// ID collisions with root modules (e.g. `my_sub_pkg-core`, `my_sub_pkg-utils`). +/// Output paths are scoped to the sub-package directory. 
+/// +/// Delegates to [SubPackageUtils.loadSubPackages] for config loading so that +/// malformed JSON is handled gracefully (logged + skipped) instead of crashing. +void _scaffoldSubPackageModules(String repoRoot, List> modules) { + final validPackages = SubPackageUtils.loadSubPackages(repoRoot); + if (validPackages.isEmpty) return; + + var scaffoldedCount = 0; + + for (final sp in validPackages) { + final spName = sp['name'] as String; + // Path is already normalized (trailing slashes stripped) by + // SubPackageUtils.loadSubPackages(). + final spPath = sp['path'] as String; + + final spLibDir = Directory('$repoRoot/$spPath/lib'); + if (!spLibDir.existsSync()) continue; + + final spSrcDir = Directory('$repoRoot/$spPath/lib/src'); + + if (spSrcDir.existsSync()) { + // Scan sub-package's lib/src/ subdirectories + final subdirs = + spSrcDir.listSync().whereType().where((d) => !d.path.split('/').last.startsWith('.')).toList() + ..sort((a, b) => a.path.compareTo(b.path)); + + for (final dir in subdirs) { + final dirName = dir.path.split('/').last; + final displayName = '$spName: ${_titleCase(dirName)}'; + modules.add({ + 'id': '$spName-$dirName', + 'name': displayName, + 'source_paths': ['$spPath/lib/src/$dirName/'], + 'lib_paths': ['$spPath/lib/src/$dirName/'], + 'output_path': '$spPath/docs/$dirName/', + 'generate': ['quickstart', 'api_reference'], + 'hash': '', + 'last_updated': null, + }); + } + + // Add top-level module for the sub-package entry points + modules.add({ + 'id': '$spName-top_level', + 'name': '$spName: Package Entry Points', + 'source_paths': ['$spPath/lib/'], + 'lib_paths': [], + 'output_path': '$spPath/docs/', + 'generate': ['quickstart'], + 'hash': '', + 'last_updated': null, + }); + } else { + // No lib/src/ — use lib/ as a single module for this sub-package + final displayName = _titleCase(spName); + modules.add({ + 'id': '$spName-core', + 'name': displayName, + 'source_paths': ['$spPath/lib/'], + 'lib_paths': ['$spPath/lib/'], + 
'output_path': '$spPath/docs/', + 'generate': ['quickstart', 'api_reference'], + 'hash': '', + 'last_updated': null, + }); + } + + scaffoldedCount++; + } + + if (scaffoldedCount > 0) { + Logger.info(' Discovered $scaffoldedCount sub-package(s) for autodoc scaffolding'); + } +} diff --git a/lib/src/cli/utils/sub_package_utils.dart b/lib/src/cli/utils/sub_package_utils.dart new file mode 100644 index 0000000..13b1bee --- /dev/null +++ b/lib/src/cli/utils/sub_package_utils.dart @@ -0,0 +1,294 @@ +import 'dart:io'; + +import 'logger.dart'; +import 'process_runner.dart'; +import 'workflow_generator.dart'; + +/// Utilities for loading and working with sub-packages defined in +/// `.runtime_ci/config.json` under `ci.sub_packages`. +/// +/// Sub-packages represent independently meaningful packages within a +/// multi-package repository (e.g., `dart_custom_lint` with 5 sub-packages). +abstract final class SubPackageUtils { + /// Load validated sub-packages from the CI config. + /// + /// Returns an empty list when the repo has no sub-packages configured + /// or if the config file contains malformed JSON (logs a warning). + /// Each entry has at least `name` (String) and `path` (String). + static List> loadSubPackages(String repoRoot) { + final Map? ciConfig; + try { + ciConfig = WorkflowGenerator.loadCiConfig(repoRoot); + } on StateError catch (e) { + Logger.warn('Could not load CI config: $e'); + return []; + } + if (ciConfig == null) return []; + final raw = ciConfig['sub_packages'] as List?; + if (raw == null || raw.isEmpty) return []; + return raw + .whereType>() + .where((sp) => sp['name'] != null && sp['path'] != null) + .map( + (sp) => { + ...sp, + // Normalize: strip trailing slashes to avoid double-slash paths + // in downstream consumers (git commands, Markdown, etc.). + 'path': (sp['path'] as String).replaceAll(RegExp(r'/+$'), ''), + }, + ) + .toList(); + } + + /// Build a per-package diff summary suitable for appending to a Gemini prompt. 
+ /// + /// For each sub-package, runs `git diff ..HEAD -- ` and + /// `git log --oneline ..HEAD -- ` to gather per-package + /// changes. The output is a Markdown-formatted section that can be + /// appended to the prompt file. + /// + /// Returns an empty string when [subPackages] is empty. + static String buildSubPackageDiffContext({ + required String repoRoot, + required String prevTag, + required List> subPackages, + bool verbose = false, + }) { + if (subPackages.isEmpty) return ''; + + // Guard: if prevTag is empty, git commands like `git log ..HEAD` are + // invalid. Fall back to showing the entire history. + // For `git diff`, use the well-known empty tree SHA so we diff against + // an empty tree (showing all files as additions). For `git log`, plain + // `HEAD` already lists the full commit history. + final diffRange = prevTag.isNotEmpty ? '$prevTag..HEAD' : '4b825dc642cb6eb9a060e54bf899d15f3f7f8f0e..HEAD'; + final logRange = prevTag.isNotEmpty ? '$prevTag..HEAD' : 'HEAD'; + + final buffer = StringBuffer(); + buffer.writeln(); + buffer.writeln('## Multi-Package Repository Structure'); + buffer.writeln(); + buffer.writeln('This is a multi-package repository containing ${subPackages.length} sub-packages.'); + buffer.writeln('Organize your output with per-package sections using a **hierarchical** format.'); + buffer.writeln(); + buffer.writeln('Sub-packages:'); + for (final pkg in subPackages) { + buffer.writeln('- **${pkg['name']}**: `${pkg['path']}/`'); + } + buffer.writeln(); + + for (final pkg in subPackages) { + final name = pkg['name'] as String; + final path = pkg['path'] as String; + + buffer.writeln('### Changes in `$name` (`$path/`)'); + buffer.writeln(); + + // Per-package commit log + final commitLog = CiProcessRunner.runSync( + 'git log $logRange --oneline --no-merges -- $path', + repoRoot, + verbose: verbose, + ); + if (commitLog.isNotEmpty) { + buffer.writeln('Commits:'); + buffer.writeln('```'); + buffer.writeln(_truncate(commitLog, 3000)); + 
buffer.writeln('```'); + } else { + buffer.writeln( + 'No commits touching this package since ${prevTag.isNotEmpty ? prevTag : 'repository creation'}.', + ); + } + buffer.writeln(); + + // Per-package diff stat + final diffStat = CiProcessRunner.runSync('git diff --stat $diffRange -- $path', repoRoot, verbose: verbose); + if (diffStat.isNotEmpty) { + buffer.writeln('Diff stat:'); + buffer.writeln('```'); + buffer.writeln(_truncate(diffStat, 2000)); + buffer.writeln('```'); + } + buffer.writeln(); + } + + return buffer.toString(); + } + + /// Build hierarchical changelog prompt instructions for multi-package repos. + /// + /// Instructs Gemini to produce a changelog with a top-level summary + /// followed by per-package sections. + static String buildHierarchicalChangelogInstructions({ + required String newVersion, + required List> subPackages, + }) { + if (subPackages.isEmpty) return ''; + + final packageNames = subPackages.map((p) => p['name']).join(', '); + final today = DateTime.now().toIso8601String().substring(0, 10); + + // Build example sections using ALL actual sub-package names so + // Gemini sees every package name once and doesn't invent extras. + final exampleSections = StringBuffer(); + for (final pkg in subPackages) { + exampleSections.writeln('### ${pkg['name']}'); + exampleSections.writeln('#### Added'); + exampleSections.writeln('- ...'); + exampleSections.writeln('#### Fixed'); + exampleSections.writeln('- ...'); + exampleSections.writeln(); + } + + return ''' + +## Hierarchical Changelog Format (Multi-Package) + +Because this is a multi-package repository ($packageNames), the changelog +entry MUST use a hierarchical format with per-package sections: + +``` +## [$newVersion] - $today + +### Summary +High-level summary covering ALL packages. This should be a concise +hierarchical summarization of the changes across all sub-packages. 
+ +${exampleSections.toString().trimRight()} +``` + +Rules for hierarchical format: +- The **Summary** section comes first and covers ALL packages at a high level +- Each sub-package gets its own **### PackageName** section +- Under each package, use the standard Keep a Changelog categories (#### Added, #### Changed, etc.) +- Only include sub-package sections for packages that actually have changes +- Only include category sub-sections (#### Added, etc.) that have entries +- If a sub-package has no changes, omit it entirely +- Do NOT invent sub-package names; the ONLY valid names are: $packageNames +'''; + } + + /// Build hierarchical release notes prompt instructions for multi-package repos. + /// + /// Instructs Gemini to produce release notes with a top-level narrative + /// summary followed by per-package detail sections. + static String buildHierarchicalReleaseNotesInstructions({ + required String newVersion, + required List> subPackages, + }) { + if (subPackages.isEmpty) return ''; + + final packageNames = subPackages.map((p) => p['name']).join(', '); + + // Build example per-package sections using ALL actual names so Gemini + // sees every valid package name and doesn't hallucinate extras. + final exampleSections = StringBuffer(); + for (final pkg in subPackages) { + exampleSections.writeln('## ${pkg['name']}'); + exampleSections.writeln("### What's New"); + exampleSections.writeln('- ...'); + exampleSections.writeln('### Bug Fixes'); + exampleSections.writeln('- ...'); + exampleSections.writeln(); + } + + return ''' + +## Hierarchical Release Notes Format (Multi-Package) + +Because this is a multi-package repository ($packageNames), the release notes +MUST use a hierarchical format: + +1. **Top-level summary and highlights** cover ALL packages -- this is a + hierarchical summarization of the entire release across all sub-packages. +2. 
Each sub-package with changes gets its own **## PackageName** detail section + describing what changed in that specific package. +3. Shared infrastructure changes (CI, tooling, root-level config) go in a + **## Infrastructure** section if applicable. + +Structure: +```markdown +# v$newVersion + +> Executive summary covering ALL sub-packages. + +## Highlights +- **Highlight 1** covering the most impactful cross-package change +- ... + +${exampleSections.toString().trimRight()} + +## Infrastructure (if applicable) +- ... + +## Contributors +(auto-generated from verified commit data -- do NOT fabricate usernames) + +## Issues Addressed +(from issue_manifest.json or "No linked issues for this release.") +``` + +Rules: +- Only include sub-package sections for packages that actually have changes. +- Do NOT invent sub-package names; the ONLY valid names are: $packageNames +- Replace `` with the actual repository name. +'''; + } + + /// Enrich an existing prompt file with sub-package diff context and + /// hierarchical formatting instructions. + /// + /// This is the single entry-point used by both the compose and + /// release-notes commands. It reads the prompt file written by + /// [RunContext.savePrompt], appends the sub-package diff context and + /// the appropriate hierarchical instructions, and writes the result + /// back. + /// + /// [promptFilePath] is the absolute path to the prompt file to enrich. + /// [buildInstructions] is a callback that returns the hierarchical + /// instructions string (changelog vs release-notes format). + /// + /// Returns the list of sub-packages that were used for enrichment + /// (empty if the repo has no sub-packages). 
+ static List> enrichPromptWithSubPackages({ + required String repoRoot, + required String prevTag, + required String promptFilePath, + required String Function({required String newVersion, required List> subPackages}) + buildInstructions, + required String newVersion, + bool verbose = false, + }) { + final subPackages = loadSubPackages(repoRoot); + logSubPackages(subPackages); + if (subPackages.isEmpty) return subPackages; + + final promptFile = File(promptFilePath); + final subPkgContext = buildSubPackageDiffContext( + repoRoot: repoRoot, + prevTag: prevTag, + subPackages: subPackages, + verbose: verbose, + ); + final hierarchicalInstructions = buildInstructions(newVersion: newVersion, subPackages: subPackages); + promptFile.writeAsStringSync('${promptFile.readAsStringSync()}\n$subPkgContext\n$hierarchicalInstructions'); + Logger.info('Appended sub-package context to prompt (${subPackages.length} packages)'); + return subPackages; + } + + /// Truncate a string to a maximum length, appending an indicator. + static String _truncate(String input, int maxChars) { + if (input.length <= maxChars) return input; + return '${input.substring(0, maxChars)}\n... [truncated ${input.length - maxChars} chars]'; + } + + /// Log discovered sub-packages. + static void logSubPackages(List> subPackages) { + if (subPackages.isEmpty) return; + Logger.info('Multi-package repo: ${subPackages.length} sub-packages'); + for (final pkg in subPackages) { + Logger.info(' - ${pkg['name']} (${pkg['path']}/)'); + } + } +} diff --git a/lib/src/cli/utils/workflow_generator.dart b/lib/src/cli/utils/workflow_generator.dart index d1c9b2c..e5a6d11 100644 --- a/lib/src/cli/utils/workflow_generator.dart +++ b/lib/src/cli/utils/workflow_generator.dart @@ -6,18 +6,33 @@ import 'package:mustache_template/mustache_template.dart'; import 'logger.dart'; import 'template_resolver.dart'; -/// Maps platform config identifiers to GitHub Actions runner labels. 
+class _PlatformDefinition { + final String osFamily; // linux | macos | windows + final String arch; // x64 | arm64 + final String runner; // default `runs-on:` label + + const _PlatformDefinition({required this.osFamily, required this.arch, required this.runner}); +} + +/// Maps platform identifiers to their default runner label + metadata. /// -/// Architecture-specific variants: -/// - `macos-arm64`: Apple Silicon (default for `macos-latest`) -/// - `macos-x64`: Intel macOS (`macos-15-intel`) -/// - `macos`: Alias for `macos-arm64` -const _platformRunners = { - 'ubuntu': 'ubuntu-latest', - 'macos': 'macos-latest', - 'macos-arm64': 'macos-latest', - 'macos-x64': 'macos-15-intel', - 'windows': 'windows-latest', +/// Consumers can override the runner label per platform via: +/// `ci.runner_overrides: { "": "" }` +const _platformDefinitions = { + // Linux + 'ubuntu': _PlatformDefinition(osFamily: 'linux', arch: 'x64', runner: 'ubuntu-latest'), + 'ubuntu-x64': _PlatformDefinition(osFamily: 'linux', arch: 'x64', runner: 'ubuntu-latest'), + 'ubuntu-arm64': _PlatformDefinition(osFamily: 'linux', arch: 'arm64', runner: 'ubuntu-24.04-arm'), + + // macOS + 'macos': _PlatformDefinition(osFamily: 'macos', arch: 'arm64', runner: 'macos-latest'), + 'macos-arm64': _PlatformDefinition(osFamily: 'macos', arch: 'arm64', runner: 'macos-latest'), + 'macos-x64': _PlatformDefinition(osFamily: 'macos', arch: 'x64', runner: 'macos-15-intel'), + + // Windows + 'windows': _PlatformDefinition(osFamily: 'windows', arch: 'x64', runner: 'windows-latest'), + 'windows-x64': _PlatformDefinition(osFamily: 'windows', arch: 'x64', runner: 'windows-latest'), + 'windows-arm64': _PlatformDefinition(osFamily: 'windows', arch: 'arm64', runner: 'windows-11-arm'), }; /// Renders CI workflow YAML from a Mustache skeleton template and config.json. @@ -98,10 +113,37 @@ class WorkflowGenerator { // Platform support final platformsRaw = ciConfig['platforms'] as List? ?? 
['ubuntu']; - final platforms = platformsRaw.cast().where((p) => _platformRunners.containsKey(p)).toList(); + final platforms = []; + for (final p in platformsRaw) { + if (p is String && _platformDefinitions.containsKey(p)) { + platforms.add(p); + } + } if (platforms.isEmpty) platforms.add('ubuntu'); final isMultiPlatform = platforms.length > 1; + final runnerOverridesRaw = ciConfig['runner_overrides']; + final runnerOverrides = runnerOverridesRaw is Map ? runnerOverridesRaw : {}; + String resolveRunner(String platformId) { + final override = runnerOverrides[platformId]; + if (override is String && override.trim().isNotEmpty) { + return override.trim(); + } + return _platformDefinitions[platformId]!.runner; + } + + // For multi-platform, use a matrix.include list of objects. This allows us to + // carry architecture metadata and makes cache keys stable across x64/arm64. + final platformMatrix = platforms.map((platformId) { + final def = _platformDefinitions[platformId]!; + return { + 'platform_id': platformId, + 'runner': resolveRunner(platformId), + 'os_family': def.osFamily, + 'arch': def.arch, + }; + }).toList(); + return { 'tooling_version': toolingVersion, 'dart_sdk': ciConfig['dart_sdk'] ?? '3.9.2', @@ -130,8 +172,8 @@ class WorkflowGenerator { // Platform support 'multi_platform': isMultiPlatform, 'single_platform': !isMultiPlatform, - 'runner': isMultiPlatform ? '' : _platformRunners[platforms.first]!, - 'platform_matrix_json': json.encode(platforms.map((p) => _platformRunners[p]!).toList()), + 'runner': isMultiPlatform ? '' : resolveRunner(platforms.first), + 'platform_matrix_json': json.encode(platformMatrix), }; } @@ -201,11 +243,33 @@ class WorkflowGenerator { errors.add('ci.platforms must be an array, got ${platforms.runtimeType}'); } else { for (final p in platforms) { - if (p is! String || !_platformRunners.containsKey(p)) { + if (p is! String || !_platformDefinitions.containsKey(p)) { errors.add( 'ci.platforms contains invalid platform "$p". 
' - 'Valid: ${_platformRunners.keys.join(', ')}', + 'Valid: ${_platformDefinitions.keys.join(', ')}', + ); + } + } + } + } + + final runnerOverrides = ciConfig['runner_overrides']; + if (runnerOverrides != null) { + if (runnerOverrides is! Map) { + errors.add('ci.runner_overrides must be an object, got ${runnerOverrides.runtimeType}'); + } else { + for (final entry in runnerOverrides.entries) { + final key = entry.key; + final value = entry.value; + if (key is! String || !_platformDefinitions.containsKey(key)) { + errors.add( + 'ci.runner_overrides contains invalid platform key "$key". ' + 'Valid: ${_platformDefinitions.keys.join(', ')}', ); + continue; + } + if (value is! String || value.trim().isEmpty) { + errors.add('ci.runner_overrides["$key"] must be a non-empty string'); } } } diff --git a/lib/src/triage/phases/act.dart b/lib/src/triage/phases/act.dart index 69c6f1c..cbbcc9a 100644 --- a/lib/src/triage/phases/act.dart +++ b/lib/src/triage/phases/act.dart @@ -18,11 +18,19 @@ import '../utils/json_schemas.dart'; /// - Checks existing labels before applying duplicates /// - Checks existing comments for bot signatures before posting duplicates /// - Each auto-comment includes a hidden signature for dedup +/// +/// Safety: +/// - ALL gh commands use explicit `--repo owner/repo` to prevent +/// fork → upstream leakage (resolved from .runtime_ci/config.json) +/// - Org allowlist check runs before any actions are executed // ═══════════════════════════════════════════════════════════════════════════════ // Constants // ═══════════════════════════════════════════════════════════════════════════════ +/// Allowed GitHub organizations. Actions are refused for repos outside these orgs. +const Set _kAllowedOrgs = {'open-runtime', 'pieces-app'}; + /// Hidden HTML comment signature embedded in every auto-posted comment. 
/// Format: String _botSignature(String runDir, int issueNumber) { @@ -30,6 +38,10 @@ String _botSignature(String runDir, int issueNumber) { return ''; } +/// The explicit `--repo owner/repo` argument derived from config. +/// Ensures gh never resolves from git remotes (which can point to upstream in forks). +String get _repoSlug => '${config.repoOwner}/${config.repoName}'; + // ═══════════════════════════════════════════════════════════════════════════════ // Public API // ═══════════════════════════════════════════════════════════════════════════════ @@ -41,7 +53,16 @@ Future> act( String repoRoot, { required String runDir, }) async { - print('Phase 3 [ACT]: Applying triage decisions for ${plan.issues.length} issue(s)'); + // Safety: refuse to act if the configured repo owner is not in the allowlist + if (!_kAllowedOrgs.contains(config.repoOwner)) { + print( + 'Phase 3 [ACT]: REFUSING to act — repo owner "${config.repoOwner}" ' + 'is not in allowed orgs: $_kAllowedOrgs', + ); + return []; + } + + print('Phase 3 [ACT]: Applying triage decisions for ${plan.issues.length} issue(s) on $_repoSlug'); final decisions = []; @@ -154,7 +175,7 @@ Future _executeAction(TriageAction action, int issueNumber, String repoRoo } // ═══════════════════════════════════════════════════════════════════════════════ -// GitHub Operations (with idempotency) +// GitHub Operations (with idempotency + explicit --repo) // ═══════════════════════════════════════════════════════════════════════════════ /// Get the current state of an issue ('OPEN' or 'CLOSED'). 
@@ -164,6 +185,8 @@ Future _getIssueState(int issueNumber, String repoRoot) async { 'issue', 'view', '$issueNumber', + '--repo', + _repoSlug, '--json', 'state', '--jq', @@ -185,15 +208,25 @@ Future _applyLabelIdempotent(int issueNumber, String label, String repoRoo 'issue', 'edit', '$issueNumber', + '--repo', + _repoSlug, '--add-label', label, ], workingDirectory: repoRoot); if (result.exitCode != 0) { // Label might not exist in the repo -- create it first - await Process.run('gh', ['label', 'create', label, '--force'], workingDirectory: repoRoot); + await Process.run('gh', ['label', 'create', label, '--repo', _repoSlug, '--force'], workingDirectory: repoRoot); // Retry the label application - await Process.run('gh', ['issue', 'edit', '$issueNumber', '--add-label', label], workingDirectory: repoRoot); + await Process.run('gh', [ + 'issue', + 'edit', + '$issueNumber', + '--repo', + _repoSlug, + '--add-label', + label, + ], workingDirectory: repoRoot); } } @@ -204,6 +237,8 @@ Future> _getIssueLabels(int issueNumber, String repoRoot) async { 'issue', 'view', '$issueNumber', + '--repo', + _repoSlug, '--json', 'labels', '--jq', @@ -225,6 +260,8 @@ Future _hasExistingComment(int issueNumber, String searchText, String repo 'issue', 'view', '$issueNumber', + '--repo', + _repoSlug, '--json', 'comments', '--jq', @@ -240,11 +277,19 @@ Future _hasExistingComment(int issueNumber, String searchText, String repo } Future _postComment(int issueNumber, String body, String repoRoot) async { - await Process.run('gh', ['issue', 'comment', '$issueNumber', '--body', body], workingDirectory: repoRoot); + await Process.run('gh', [ + 'issue', + 'comment', + '$issueNumber', + '--repo', + _repoSlug, + '--body', + body, + ], workingDirectory: repoRoot); } Future _closeIssue(int issueNumber, String reason, String repoRoot) async { - final args = ['issue', 'close', '$issueNumber']; + final args = ['issue', 'close', '$issueNumber', '--repo', _repoSlug]; if (reason == 'not_planned') { 
args.addAll(['--reason', 'not planned']); } diff --git a/lib/src/triage/phases/cross_repo_link.dart b/lib/src/triage/phases/cross_repo_link.dart index b73584d..d4453b6 100644 --- a/lib/src/triage/phases/cross_repo_link.dart +++ b/lib/src/triage/phases/cross_repo_link.dart @@ -15,11 +15,23 @@ import '../utils/json_schemas.dart'; /// /// Configuration: triage_config.json -> cross_repo.repos /// +/// Safety: +/// - ALL gh commands use explicit `--repo owner/repo` +/// - Org allowlist check: only posts to repos owned by allowed orgs +/// - Duplicate check: verifies no existing cross-reference before posting +/// /// For each triaged issue: /// 1. Extract key terms from issue title /// 2. Search each cross-repo for related issues via gh search /// 3. Post cross-reference comments on related issues +// ═══════════════════════════════════════════════════════════════════════════════ +// Constants +// ═══════════════════════════════════════════════════════════════════════════════ + +/// Allowed GitHub organizations. Cross-repo actions are refused for repos outside these orgs. 
+const Set _kAllowedOrgs = {'open-runtime', 'pieces-app'}; + // ═══════════════════════════════════════════════════════════════════════════════ // Public API // ═══════════════════════════════════════════════════════════════════════════════ @@ -42,7 +54,19 @@ Future crossRepoLink( return; } - print('Phase 5b [CROSS-REPO]: Searching ${repos.length} dependent repos'); + // Safety: filter out repos belonging to orgs outside the allowlist + final safeRepos = repos.where((r) => _kAllowedOrgs.contains(r.owner)).toList(); + final skippedRepos = repos.where((r) => !_kAllowedOrgs.contains(r.owner)).toList(); + for (final skipped in skippedRepos) { + print(' WARNING: Skipping ${skipped.fullName} — org "${skipped.owner}" not in allowed orgs: $_kAllowedOrgs'); + } + + if (safeRepos.isEmpty) { + print('Phase 5b [CROSS-REPO]: No cross-repo targets in allowed orgs'); + return; + } + + print('Phase 5b [CROSS-REPO]: Searching ${safeRepos.length} dependent repos'); final crossLinks = >[]; @@ -60,7 +84,7 @@ Future crossRepoLink( print(' Issue #$issueNumber: searching for "$searchTerms"'); - for (final repo in repos) { + for (final repo in safeRepos) { try { final relatedIssues = await _searchRepo(repo.owner, repo.repo, searchTerms, repoRoot); @@ -119,7 +143,7 @@ Future crossRepoLink( writeJson('$runDir/triage_cross_repo_links.json', { 'links': crossLinks, 'timestamp': DateTime.now().toIso8601String(), - 'repos_searched': repos.map((r) => r.fullName).toList(), + 'repos_searched': safeRepos.map((r) => r.fullName).toList(), }); print(' Cross-repo links created: ${crossLinks.length}'); diff --git a/lib/src/triage/phases/link.dart b/lib/src/triage/phases/link.dart index eb1c64a..fe9824a 100644 --- a/lib/src/triage/phases/link.dart +++ b/lib/src/triage/phases/link.dart @@ -14,12 +14,25 @@ import '../utils/json_schemas.dart'; /// Creates bidirectional references between issues, PRs, changelogs, /// release notes, and documentation. Ensures comprehensive traceability. 
+// ═══════════════════════════════════════════════════════════════════════════════ +// Constants +// ═══════════════════════════════════════════════════════════════════════════════ + +/// Allowed GitHub organizations. Actions are refused for repos outside these orgs. +const Set _kAllowedOrgs = {'open-runtime', 'pieces-app'}; + // ═══════════════════════════════════════════════════════════════════════════════ // Public API // ═══════════════════════════════════════════════════════════════════════════════ /// Cross-link all triaged issues to related artifacts. Future link(GamePlan plan, List decisions, String repoRoot, {required String runDir}) async { + // Safety: refuse to link in repos outside the allowlist. + if (!_kAllowedOrgs.contains(config.repoOwner)) { + print(' SKIPPED: org "${config.repoOwner}" not in allowlist $_kAllowedOrgs'); + return; + } + print('Phase 5 [LINK]: Cross-linking ${decisions.length} issue(s)'); final linksCreated = []; @@ -142,6 +155,9 @@ Future link(GamePlan plan, List decisions, String repoRoot // Internal // ═══════════════════════════════════════════════════════════════════════════════ +/// The explicit `--repo owner/repo` argument derived from config. +String get _repoSlug => '${config.repoOwner}/${config.repoName}'; + /// Check if a link reference already exists in issue comments. 
Future _isAlreadyLinked(int issueNumber, String searchText, String repoRoot) async { try { @@ -149,6 +165,8 @@ Future _isAlreadyLinked(int issueNumber, String searchText, String repoRoo 'issue', 'view', '$issueNumber', + '--repo', + _repoSlug, '--json', 'comments', '--jq', @@ -164,7 +182,15 @@ Future _isAlreadyLinked(int issueNumber, String searchText, String repoRoo } Future _postComment(int issueNumber, String body, String repoRoot) async { - await Process.run('gh', ['issue', 'comment', '$issueNumber', '--body', body], workingDirectory: repoRoot); + await Process.run('gh', [ + 'issue', + 'comment', + '$issueNumber', + '--repo', + _repoSlug, + '--body', + body, + ], workingDirectory: repoRoot); } /// Add an issue to the linked_issues.json file in a release notes folder. diff --git a/lib/src/triage/phases/plan.dart b/lib/src/triage/phases/plan.dart index 2011e38..2e8b687 100644 --- a/lib/src/triage/phases/plan.dart +++ b/lib/src/triage/phases/plan.dart @@ -80,11 +80,16 @@ GamePlan? loadPlan({String? runDir}) { // Internal // ═══════════════════════════════════════════════════════════════════════════════ +/// The explicit `--repo owner/repo` argument derived from config. 
+String get _repoSlug => '${config.repoOwner}/${config.repoName}'; + Future?> _fetchIssueData(int issueNumber, String repoRoot) async { final result = await Process.run('gh', [ 'issue', 'view', '$issueNumber', + '--repo', + _repoSlug, '--json', 'number,title,body,author,labels,state,comments', ], workingDirectory: repoRoot); @@ -118,6 +123,8 @@ Future>> _discoverOpenIssues(String repoRoot) async { final result = await Process.run('gh', [ 'issue', 'list', + '--repo', + _repoSlug, '--state', 'open', '--limit', diff --git a/lib/src/triage/phases/post_release.dart b/lib/src/triage/phases/post_release.dart index 45bde43..d4138a7 100644 --- a/lib/src/triage/phases/post_release.dart +++ b/lib/src/triage/phases/post_release.dart @@ -31,6 +31,15 @@ Future postReleaseTriage({ required String runDir, bool verbose = false, }) async { + // Safety: refuse to act if the configured repo owner is not in the allowlist + if (!_kAllowedOrgs.contains(config.repoOwner)) { + print( + 'POST-RELEASE: REFUSING to act — repo owner "${config.repoOwner}" ' + 'is not in allowed orgs: $_kAllowedOrgs', + ); + return; + } + print('POST-RELEASE TRIAGE: Closing the loop for v$newVersion'); final stopwatch = Stopwatch()..start(); @@ -61,7 +70,7 @@ Future postReleaseTriage({ final actionsTaken = >[]; // Step 1: Own-repo GitHub issues - if (config.postReleaseCloseOwnRepo || true) { + if (config.postReleaseCloseOwnRepo) { for (final issue in ghIssues) { final number = issue['number'] as int; final confidence = (issue['confidence'] as num?)?.toDouble() ?? 
0.0; @@ -234,6 +243,13 @@ Future?> _processCrossRepoIssue({ required String repoRoot, required String runDir, }) async { + // Safety: only comment on repos in allowed orgs + final repoOwner = repo.split('/').first; + if (!_kAllowedOrgs.contains(repoOwner)) { + print(' WARNING: Skipping $repo#$issueNumber — org "$repoOwner" not in allowed orgs'); + return null; + } + final runId = runDir.split('/').last; final signature = ''; @@ -364,7 +380,18 @@ void _updateLinkedIssues({ } // ═══════════════════════════════════════════════════════════════════════════════ -// GitHub Helpers +// Constants & Helpers +// ═══════════════════════════════════════════════════════════════════════════════ + +/// Allowed GitHub organizations. Actions are refused for repos outside these orgs. +const Set _kAllowedOrgs = {'open-runtime', 'pieces-app'}; + +/// The explicit `--repo owner/repo` argument derived from config. +/// Ensures gh never resolves from git remotes (which can point to upstream in forks). +String get _repoSlug => '${config.repoOwner}/${config.repoName}'; + +// ═══════════════════════════════════════════════════════════════════════════════ +// GitHub Helpers (all commands use explicit --repo) // ═══════════════════════════════════════════════════════════════════════════════ Future _getIssueState(int issueNumber, String repoRoot) async { @@ -373,6 +400,8 @@ Future _getIssueState(int issueNumber, String repoRoot) async { 'issue', 'view', '$issueNumber', + '--repo', + _repoSlug, '--json', 'state', '--jq', @@ -386,9 +415,18 @@ Future _getIssueState(int issueNumber, String repoRoot) async { Future _hasExistingComment(int issueNumber, String signature, {String? repo, required String repoRoot}) async { try { - final args = ['issue', 'view', '$issueNumber', '--json', 'comments', '--jq', '.comments[].body']; - if (repo != null) args.addAll(['--repo', repo]); - final result = await Process.run('gh', args, workingDirectory: repoRoot); + final targetRepo = repo ?? 
_repoSlug; + final result = await Process.run('gh', [ + 'issue', + 'view', + '$issueNumber', + '--repo', + targetRepo, + '--json', + 'comments', + '--jq', + '.comments[].body', + ], workingDirectory: repoRoot); return (result.stdout as String).contains(signature); } catch (_) { return false; @@ -396,11 +434,18 @@ Future _hasExistingComment(int issueNumber, String signature, {String? rep } Future _postComment(int issueNumber, String body, {String? repo, required String repoRoot}) async { - final args = ['issue', 'comment', '$issueNumber', '--body', body]; - if (repo != null) args.addAll(['--repo', repo]); - await Process.run('gh', args, workingDirectory: repoRoot); + final targetRepo = repo ?? _repoSlug; + await Process.run('gh', [ + 'issue', + 'comment', + '$issueNumber', + '--repo', + targetRepo, + '--body', + body, + ], workingDirectory: repoRoot); } Future _closeIssue(int issueNumber, String repoRoot) async { - await Process.run('gh', ['issue', 'close', '$issueNumber'], workingDirectory: repoRoot); + await Process.run('gh', ['issue', 'close', '$issueNumber', '--repo', _repoSlug], workingDirectory: repoRoot); } diff --git a/lib/src/triage/phases/verify.dart b/lib/src/triage/phases/verify.dart index 78f3bc9..aa8eb03 100644 --- a/lib/src/triage/phases/verify.dart +++ b/lib/src/triage/phases/verify.dart @@ -134,6 +134,9 @@ Future verify( // Internal // ═══════════════════════════════════════════════════════════════════════════════ +/// The explicit `--repo owner/repo` argument derived from config. +String get _repoSlug => '${config.repoOwner}/${config.repoName}'; + /// Fetch current issue state from GitHub. 
Future?> _fetchIssueState(int issueNumber, String repoRoot) async { try { @@ -141,6 +144,8 @@ Future?> _fetchIssueState(int issueNumber, String repoRoot) 'issue', 'view', '$issueNumber', + '--repo', + _repoSlug, '--json', 'state,labels,comments', ], workingDirectory: repoRoot); diff --git a/lib/src/triage/utils/config.dart b/lib/src/triage/utils/config.dart index ddb9d0a..7a0d9bb 100644 --- a/lib/src/triage/utils/config.dart +++ b/lib/src/triage/utils/config.dart @@ -287,7 +287,7 @@ class TriageConfig { List _strList(List path, List defaultValue) { final value = _navigate(path); - if (value is List) return value.cast(); + if (value is List) return value.whereType().toList(); return defaultValue; } diff --git a/templates/config.json b/templates/config.json index fbdbc4c..2ca6a07 100644 --- a/templates/config.json +++ b/templates/config.json @@ -75,6 +75,15 @@ "managed_test": false }, "secrets": {}, - "sub_packages": [] + "sub_packages": [], + "_comment_platforms": "Optional: CI platform matrix. When 2+ entries are provided, CI splits into analyze + matrix test jobs.", + "platforms": ["ubuntu-x64", "ubuntu-arm64", "macos-arm64", "macos-x64", "windows-x64", "windows-arm64"], + "_comment_runner_overrides": "Optional: override platform IDs to custom runs-on labels (e.g. org-managed GitHub-hosted runners). Keys must match ci.platforms entries.", + "runner_overrides": { + "ubuntu-x64": "runtime-ubuntu-24.04-x64-640gb-160core", + "ubuntu-arm64": "runtime-ubuntu-24.04-arm64-208gb-64core", + "windows-x64": "runtime-windows-2025-x64-640gb-160core", + "windows-arm64": "runtime-windows-11-arm64-208gb-64core" + } } } diff --git a/templates/gemini/commands/triage.toml b/templates/gemini/commands/triage.toml index 9913d51..dec0e41 100644 --- a/templates/gemini/commands/triage.toml +++ b/templates/gemini/commands/triage.toml @@ -3,34 +3,51 @@ description = "Triage a GitHub issue: classify, prioritize, detect duplicates. 
U
 prompt = """
 Triage GitHub issue #{{args}} for the !{grep '^name:' pubspec.yaml | sed 's/name: //'} package.
 
-## Issue Details
+## Repository Context
 ```
-!{gh issue view {{args}} --json number,title,body,author,labels,createdAt --jq '"#\\(.number): \\(.title)\\nAuthor: @\\(.author.login)\\nLabels: \\([.labels[].name] | join(", "))\\nCreated: \\(.createdAt)\\n\\n\\(.body)"' 2>/dev/null || echo "Could not fetch issue"}
+REPO: !{gh repo view --json nameWithOwner -q .nameWithOwner 2>/dev/null || echo "UNKNOWN"}
+```
+
+## CRITICAL SAFETY RULES — READ BEFORE ACTING
+
+1. **ALWAYS use `--repo <owner>/<repo>` on EVERY `gh` command.** Never rely on git remote resolution.
+2. **ONLY operate on repositories owned by: `open-runtime`, `pieces-app`.** If the repo above belongs to a different org (e.g. `grpc`, `niclas-pricken`, etc.), STOP IMMEDIATELY and report: "Refusing to triage — repo belongs to an unauthorized org."
+3. **Before posting any comment**, check the existing comments below for duplicates. If a triage comment already exists, do NOT post another one.
+4. **Never post, edit, label, or close issues on upstream/parent repos.** This is a fork — only operate on the fork's own issues.
+ +## Issue Details (including existing comments) +``` +!{REPO=$(gh repo view --json nameWithOwner -q .nameWithOwner 2>/dev/null); case "$REPO" in open-runtime/*|pieces-app/*) ;; *) echo "BLOCKED: repo $REPO belongs to unauthorized org — refusing to fetch"; exit 0;; esac; gh issue view {{args}} --repo "$REPO" --json number,title,body,author,labels,createdAt,comments --jq '"#\\(.number): \\(.title)\\nAuthor: @\\(.author.login)\\nLabels: \\([.labels[].name] | join(", "))\\nCreated: \\(.createdAt)\\n\\n\\(.body)\\n\\n--- EXISTING COMMENTS (\\(.comments | length)) ---\\n\\(.comments | map("[\\(.author.login) @ \\(.createdAt)]:\\n\\(.body)") | join("\\n---\\n"))"' 2>/dev/null || echo "Could not fetch issue"} ``` ## Package Structure ``` -!{tree lib/ -L 2 --dirsfirst -d} +!{tree lib/ -L 2 --dirsfirst -d 2>/dev/null || echo "No lib/ directory"} ``` ## Open Issues (for duplicate detection) ``` -!{gh issue list --state open --limit 50 --json number,title,labels --jq '.[] | "#\\(.number): \\(.title) [\\([.labels[].name] | join(", "))]"' 2>/dev/null || echo "Could not list issues"} +!{REPO=$(gh repo view --json nameWithOwner -q .nameWithOwner 2>/dev/null); case "$REPO" in open-runtime/*|pieces-app/*) ;; *) echo "BLOCKED: repo $REPO belongs to unauthorized org"; exit 0;; esac; gh issue list --repo "$REPO" --state open --limit 50 --json number,title,labels --jq '.[] | "#\\(.number): \\(.title) [\\([.labels[].name] | join(", "))]"' 2>/dev/null || echo "Could not list issues"} ``` ## Triage Tasks -1. **Type**: Classify as one of: bug, feature-request, enhancement, documentation, question -2. **Priority**: Assign P0-critical, P1-high, P2-medium, or P3-low -3. **Area**: Classify area(s): proto, ml-models, core, provisioning, grpc, crypto, googleapis, ci-cd, docs -4. **Duplicates**: Check open issues for duplicates (HIGH/MEDIUM/LOW confidence) -5. **Comment**: Draft a helpful, welcoming comment for the reporter +1. 
**Org Check**: Verify the repository belongs to `open-runtime` or `pieces-app`. If not, STOP.
+2. **Duplicate Check**: Review the EXISTING COMMENTS above. If a triage comment already exists, skip commenting.
+3. **Type**: Classify as one of: bug, feature-request, enhancement, documentation, question
+4. **Priority**: Assign P0-critical, P1-high, P2-medium, or P3-low
+5. **Area**: Classify area(s) based on the package structure above
+6. **Duplicates**: Check open issues for duplicates (HIGH/MEDIUM/LOW confidence)
+7. **Comment**: Draft a helpful, welcoming comment for the reporter (only if no triage comment exists yet)
 
 ## Actions
 
-Apply your triage using gh CLI:
-- `gh issue edit {{args}} --add-label "<type>"`
-- `gh issue edit {{args}} --add-label "<priority>"`
-- `gh issue edit {{args}} --add-label "area/<area>"`
-- `gh issue comment {{args}} --body "<comment>"`
+IMPORTANT: Replace `OWNER/REPO` below with the actual repo from "Repository Context" above.
+IMPORTANT: If any existing comment above already contains triage analysis, do NOT post a duplicate.
+
+Apply your triage using gh CLI (ALWAYS include --repo):
+- `gh issue edit {{args}} --repo OWNER/REPO --add-label "<type>"`
+- `gh issue edit {{args}} --repo OWNER/REPO --add-label "<priority>"`
+- `gh issue edit {{args}} --repo OWNER/REPO --add-label "area/<area>"`
+- `gh issue comment {{args}} --repo OWNER/REPO --body "<comment>"`
 """
diff --git a/templates/github/workflows/ci.skeleton.yaml b/templates/github/workflows/ci.skeleton.yaml
index c7eb71c..2954b31 100644
--- a/templates/github/workflows/ci.skeleton.yaml
+++ b/templates/github/workflows/ci.skeleton.yaml
@@ -119,8 +119,8 @@ jobs:
         uses: actions/cache@v5.0.3
         with:
           path: ~/.pub-cache
-          key: ${{ runner.os }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }}
-          restore-keys: ${{ runner.os }}-dart-pub-
+          key: ${{ runner.os }}-${{ runner.arch }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }}
+          restore-keys: ${{ runner.os }}-${{ runner.arch }}-dart-pub-
 <%#proto%>
       - name: Install protoc
@@ -138,8 +138,8 @@
         uses: actions/cache@v5.0.3
         with:
           path: ~/.dartServer
-          key: ${{ runner.os }}-dart-analysis-${{ hashFiles('**/*.dart', '**/pubspec.yaml') }}
-          restore-keys: ${{ runner.os }}-dart-analysis-
+          key: ${{ runner.os }}-${{ runner.arch }}-dart-analysis-${{ hashFiles('**/*.dart', '**/pubspec.yaml') }}
+          restore-keys: ${{ runner.os }}-${{ runner.arch }}-dart-analysis-
 <%/analysis_cache%>
 <%#proto%>
@@ -227,8 +227,8 @@
         uses: actions/cache@v5.0.3
         with:
           path: ~/.pub-cache
-          key: ${{ runner.os }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }}
-          restore-keys: ${{ runner.os }}-dart-pub-
+          key: ${{ runner.os }}-${{ runner.arch }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }}
+          restore-keys: ${{ runner.os }}-${{ runner.arch }}-dart-pub-
 <%#proto%>
       - name: Install protoc
@@ -284,11 +284,11 @@ jobs:
   test:
     needs: [pre-check, analyze<%#format_check%>, auto-format<%/format_check%>]
     if: needs.pre-check.outputs.should_run == 'true'
-    runs-on: ${{ matrix.os }}
+    runs-on: ${{ matrix.runner }}
     strategy:
       fail-fast: false
       matrix:
-        os: <%platform_matrix_json%>
+        include: <%platform_matrix_json%>
 <%#has_secrets%>
     env:
 <%#secrets_list%>
@@ -323,8 +323,8 @@
         uses: actions/cache@v5.0.3
         with:
           path: ~/.pub-cache
-          key: ${{ runner.os }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }}
-          restore-keys: ${{ runner.os }}-dart-pub-
+          key: ${{ runner.os }}-${{ runner.arch }}-dart-pub-${{ hashFiles('**/pubspec.yaml') }}
+          restore-keys: ${{ runner.os }}-${{ runner.arch }}-dart-pub-
 
       - run: dart pub get
         env:
@@ -345,3 +345,6 @@
 # --- BEGIN USER: post-test ---
 # --- END USER: post-test ---
 <%/multi_platform%>
+
+# --- BEGIN USER: extra-jobs ---
+# --- END USER: extra-jobs ---