diff --git a/.github/actions/select-copilot-pat/README.md b/.github/actions/select-copilot-pat/README.md new file mode 100644 index 000000000000..f23b69f5b3c6 --- /dev/null +++ b/.github/actions/select-copilot-pat/README.md @@ -0,0 +1,93 @@ +# Select Copilot PAT + +Selects a random Copilot PAT from a numbered pool of secrets. This addresses +limitations that arise from having a single PAT shared across all workflows +that call the Copilot API, such as rate-limiting. + +> **This is a stop-gap workaround.** As soon as organization/enterprise billing +> is offered for agentic workflows, this approach will be removed. + +Based on the pattern established in [dotnet/runtime#126057](https://github.com/dotnet/runtime/pull/126057). + +## Repository Onboarding + +1. Copy this `select-copilot-pat` folder into the repository under + `.github/actions/select-copilot-pat`, including both the `README.md` + and `action.yml`. +2. Add repository secrets named `COPILOT_PAT_0` through `COPILOT_PAT_9` + (you only need as many as you have team members contributing PATs). +3. Reference the action in your workflow (see Usage below). + +## PAT Creation + +[Use this link to prefill the PAT creation form with the required settings][create-pat]: + +1. **Resource owner** is your **user account**, not an organization. +2. **Copilot Requests (Read)** must be the only permission granted. +3. **8-day expiration** must be used, which enforces a weekly renewal. +4. **Repository access** set to **Public repositories** only. + +Team members providing PATs should set weekly recurring reminders to +regenerate and update their PATs in the repository secrets. + +PATs are added through **Settings > Secrets and variables > Actions**, +saved as **Repository secrets** matching the `COPILOT_PAT_<0-9>` naming +convention. 
This can also be done using the GitHub CLI:
+
+```sh
+gh secret set "COPILOT_PAT_0" --body "<PAT>" --repo dotnet/sdk
+```
+
+## Usage
+
+Add a job that selects a PAT, then consume it in downstream jobs:
+
+```yaml
+jobs:
+  select-pat:
+    name: Select Copilot PAT
+    runs-on: ubuntu-latest
+    outputs:
+      copilot_pat_number: ${{ steps.select-copilot-pat.outputs.copilot_pat_number }}
+    steps:
+      - name: Checkout select-copilot-pat action
+        uses: actions/checkout@v6
+        with:
+          persist-credentials: false
+          sparse-checkout: .github/actions/select-copilot-pat
+          sparse-checkout-cone-mode: true
+          fetch-depth: 1
+
+      - id: select-copilot-pat
+        name: Select Copilot token from pool
+        uses: ./.github/actions/select-copilot-pat
+        env:
+          SECRET_0: ${{ secrets.COPILOT_PAT_0 }}
+          SECRET_1: ${{ secrets.COPILOT_PAT_1 }}
+          # ... up to SECRET_9
+
+  my-ai-job:
+    needs: [select-pat]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Call Copilot API
+        env:
+          COPILOT_TOKEN: ${{ needs.select-pat.outputs.copilot_pat_number == '0' && secrets.COPILOT_PAT_0 || needs.select-pat.outputs.copilot_pat_number == '1' && secrets.COPILOT_PAT_1 || '' }}
+        run: |
+          curl -X POST https://api.githubcopilot.com/chat/completions \
+            -H "Authorization: Bearer $COPILOT_TOKEN" \
+            -H "Content-Type: application/json" \
+            -d '{"model":"gpt-4o-mini","messages":[...]}'
+```
+
+## Output Attribution
+
+Team members' PATs are _only_ used for Copilot API requests. All other
+workflow outputs (issues, comments, labels) use the `github-actions[bot]`
+token and are attributed accordingly.
+ +## References + +- [dotnet/runtime#126057 — Set up GitHub Agentic Workflows](https://github.com/dotnet/runtime/pull/126057) +- [PAT creation link][create-pat] + +[create-pat]: https://github.com/settings/personal-access-tokens/new?name=dotnet%20org%20agentic%20workflows&description=GitHub+Agentic+Workflows+-+Copilot+API+authentication.++Used+for+dotnet+org+workflows.+MUST+be+configured+with+only+Copilot+Requests+permissions+and+user+account+as+resource+owner.+Weekly+expiration+and+required+renewal.&user_copilot_requests=read&expires_in=8 diff --git a/.github/actions/select-copilot-pat/action.yml b/.github/actions/select-copilot-pat/action.yml new file mode 100644 index 000000000000..198eb0393dbf --- /dev/null +++ b/.github/actions/select-copilot-pat/action.yml @@ -0,0 +1,53 @@ +name: 'Select Copilot PAT from Pool' +description: > + Selects a random Copilot PAT from a numbered pool of secrets. Secrets + are passed as environment variables SECRET_0 through SECRET_9 + by the calling workflow step. + +inputs: + random-seed: + description: 'A seed number to use for the random PAT selection, for deterministic selection if needed.' + required: false + default: '' + +outputs: + copilot_pat_number: + description: 'The 0-9 secret number selected from the pool of specified secrets' + value: ${{ steps.select-pat-number.outputs.copilot_pat_number }} + +runs: + using: composite + steps: + - id: select-pat-number + shell: bash + env: + RANDOM_SEED: ${{ inputs.random-seed }} + run: | + # Collect all secret numbers with non-empty values from SECRET_0..SECRET_9 + PAT_NUMBERS=() + for i in $(seq 0 9); do + var="SECRET_${i}" + val="${!var}" + if [ -n "$val" ]; then + PAT_NUMBERS+=(${i}) + fi + done + + # If none of the secrets in the pool have values, then emit a warning and do not + # set an output value. The consumer can then fall back to using COPILOT_GITHUB_TOKEN. 
+ if [ ${#PAT_NUMBERS[@]} -eq 0 ]; then + echo "::warning::None of the specified secrets had values (checked SECRET_0 through SECRET_9)" + exit 0 + fi + + # Select a random index using the seed if specified + if [ -n "$RANDOM_SEED" ]; then + RANDOM=$RANDOM_SEED + fi + + PAT_INDEX=$(( RANDOM % ${#PAT_NUMBERS[@]} )) + PAT_NUMBER="${PAT_NUMBERS[$PAT_INDEX]}" + echo "Selected token ${PAT_NUMBER} (index: ${PAT_INDEX}; pool size: ${#PAT_NUMBERS[@]})" + + # Set the PAT number as the output + echo "copilot_pat_number=${PAT_NUMBER}" >> "$GITHUB_OUTPUT" diff --git a/.github/aw/actions-lock.json b/.github/aw/actions-lock.json new file mode 100644 index 000000000000..73df5e9d5f28 --- /dev/null +++ b/.github/aw/actions-lock.json @@ -0,0 +1,14 @@ +{ + "entries": { + "actions/github-script@v9": { + "repo": "actions/github-script", + "version": "v9", + "sha": "373c709c69115d41ff229c7e5df9f8788daa9553" + }, + "github/gh-aw-actions/setup@v0.68.3": { + "repo": "github/gh-aw-actions/setup", + "version": "v0.68.3", + "sha": "ba90f2186d7ad780ec640f364005fa24e797b360" + } + } +} diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 6667feadfe7e..6aaace1b2648 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -22,9 +22,10 @@ Testing: - Examples: - `dotnet test test/dotnet.Tests/dotnet.Tests.csproj --filter "Name~ItShowsTheAppropriateMessageToTheUser"` - `dotnet exec artifacts/bin/redist/Debug/dotnet.Tests.dll -method "*ItShowsTheAppropriateMessageToTheUser*"` +- For incremental test runs of `dotnet.Tests` (avoids slow full `build.cmd`), use the `incremental-test` skill. 
- To test CLI command changes:
   - Build the redist SDK: `./build.sh` from repo root
-  - Create a dogfood environment: `source eng/dogfood.sh` 
+  - Create a dogfood environment: `source eng/dogfood.sh`
   - Test commands in the dogfood shell (e.g., `dnx --help`, `dotnet tool install --help`)
   - The dogfood script sets up PATH and environment to use the newly built SDK
diff --git a/.github/skills/AGENTS.md b/.github/skills/AGENTS.md
new file mode 100644
index 000000000000..0b022c9eba26
--- /dev/null
+++ b/.github/skills/AGENTS.md
@@ -0,0 +1,24 @@
+# Agent Skills
+
+When creating skills, follow:
+- Agent skills specification: https://agentskills.io/specification.md
+- Best practices: https://agentskills.io/skill-creation/best-practices.md
+
+## Structure
+
+```
+.github/skills/skill-name/
+├── SKILL.md        # Required: metadata + instructions
+├── scripts/        # Optional: executable code
+├── references/     # Optional: documentation
+├── assets/         # Optional: templates, resources
+└── ...             # Any additional files or directories
+```
+
+## Quick Checklist
+
+- [ ] Run `dotnet .github/skills/ValidateSkill.cs <skill-directory>` to validate format.
+- [ ] `description` describes what the skill does and when to use it. Skill body does not include "When to use this skill".
+- [ ] Skill does not explain things the agent already knows. Focus on what's specific to the task at hand.
+- [ ] Deterministic processes use scripts (for example, to fetch and format data from an API).
+- [ ] Scripts use PowerShell or .NET file-based apps, not bash.
diff --git a/.github/skills/ValidateSkill.cs b/.github/skills/ValidateSkill.cs
new file mode 100755
index 000000000000..12d1e0b51342
--- /dev/null
+++ b/.github/skills/ValidateSkill.cs
@@ -0,0 +1,103 @@
+#!/usr/bin/env dotnet
+#:property ManagePackageVersionsCentrally=false
+#:property PublishAot=false
+#:package YamlDotNet@16.3.0
+
+using YamlDotNet.Serialization;
+using System.Text.RegularExpressions;
+
+if (args.Length == 0)
+{
+    Console.Error.WriteLine("Usage: dotnet ValidateSkill.cs <skill-directory>");
+    return 1;
+}
+
+string skillDir = Path.GetFullPath(args[0]);
+string skillName = Path.GetFileName(Path.TrimEndingDirectorySeparator(skillDir));
+string skillFile = Path.Combine(skillDir, "SKILL.md");
+
+// SKILL.md must exist in the skill directory
+if (!File.Exists(skillFile))
+{
+    Console.Error.WriteLine($"SKILL.md not found in {skillDir}");
+    return 1;
+}
+
+string text = File.ReadAllText(skillFile);
+
+// SKILL.md must begin with YAML frontmatter delimited by ---
+if (!text.StartsWith("---"))
+{
+    Console.Error.WriteLine("No YAML frontmatter found.");
+    return 1;
+}
+
+// Capture the frontmatter body into the named group "yaml" (referenced below).
+Match frontmatterMatch = Regex.Match(
+    text,
+    @"\A---\r?\n(?<yaml>.*?)(?:\r?\n)---(?:\r?\n|$)",
+    RegexOptions.Singleline);
+if (!frontmatterMatch.Success)
+{
+    Console.Error.WriteLine("Unterminated YAML frontmatter.");
+    return 1;
+}
+
+string yaml = frontmatterMatch.Groups["yaml"].Value.Trim();
+
+IDeserializer deserializer = new DeserializerBuilder().Build();
+Dictionary<string, object> frontmatter = deserializer.Deserialize<Dictionary<string, object>>(yaml);
+
+// name is required
+if (!frontmatter.TryGetValue("name", out object? nameValue) || nameValue is not string frontmatterName)
+{
+    Console.Error.WriteLine("Frontmatter missing 'name' field.");
+    return 1;
+}
+
+// name must be 1-64 characters
+if (frontmatterName.Length == 0 || frontmatterName.Length > 64)
+{
+    Console.Error.WriteLine($"Name is {frontmatterName.Length} chars (must be 1-64).");
+    return 1;
+}
+
+// name: lowercase alphanumeric and hyphens only, no leading/trailing/consecutive hyphens
+if (!Regex.IsMatch(frontmatterName, @"^[a-z0-9]([a-z0-9-]*[a-z0-9])?$")
+    || frontmatterName.Contains("--"))
+{
+    Console.Error.WriteLine($"Invalid name '{frontmatterName}'. Must be lowercase letters, numbers, and hyphens only. Must not start/end with a hyphen or contain consecutive hyphens.");
+    return 1;
+}
+
+// name must match the parent directory name
+if (!string.Equals(skillName, frontmatterName, StringComparison.Ordinal))
+{
+    Console.Error.WriteLine($"Name mismatch: directory is '{skillName}' but SKILL.md name is '{frontmatterName}'.");
+    return 1;
+}
+
+// description is required
+if (!frontmatter.TryGetValue("description", out object? descValue) || descValue is not string description)
+{
+    Console.Error.WriteLine("Frontmatter missing 'description' field.");
+    return 1;
+}
+
+// description must be 1-1024 characters
+if (description.Length == 0 || description.Length > 1024)
+{
+    Console.Error.WriteLine($"Description is {description.Length} chars (must be 1-1024).");
+    return 1;
+}
+
+// Keep SKILL.md under 500 lines; move detailed content to references/ or scripts/
+// See "Progressive Disclosure" at https://agentskills.io/specification.md
+int lineCount = text.Split('\n').Length;
+if (lineCount > 500)
+{
+    Console.Error.WriteLine($"SKILL.md is {lineCount} lines (max 500). See \"Progressive Disclosure\" at https://agentskills.io/specification.md");
+    return 1;
+}
+
+Console.WriteLine($"Skill '{frontmatterName}' is valid.");
+return 0;
diff --git a/.github/skills/incremental-test/SKILL.md b/.github/skills/incremental-test/SKILL.md
new file mode 100644
index 000000000000..f4d927873152
--- /dev/null
+++ b/.github/skills/incremental-test/SKILL.md
@@ -0,0 +1,106 @@
+---
+name: incremental-test
+description: >-
+  Run dotnet.Tests incrementally without a full build.cmd rebuild. Use after
+  modifying source code in SDK projects to quickly build only changed projects,
+  deploy their outputs into the redist SDK layout, and run tests against them.
+---
+
+# Incremental Test Runner for dotnet.Tests
+
+## Prerequisites
+
+- A full build must have been completed at least once (via `build.cmd` or `build.sh`) so that the redist SDK layout exists at `artifacts\bin\redist\Debug\dotnet\sdk\<version>\`.
+- The repo-local `.dotnet` SDK must match the version expected by the test projects. If the runtime or SDK version is out of date (e.g., test build fails with a missing framework error), run `.\restore.cmd` (or `./restore.sh` on macOS/Linux) to download the correct SDK into `.dotnet`.
+- This workflow uses Windows/PowerShell commands and paths. On macOS/Linux, substitute forward slashes and use `cp` instead of `Copy-Item`.
+
+## Workflow
+
+### Step 1: Identify modified projects
+
+Determine which projects have been modified. Use context from:
+- The files you just edited in this session.
+- Or `git status`/`git diff` to find changed `.cs` files and map them to their `.csproj` projects.
+
+### Step 2: Build modified projects
+
+Build each modified project individually using the repo-local dotnet:
+
+```
+.\.dotnet\dotnet build <path-to-csproj> -c Debug
+```
+
+For example:
+```
+.\.dotnet\dotnet build src\Cli\Microsoft.DotNet.Cli.Utils\Microsoft.DotNet.Cli.Utils.csproj -c Debug
+```
+
+If the `dotnet` CLI project itself was modified, build it:
+```
+.\.dotnet\dotnet build src\Cli\dotnet\dotnet.csproj -c Debug
+```
+
+### Step 3: Copy output DLLs to the redist SDK layout
+
+Discover the SDK version directory name:
+```powershell
+$sdkVersion = (Get-ChildItem artifacts\bin\redist\Debug\dotnet\sdk -Directory | Sort-Object LastWriteTime -Descending | Select-Object -First 1).Name
+```
+
+For each modified project, copy its output DLL (and any satellite assemblies) from the project's build output to the redist SDK directory:
+
+```
+Source: artifacts\bin\<ProjectName>\Debug\net10.0\<ProjectName>.dll
+Target: artifacts\bin\redist\Debug\dotnet\sdk\<sdk version>\
+```
+
+For example:
+```powershell
+Copy-Item artifacts\bin\Microsoft.DotNet.ProjectTools\Debug\net10.0\Microsoft.DotNet.ProjectTools.dll artifacts\bin\redist\Debug\dotnet\sdk\$sdkVersion\
+Copy-Item artifacts\bin\Microsoft.DotNet.Cli.Utils\Debug\net10.0\Microsoft.DotNet.Cli.Utils.dll artifacts\bin\redist\Debug\dotnet\sdk\$sdkVersion\
+```
+
+The `dotnet` project is special — it builds into `artifacts\bin\dotnet\Debug\net10.0\` and its `dotnet.dll` must be copied to the SDK directory:
+```powershell
+Copy-Item artifacts\bin\dotnet\Debug\net10.0\dotnet.dll artifacts\bin\redist\Debug\dotnet\sdk\$sdkVersion\
+```
+
+**Important notes:**
+- For typical incremental edits, only copy DLLs that are **already present** in the target directory. If your change introduces a new shipped assembly or moves assemblies, you will need a full `build.cmd`/`build.sh` to update the layout correctly.
+- Some projects multi-target (e.g., `net10.0` and `net472`). Always use the `net10.0` output.
+- If localization resource DLLs were changed (in subdirectories like `cs\`, `de\`, etc.), copy those too. + +### Step 4: Build the test project (if test code was modified) + +The test project `test\dotnet.Tests\dotnet.Tests.csproj` outputs directly to `artifacts\bin\redist\Debug\` (via `TestHostFolder`), so just build it: + +``` +.\.dotnet\dotnet build test\dotnet.Tests\dotnet.Tests.csproj +``` + +### Step 5: Run the tests + +Run specific tests: +``` +.\.dotnet\dotnet exec artifacts\bin\redist\Debug\dotnet.Tests.dll -method "*TestMethodName*" +``` + +Or run filtered tests via `dotnet test`: +``` +.\.dotnet\dotnet test test\dotnet.Tests\dotnet.Tests.csproj --no-build --filter "Name~TestMethodName" +``` + +## Common project paths + +| Assembly | Project Path | +|---|---| +| `dotnet.dll` | `src\Cli\dotnet\dotnet.csproj` | +| `Microsoft.DotNet.Cli.Utils.dll` | `src\Cli\Microsoft.DotNet.Cli.Utils\Microsoft.DotNet.Cli.Utils.csproj` | +| `Microsoft.DotNet.Cli.Definitions.dll` | `src\Cli\Microsoft.DotNet.Cli.Definitions\Microsoft.DotNet.Cli.Definitions.csproj` | +| `Microsoft.DotNet.Cli.CoreUtils.dll` | `src\Cli\Microsoft.DotNet.Cli.CoreUtils\Microsoft.DotNet.Cli.CoreUtils.csproj` | +| `Microsoft.DotNet.Configurer.dll` | `src\Cli\Microsoft.DotNet.Configurer\Microsoft.DotNet.Configurer.csproj` | +| `Microsoft.DotNet.ProjectTools.dll` | `src\Microsoft.DotNet.ProjectTools\Microsoft.DotNet.ProjectTools.csproj` | +| `Microsoft.DotNet.NativeWrapper.dll` | `src\Resolvers\Microsoft.DotNet.NativeWrapper\Microsoft.DotNet.NativeWrapper.csproj` | +| `Microsoft.DotNet.TemplateLocator.dll` | `src\Microsoft.DotNet.TemplateLocator\Microsoft.DotNet.TemplateLocator.csproj` | +| `Microsoft.DotNet.InternalAbstractions.dll` | `src\Cli\Microsoft.DotNet.InternalAbstractions\Microsoft.DotNet.InternalAbstractions.csproj` | +| `dotnet.Tests.dll` | `test\dotnet.Tests\dotnet.Tests.csproj` | diff --git a/.github/skills b/.github/skills~origin_main similarity index 100% rename from .github/skills 
rename to .github/skills~origin_main diff --git a/.github/workflows/add-tactics-template-on-comment.lock.yml b/.github/workflows/add-tactics-template-on-comment.lock.yml new file mode 100644 index 000000000000..02a0d94bfeff --- /dev/null +++ b/.github/workflows/add-tactics-template-on-comment.lock.yml @@ -0,0 +1,1328 @@ +# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"36fd040281b4bf04fd61ce436695dc4ad23cff6ec9e36f5ebbaa259c5921daed","compiler_version":"v0.68.3","strict":true,"agent_id":"copilot"} +# gh-aw-manifest: {"version":1,"secrets":["COPILOT_GITHUB_TOKEN","COPILOT_PAT_0","COPILOT_PAT_1","COPILOT_PAT_2","COPILOT_PAT_3","COPILOT_PAT_4","COPILOT_PAT_5","COPILOT_PAT_6","COPILOT_PAT_7","COPILOT_PAT_8","COPILOT_PAT_9","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"},{"repo":"github/gh-aw-actions/setup","sha":"ba90f2186d7ad780ec640f364005fa24e797b360","version":"v0.68.3"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.20"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.20"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.20"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.19"},{"image":"ghcr.io/github/github-mcp-server:v0.32.0"},{"image":"node:lts-alpine"}]} +# ___ _ _ +# / _ \ | | (_) +# | |_| | __ _ ___ _ __ | |_ _ ___ +# | _ |/ _` |/ _ \ '_ \| __| |/ __| +# | | | | (_| | __/ | | | |_| | (__ +# \_| |_/\__, |\___|_| |_|\__|_|\___| +# __/ | +# _ _ |___/ +# | | | | / _| | +# | | | | ___ _ __ _ __| |_| | _____ ____ +# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| +# \ /\ / (_) | | | | ( | | | | (_) \ V V 
/\__ \ +# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ +# +# This file was automatically generated by gh-aw (v0.68.3). DO NOT EDIT. +# +# To update this file, edit the corresponding .md file and run: +# gh aw compile +# Not all edits will cause changes to this file. +# +# For more information: https://github.github.com/gh-aw/introduction/overview/ +# +# +# Secrets used: +# - COPILOT_GITHUB_TOKEN +# - COPILOT_PAT_0 +# - COPILOT_PAT_1 +# - COPILOT_PAT_2 +# - COPILOT_PAT_3 +# - COPILOT_PAT_4 +# - COPILOT_PAT_5 +# - COPILOT_PAT_6 +# - COPILOT_PAT_7 +# - COPILOT_PAT_8 +# - COPILOT_PAT_9 +# - GH_AW_GITHUB_MCP_SERVER_TOKEN +# - GH_AW_GITHUB_TOKEN +# - GITHUB_TOKEN +# +# Custom actions used: +# - actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 +# - actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 +# - actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 +# - actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 +# - github/gh-aw-actions/setup@ba90f2186d7ad780ec640f364005fa24e797b360 # v0.68.3 +# +# Container images used: +# - ghcr.io/github/gh-aw-firewall/agent:0.25.20 +# - ghcr.io/github/gh-aw-firewall/api-proxy:0.25.20 +# - ghcr.io/github/gh-aw-firewall/squid:0.25.20 +# - ghcr.io/github/gh-aw-mcpg:v0.2.19 +# - ghcr.io/github/github-mcp-server:v0.32.0 +# - node:lts-alpine + +name: "Add Tactics Template On Comment" +"on": + issue_comment: + types: + - created + - edited + # steps: # Steps injected into pre-activation job + # - name: Checkout the select-copilot-pat action folder + # uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd + # with: + # fetch-depth: 1 + # persist-credentials: false + # sparse-checkout: .github/actions/select-copilot-pat + # sparse-checkout-cone-mode: true + # - env: + # SECRET_0: ${{ secrets.COPILOT_PAT_0 }} + # SECRET_1: ${{ secrets.COPILOT_PAT_1 }} + # SECRET_2: ${{ secrets.COPILOT_PAT_2 }} + # SECRET_3: ${{ secrets.COPILOT_PAT_3 }} + # SECRET_4: ${{ 
secrets.COPILOT_PAT_4 }} + # SECRET_5: ${{ secrets.COPILOT_PAT_5 }} + # SECRET_6: ${{ secrets.COPILOT_PAT_6 }} + # SECRET_7: ${{ secrets.COPILOT_PAT_7 }} + # SECRET_8: ${{ secrets.COPILOT_PAT_8 }} + # SECRET_9: ${{ secrets.COPILOT_PAT_9 }} + # id: select-copilot-pat + # name: Select Copilot token from pool + # uses: ./.github/actions/select-copilot-pat + +permissions: {} + +concurrency: + group: "gh-aw-${{ github.workflow }}-${{ github.event.issue.number || github.event.pull_request.number || github.run_id }}" + +run-name: "Add Tactics Template On Comment" + +jobs: + activation: + needs: pre_activation + if: needs.pre_activation.outputs.activated == 'true' && (github.event.repository.fork == false) + runs-on: ubuntu-slim + permissions: + actions: read + contents: read + discussions: write + issues: write + pull-requests: write + outputs: + body: ${{ steps.sanitized.outputs.body }} + comment_id: "" + comment_repo: "" + lockdown_check_failed: ${{ steps.generate_aw_info.outputs.lockdown_check_failed == 'true' }} + model: ${{ steps.generate_aw_info.outputs.model }} + secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} + setup-trace-id: ${{ steps.setup.outputs.trace-id }} + slash_command: ${{ needs.pre_activation.outputs.matched_command }} + stale_lock_file_failed: ${{ steps.check-lock-file.outputs.stale_lock_file_failed == 'true' }} + text: ${{ steps.sanitized.outputs.text }} + title: ${{ steps.sanitized.outputs.title }} + steps: + - name: Setup Scripts + id: setup + uses: github/gh-aw-actions/setup@ba90f2186d7ad780ec640f364005fa24e797b360 # v0.68.3 + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} + trace-id: ${{ needs.pre_activation.outputs.setup-trace-id }} + - name: Generate agentic run info + id: generate_aw_info + env: + GH_AW_INFO_ENGINE_ID: "copilot" + GH_AW_INFO_ENGINE_NAME: "GitHub Copilot CLI" + GH_AW_INFO_MODEL: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || 'auto' }} + GH_AW_INFO_VERSION: "1.0.21" 
+ GH_AW_INFO_AGENT_VERSION: "1.0.21" + GH_AW_INFO_CLI_VERSION: "v0.68.3" + GH_AW_INFO_WORKFLOW_NAME: "Add Tactics Template On Comment" + GH_AW_INFO_EXPERIMENTAL: "false" + GH_AW_INFO_SUPPORTS_TOOLS_ALLOWLIST: "true" + GH_AW_INFO_STAGED: "false" + GH_AW_INFO_ALLOWED_DOMAINS: '["defaults"]' + GH_AW_INFO_FIREWALL_ENABLED: "true" + GH_AW_INFO_AWF_VERSION: "v0.25.20" + GH_AW_INFO_AWMG_VERSION: "" + GH_AW_INFO_FIREWALL_TYPE: "squid" + GH_AW_COMPILED_STRICT: "true" + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/generate_aw_info.cjs'); + await main(core, context); + - name: Add +1 reaction for immediate feedback + id: react + if: github.event_name == 'issues' || github.event_name == 'issue_comment' || github.event_name == 'pull_request_review_comment' || github.event_name == 'discussion' || github.event_name == 'discussion_comment' || github.event_name == 'pull_request' && github.event.pull_request.head.repo.id == github.repository_id + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_REACTION: "+1" + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/add_reaction.cjs'); + await main(); + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: bash "${RUNNER_TEMP}/gh-aw/actions/validate_multi_secret.sh" COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ case(needs.pre_activation.outputs.copilot_pat_number == '0', secrets.COPILOT_PAT_0, 
needs.pre_activation.outputs.copilot_pat_number == '1', secrets.COPILOT_PAT_1, needs.pre_activation.outputs.copilot_pat_number == '2', secrets.COPILOT_PAT_2, needs.pre_activation.outputs.copilot_pat_number == '3', secrets.COPILOT_PAT_3, needs.pre_activation.outputs.copilot_pat_number == '4', secrets.COPILOT_PAT_4, needs.pre_activation.outputs.copilot_pat_number == '5', secrets.COPILOT_PAT_5, needs.pre_activation.outputs.copilot_pat_number == '6', secrets.COPILOT_PAT_6, needs.pre_activation.outputs.copilot_pat_number == '7', secrets.COPILOT_PAT_7, needs.pre_activation.outputs.copilot_pat_number == '8', secrets.COPILOT_PAT_8, needs.pre_activation.outputs.copilot_pat_number == '9', secrets.COPILOT_PAT_9, secrets.COPILOT_GITHUB_TOKEN) }} + - name: Checkout .github and .agents folders + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + sparse-checkout: | + .github + .agents + sparse-checkout-cone-mode: true + fetch-depth: 1 + - name: Check workflow lock file + id: check-lock-file + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_WORKFLOW_FILE: "add-tactics-template-on-comment.lock.yml" + GH_AW_CONTEXT_WORKFLOW_REF: "${{ github.workflow_ref }}" + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/check_workflow_timestamp_api.cjs'); + await main(); + - name: Check compile-agentic version + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_COMPILED_VERSION: "v0.68.3" + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/check_version_updates.cjs'); + await main(); + - name: Compute 
current body text + id: sanitized + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/compute_text.cjs'); + await main(); + - name: Create prompt with built-in context + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ runner.temp }}/gh-aw/safeoutputs/outputs.jsonl + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} + GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT: ${{ steps.sanitized.outputs.text }} + # poutine:ignore untrusted_checkout_exec + run: | + bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh" + { + cat << 'GH_AW_PROMPT_4ba01523648299ab_EOF' + + GH_AW_PROMPT_4ba01523648299ab_EOF + cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md" + cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md" + cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md" + cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md" + cat << 'GH_AW_PROMPT_4ba01523648299ab_EOF' + + Tools: update_pull_request, missing_tool, missing_data, noop + + + The following GitHub context information is available for this workflow: + {{#if __GH_AW_GITHUB_ACTOR__ }} + - **actor**: __GH_AW_GITHUB_ACTOR__ + {{/if}} + {{#if __GH_AW_GITHUB_REPOSITORY__ }} + - **repository**: __GH_AW_GITHUB_REPOSITORY__ + {{/if}} + {{#if 
__GH_AW_GITHUB_WORKSPACE__ }} + - **workspace**: __GH_AW_GITHUB_WORKSPACE__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} + - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} + - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} + - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} + - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ + {{/if}} + {{#if __GH_AW_GITHUB_RUN_ID__ }} + - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ + {{/if}} + + + GH_AW_PROMPT_4ba01523648299ab_EOF + cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md" + if [ "$GITHUB_EVENT_NAME" = "issue_comment" ] && [ -n "$GH_AW_IS_PR_COMMENT" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review" ]; then + cat "${RUNNER_TEMP}/gh-aw/prompts/pr_context_prompt.md" + fi + cat << 'GH_AW_PROMPT_4ba01523648299ab_EOF' + + {{#runtime-import .github/workflows/add-tactics-template-on-comment.md}} + GH_AW_PROMPT_4ba01523648299ab_EOF + } > "$GH_AW_PROMPT" + - name: Interpolate variables and render templates + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT: ${{ steps.sanitized.outputs.text }} + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/interpolate_prompt.cjs'); + await main(); + - name: Substitute 
placeholders + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: ${{ needs.pre_activation.outputs.activated }} + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: ${{ needs.pre_activation.outputs.matched_command }} + GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT: ${{ steps.sanitized.outputs.text }} + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + + const substitutePlaceholders = require('${{ runner.temp }}/gh-aw/actions/substitute_placeholders.cjs'); + + // Call the substitution function + return await substitutePlaceholders({ + file: process.env.GH_AW_PROMPT, + substitutions: { + GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, + GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, + GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, + GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, + GH_AW_IS_PR_COMMENT: 
process.env.GH_AW_IS_PR_COMMENT, + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED, + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND, + GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT: process.env.GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT + } + }); + - name: Validate prompt placeholders + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + # poutine:ignore untrusted_checkout_exec + run: bash "${RUNNER_TEMP}/gh-aw/actions/validate_prompt_placeholders.sh" + - name: Print prompt + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + # poutine:ignore untrusted_checkout_exec + run: bash "${RUNNER_TEMP}/gh-aw/actions/print_prompt_summary.sh" + - name: Upload activation artifact + if: success() + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: activation + path: | + /tmp/gh-aw/aw_info.json + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/github_rate_limits.jsonl + if-no-files-found: ignore + retention-days: 1 + + agent: + needs: activation + runs-on: ubuntu-latest + permissions: + contents: read + issues: read + pull-requests: read + env: + DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + GH_AW_ASSETS_ALLOWED_EXTS: "" + GH_AW_ASSETS_BRANCH: "" + GH_AW_ASSETS_MAX_SIZE_KB: 0 + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + GH_AW_WORKFLOW_ID_SANITIZED: addtacticstemplateoncomment + outputs: + agentic_engine_timeout: ${{ steps.detect-copilot-errors.outputs.agentic_engine_timeout || 'false' }} + checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} + effective_tokens: ${{ steps.parse-mcp-gateway.outputs.effective_tokens }} + has_patch: ${{ steps.collect_output.outputs.has_patch }} + inference_access_error: ${{ steps.detect-copilot-errors.outputs.inference_access_error || 'false' }} + mcp_policy_error: ${{ steps.detect-copilot-errors.outputs.mcp_policy_error || 'false' }} + 
model: ${{ needs.activation.outputs.model }} + model_not_supported_error: ${{ steps.detect-copilot-errors.outputs.model_not_supported_error || 'false' }} + output: ${{ steps.collect_output.outputs.output }} + output_types: ${{ steps.collect_output.outputs.output_types }} + setup-trace-id: ${{ steps.setup.outputs.trace-id }} + steps: + - name: Setup Scripts + id: setup + uses: github/gh-aw-actions/setup@ba90f2186d7ad780ec640f364005fa24e797b360 # v0.68.3 + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} + trace-id: ${{ needs.activation.outputs.setup-trace-id }} + - name: Set runtime paths + id: set-runtime-paths + run: | + { + echo "GH_AW_SAFE_OUTPUTS=${RUNNER_TEMP}/gh-aw/safeoutputs/outputs.jsonl" + echo "GH_AW_SAFE_OUTPUTS_CONFIG_PATH=${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" + echo "GH_AW_SAFE_OUTPUTS_TOOLS_PATH=${RUNNER_TEMP}/gh-aw/safeoutputs/tools.json" + } >> "$GITHUB_OUTPUT" + - name: Checkout repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + - name: Create gh-aw temp directory + run: bash "${RUNNER_TEMP}/gh-aw/actions/create_gh_aw_tmp_dir.sh" + - name: Configure gh CLI for GitHub Enterprise + run: bash "${RUNNER_TEMP}/gh-aw/actions/configure_gh_for_ghe.sh" + env: + GH_TOKEN: ${{ github.token }} + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + GITHUB_TOKEN: ${{ github.token }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${GITHUB_TOKEN}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Checkout PR branch + id: checkout-pr + if: | + 
github.event.pull_request || github.event.issue.pull_request + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/checkout_pr_branch.cjs'); + await main(); + - name: Install GitHub Copilot CLI + run: bash "${RUNNER_TEMP}/gh-aw/actions/install_copilot_cli.sh" 1.0.21 + env: + GH_HOST: github.com + - name: Install AWF binary + run: bash "${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh" v0.25.20 + - name: Determine automatic lockdown mode for GitHub MCP Server + id: determine-automatic-lockdown + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + with: + script: | + const determineAutomaticLockdown = require('${{ runner.temp }}/gh-aw/actions/determine_automatic_lockdown.cjs'); + await determineAutomaticLockdown(github, context, core); + - name: Download container images + run: bash "${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh" ghcr.io/github/gh-aw-firewall/agent:0.25.20 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.20 ghcr.io/github/gh-aw-firewall/squid:0.25.20 ghcr.io/github/gh-aw-mcpg:v0.2.19 ghcr.io/github/github-mcp-server:v0.32.0 node:lts-alpine + - name: Write Safe Outputs Config + run: | + mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs" + mkdir -p /tmp/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs + cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 
'GH_AW_SAFE_OUTPUTS_CONFIG_0bc3e8469dd3293c_EOF' + {"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"false"},"report_incomplete":{},"update_pull_request":{"allow_body":true,"allow_title":false,"footer":false,"max":1,"target":"${{ github.event.issue.number }}"}} + GH_AW_SAFE_OUTPUTS_CONFIG_0bc3e8469dd3293c_EOF + - name: Write Safe Outputs Tools + env: + GH_AW_TOOLS_META_JSON: | + { + "description_suffixes": { + "update_pull_request": " CONSTRAINTS: Maximum 1 pull request(s) can be updated. Target: ${{ github.event.issue.number }}." + }, + "repo_params": {}, + "dynamic_tools": [] + } + GH_AW_VALIDATION_JSON: | + { + "missing_data": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "context": { + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "data_type": { + "type": "string", + "sanitize": true, + "maxLength": 128 + }, + "reason": { + "type": "string", + "sanitize": true, + "maxLength": 256 + } + } + }, + "missing_tool": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + "maxLength": 512 + }, + "reason": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "tool": { + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "noop": { + "defaultMax": 1, + "fields": { + "message": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + } + } + }, + "report_incomplete": { + "defaultMax": 5, + "fields": { + "details": { + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "reason": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 1024 + } + } + }, + "update_pull_request": { + "defaultMax": 1, + "fields": { + "body": { + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "draft": { + "type": "boolean" + }, + "operation": { + "type": "string", + 
"enum": [ + "replace", + "append", + "prepend" + ] + }, + "pull_request_number": { + "issueOrPRNumber": true + }, + "repo": { + "type": "string", + "maxLength": 256 + }, + "title": { + "type": "string", + "sanitize": true, + "maxLength": 256 + } + }, + "customValidation": "requiresOneOf:title,body" + } + } + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/generate_safe_outputs_tools.cjs'); + await main(); + - name: Generate Safe Outputs MCP Server Config + id: safe-outputs-config + run: | + # Generate a secure random API key (360 bits of entropy, 40+ chars) + # Mask immediately to prevent timing vulnerabilities + API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${API_KEY}" + + PORT=3001 + + # Set outputs for next steps + { + echo "safe_outputs_api_key=${API_KEY}" + echo "safe_outputs_port=${PORT}" + } >> "$GITHUB_OUTPUT" + + echo "Safe Outputs MCP server will run on port ${PORT}" + + - name: Start Safe Outputs MCP HTTP Server + id: safe-outputs-start + env: + DEBUG: '*' + GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: ${{ runner.temp }}/gh-aw/safeoutputs/tools.json + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: ${{ runner.temp }}/gh-aw/safeoutputs/config.json + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + run: | + # Environment variables are set above to prevent template injection + export DEBUG + export GH_AW_SAFE_OUTPUTS + export GH_AW_SAFE_OUTPUTS_PORT + export GH_AW_SAFE_OUTPUTS_API_KEY + export GH_AW_SAFE_OUTPUTS_TOOLS_PATH + export 
GH_AW_SAFE_OUTPUTS_CONFIG_PATH + export GH_AW_MCP_LOG_DIR + + bash "${RUNNER_TEMP}/gh-aw/actions/start_safe_outputs_server.sh" + + - name: Start MCP Gateway + id: start-mcp-gateway + env: + GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} + GITHUB_MCP_GUARD_MIN_INTEGRITY: ${{ steps.determine-automatic-lockdown.outputs.min_integrity }} + GITHUB_MCP_GUARD_REPOS: ${{ steps.determine-automatic-lockdown.outputs.repos }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + set -eo pipefail + mkdir -p /tmp/gh-aw/mcp-config + + # Export gateway environment variables for MCP config and gateway script + export MCP_GATEWAY_PORT="80" + export MCP_GATEWAY_DOMAIN="host.docker.internal" + MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${MCP_GATEWAY_API_KEY}" + export MCP_GATEWAY_API_KEY + export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" + mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" + export DEBUG="*" + + export GH_AW_ENGINE="copilot" + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e 
GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.19' + + mkdir -p /home/runner/.copilot + cat << GH_AW_MCP_CONFIG_43d5126e1ec9dd03_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh" + { + "mcpServers": { + "github": { + "type": "stdio", + "container": "ghcr.io/github/github-mcp-server:v0.32.0", + "env": { + "GITHUB_HOST": "\${GITHUB_SERVER_URL}", + "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}", + "GITHUB_READ_ONLY": "1", + "GITHUB_TOOLSETS": "context,repos,issues,pull_requests" + }, + "guard-policies": { + "allow-only": { + "min-integrity": "$GITHUB_MCP_GUARD_MIN_INTEGRITY", + "repos": "$GITHUB_MCP_GUARD_REPOS" + } + } + }, + "safeoutputs": { + "type": "http", + "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", + "headers": { + "Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}" + }, + "guard-policies": { + "write-sink": { + "accept": [ + "*" + ] + } + } + } + }, + "gateway": { + "port": $MCP_GATEWAY_PORT, + "domain": "${MCP_GATEWAY_DOMAIN}", + "apiKey": "${MCP_GATEWAY_API_KEY}", + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + } + } + GH_AW_MCP_CONFIG_43d5126e1ec9dd03_EOF + - name: Download activation artifact + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: activation + path: /tmp/gh-aw + - name: Clean git credentials + continue-on-error: true + run: bash "${RUNNER_TEMP}/gh-aw/actions/clean_git_credentials.sh" + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool 
arguments (sorted): + timeout-minutes: 20 + run: | + set -o pipefail + touch /tmp/gh-aw/agent-step-summary.md + (umask 177 && touch /tmp/gh-aw/agent-stdio.log) + # shellcheck disable=SC1003 + sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --exclude-env COPILOT_GITHUB_TOKEN --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.20 --skip-pull --enable-api-proxy \ + -- /bin/bash -c 'node ${RUNNER_TEMP}/gh-aw/actions/copilot_driver.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-all-tools --allow-all-paths --add-dir "${GITHUB_WORKSPACE}" --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + 
COPILOT_GITHUB_TOKEN: ${{ needs.pre_activation.outputs.copilot_pat_number == '0' && secrets.COPILOT_PAT_0 || needs.pre_activation.outputs.copilot_pat_number == '1' && secrets.COPILOT_PAT_1 || needs.pre_activation.outputs.copilot_pat_number == '2' && secrets.COPILOT_PAT_2 || needs.pre_activation.outputs.copilot_pat_number == '3' && secrets.COPILOT_PAT_3 || needs.pre_activation.outputs.copilot_pat_number == '4' && secrets.COPILOT_PAT_4 || needs.pre_activation.outputs.copilot_pat_number == '5' && secrets.COPILOT_PAT_5 || needs.pre_activation.outputs.copilot_pat_number == '6' && secrets.COPILOT_PAT_6 || needs.pre_activation.outputs.copilot_pat_number == '7' && secrets.COPILOT_PAT_7 || needs.pre_activation.outputs.copilot_pat_number == '8' && secrets.COPILOT_PAT_8 || needs.pre_activation.outputs.copilot_pat_number == '9' && secrets.COPILOT_PAT_9 || secrets.COPILOT_GITHUB_TOKEN }} + COPILOT_MODEL: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }} + GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json + GH_AW_PHASE: agent + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} + GH_AW_VERSION: v0.68.3 + GITHUB_API_URL: ${{ github.api_url }} + GITHUB_AW: true + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md + GITHUB_WORKSPACE: ${{ github.workspace }} + GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_AUTHOR_NAME: github-actions[bot] + GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_COMMITTER_NAME: github-actions[bot] + XDG_CONFIG_HOME: /home/runner + - name: Detect Copilot errors + id: detect-copilot-errors + if: always() + continue-on-error: true + run: node "${RUNNER_TEMP}/gh-aw/actions/detect_copilot_errors.cjs" + - name: Configure 
Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + GITHUB_TOKEN: ${{ github.token }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${GITHUB_TOKEN}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Copy Copilot session state files to logs + if: always() + continue-on-error: true + run: bash "${RUNNER_TEMP}/gh-aw/actions/copy_copilot_session_state.sh" + - name: Stop MCP Gateway + if: always() + continue-on-error: true + env: + MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} + MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} + GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} + run: | + bash "${RUNNER_TEMP}/gh-aw/actions/stop_mcp_gateway.sh" "$GATEWAY_PID" + - name: Redact secrets in logs + if: always() + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/redact_secrets.cjs'); + await main(); + env: + GH_AW_SECRET_NAMES: 'COPILOT_GITHUB_TOKEN,COPILOT_PAT_0,COPILOT_PAT_1,COPILOT_PAT_2,COPILOT_PAT_3,COPILOT_PAT_4,COPILOT_PAT_5,COPILOT_PAT_6,COPILOT_PAT_7,COPILOT_PAT_8,COPILOT_PAT_9,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' + SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + SECRET_COPILOT_PAT_0: ${{ secrets.COPILOT_PAT_0 }} + SECRET_COPILOT_PAT_1: ${{ secrets.COPILOT_PAT_1 }} + SECRET_COPILOT_PAT_2: ${{ secrets.COPILOT_PAT_2 }} + 
SECRET_COPILOT_PAT_3: ${{ secrets.COPILOT_PAT_3 }} + SECRET_COPILOT_PAT_4: ${{ secrets.COPILOT_PAT_4 }} + SECRET_COPILOT_PAT_5: ${{ secrets.COPILOT_PAT_5 }} + SECRET_COPILOT_PAT_6: ${{ secrets.COPILOT_PAT_6 }} + SECRET_COPILOT_PAT_7: ${{ secrets.COPILOT_PAT_7 }} + SECRET_COPILOT_PAT_8: ${{ secrets.COPILOT_PAT_8 }} + SECRET_COPILOT_PAT_9: ${{ secrets.COPILOT_PAT_9 }} + SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Append agent step summary + if: always() + run: bash "${RUNNER_TEMP}/gh-aw/actions/append_agent_step_summary.sh" + - name: Copy Safe Outputs + if: always() + env: + GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} + run: | + mkdir -p /tmp/gh-aw + cp "$GH_AW_SAFE_OUTPUTS" /tmp/gh-aw/safeoutputs.jsonl 2>/dev/null || true + - name: Ingest agent output + id: collect_output + if: always() + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} + GH_AW_ALLOWED_DOMAINS: 
"api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + GH_AW_COMMAND: tactics + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/collect_ndjson_output.cjs'); + await main(); + - name: Parse agent logs for step summary + if: always() + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_copilot_log.cjs'); + await main(); + - name: Parse MCP Gateway logs for step summary + if: always() + id: parse-mcp-gateway + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + with: + script: | + const { setupGlobals } = require('${{ runner.temp 
}}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_mcp_gateway_log.cjs'); + await main(); + - name: Print firewall logs + if: always() + continue-on-error: true + env: + AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs + run: | + # Fix permissions on firewall logs so they can be uploaded as artifacts + # AWF runs with sudo, creating files owned by root + sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true + # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) + if command -v awf &> /dev/null; then + awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" + else + echo 'AWF binary not installed, skipping firewall log summary' + fi + - name: Parse token usage for step summary + if: always() + continue-on-error: true + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_token_usage.cjs'); + await main(); + - name: Write agent output placeholder if missing + if: always() + run: | + if [ ! 
-f /tmp/gh-aw/agent_output.json ]; then + echo '{"items":[]}' > /tmp/gh-aw/agent_output.json + fi + - name: Upload agent artifacts + if: always() + continue-on-error: true + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: agent + path: | + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/sandbox/agent/logs/ + /tmp/gh-aw/redacted-urls.log + /tmp/gh-aw/mcp-logs/ + /tmp/gh-aw/agent_usage.json + /tmp/gh-aw/agent-stdio.log + /tmp/gh-aw/agent/ + /tmp/gh-aw/github_rate_limits.jsonl + /tmp/gh-aw/safeoutputs.jsonl + /tmp/gh-aw/agent_output.json + /tmp/gh-aw/aw-*.patch + /tmp/gh-aw/aw-*.bundle + /tmp/gh-aw/sandbox/firewall/logs/ + /tmp/gh-aw/sandbox/firewall/audit/ + if-no-files-found: ignore + + conclusion: + needs: + - activation + - agent + - detection + - safe_outputs + if: > + always() && (needs.agent.result != 'skipped' || needs.activation.outputs.lockdown_check_failed == 'true' || + needs.activation.outputs.stale_lock_file_failed == 'true') + runs-on: ubuntu-slim + permissions: + contents: read + pull-requests: write + concurrency: + group: "gh-aw-conclusion-add-tactics-template-on-comment" + cancel-in-progress: false + outputs: + incomplete_count: ${{ steps.report_incomplete.outputs.incomplete_count }} + noop_message: ${{ steps.noop.outputs.noop_message }} + tools_reported: ${{ steps.missing_tool.outputs.tools_reported }} + total_count: ${{ steps.missing_tool.outputs.total_count }} + steps: + - name: Setup Scripts + id: setup + uses: github/gh-aw-actions/setup@ba90f2186d7ad780ec640f364005fa24e797b360 # v0.68.3 + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} + trace-id: ${{ needs.activation.outputs.setup-trace-id }} + - name: Download agent output artifact + id: download-agent-output + continue-on-error: true + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: agent + path: /tmp/gh-aw/ + - name: Setup agent output environment variable + 
id: setup-agent-output-env + if: steps.download-agent-output.outcome == 'success' + run: | + mkdir -p /tmp/gh-aw/ + find "/tmp/gh-aw/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT" + - name: Process no-op messages + id: noop + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_NOOP_MAX: "1" + GH_AW_WORKFLOW_NAME: "Add Tactics Template On Comment" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_NOOP_REPORT_AS_ISSUE: "false" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_noop_message.cjs'); + await main(); + - name: Log detection run + id: detection_runs + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Add Tactics Template On Comment" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_DETECTION_CONCLUSION: ${{ needs.detection.outputs.detection_conclusion }} + GH_AW_DETECTION_REASON: ${{ needs.detection.outputs.detection_reason }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_detection_runs.cjs'); + await main(); + - name: Record missing tool + id: missing_tool + uses: 
actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_MISSING_TOOL_CREATE_ISSUE: "true" + GH_AW_WORKFLOW_NAME: "Add Tactics Template On Comment" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/missing_tool.cjs'); + await main(); + - name: Record incomplete + id: report_incomplete + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_REPORT_INCOMPLETE_CREATE_ISSUE: "true" + GH_AW_WORKFLOW_NAME: "Add Tactics Template On Comment" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/report_incomplete_handler.cjs'); + await main(); + - name: Handle agent failure + id: handle_agent_failure + if: always() + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Add Tactics Template On Comment" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_WORKFLOW_ID: "add-tactics-template-on-comment" + GH_AW_ENGINE_ID: "copilot" + GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.activation.outputs.secret_verification_result }} + GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} + 
GH_AW_INFERENCE_ACCESS_ERROR: ${{ needs.agent.outputs.inference_access_error }} + GH_AW_MCP_POLICY_ERROR: ${{ needs.agent.outputs.mcp_policy_error }} + GH_AW_AGENTIC_ENGINE_TIMEOUT: ${{ needs.agent.outputs.agentic_engine_timeout }} + GH_AW_MODEL_NOT_SUPPORTED_ERROR: ${{ needs.agent.outputs.model_not_supported_error }} + GH_AW_LOCKDOWN_CHECK_FAILED: ${{ needs.activation.outputs.lockdown_check_failed }} + GH_AW_STALE_LOCK_FILE_FAILED: ${{ needs.activation.outputs.stale_lock_file_failed }} + GH_AW_GROUP_REPORTS: "false" + GH_AW_FAILURE_REPORT_AS_ISSUE: "true" + GH_AW_TIMEOUT_MINUTES: "20" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_agent_failure.cjs'); + await main(); + + detection: + needs: + - activation + - agent + if: > + always() && needs.agent.result != 'skipped' && (needs.agent.outputs.output_types != '' || needs.agent.outputs.has_patch == 'true') + runs-on: ubuntu-latest + permissions: + contents: read + outputs: + detection_conclusion: ${{ steps.detection_conclusion.outputs.conclusion }} + detection_reason: ${{ steps.detection_conclusion.outputs.reason }} + detection_success: ${{ steps.detection_conclusion.outputs.success }} + steps: + - name: Setup Scripts + id: setup + uses: github/gh-aw-actions/setup@ba90f2186d7ad780ec640f364005fa24e797b360 # v0.68.3 + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} + trace-id: ${{ needs.activation.outputs.setup-trace-id }} + - name: Download agent output artifact + id: download-agent-output + continue-on-error: true + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: agent + path: /tmp/gh-aw/ + - name: Setup agent output environment variable + id: setup-agent-output-env + if: 
steps.download-agent-output.outcome == 'success' + run: | + mkdir -p /tmp/gh-aw/ + find "/tmp/gh-aw/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT" + - name: Checkout repository for patch context + if: needs.agent.outputs.has_patch == 'true' + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + # --- Threat Detection --- + - name: Clean stale firewall files from agent artifact + run: | + rm -rf /tmp/gh-aw/sandbox/firewall/logs + rm -rf /tmp/gh-aw/sandbox/firewall/audit + - name: Download container images + run: bash "${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh" ghcr.io/github/gh-aw-firewall/agent:0.25.20 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.20 ghcr.io/github/gh-aw-firewall/squid:0.25.20 + - name: Check if detection needed + id: detection_guard + if: always() + env: + OUTPUT_TYPES: ${{ needs.agent.outputs.output_types }} + HAS_PATCH: ${{ needs.agent.outputs.has_patch }} + run: | + if [[ -n "$OUTPUT_TYPES" || "$HAS_PATCH" == "true" ]]; then + echo "run_detection=true" >> "$GITHUB_OUTPUT" + echo "Detection will run: output_types=$OUTPUT_TYPES, has_patch=$HAS_PATCH" + else + echo "run_detection=false" >> "$GITHUB_OUTPUT" + echo "Detection skipped: no agent outputs or patches to analyze" + fi + - name: Clear MCP configuration for detection + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + rm -f /tmp/gh-aw/mcp-config/mcp-servers.json + rm -f /home/runner/.copilot/mcp-config.json + rm -f "$GITHUB_WORKSPACE/.gemini/settings.json" + - name: Prepare threat detection files + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + mkdir -p /tmp/gh-aw/threat-detection/aw-prompts + cp /tmp/gh-aw/aw-prompts/prompt.txt /tmp/gh-aw/threat-detection/aw-prompts/prompt.txt 2>/dev/null || true + cp /tmp/gh-aw/agent_output.json /tmp/gh-aw/threat-detection/agent_output.json 2>/dev/null || true + for f in 
/tmp/gh-aw/aw-*.patch; do + [ -f "$f" ] && cp "$f" /tmp/gh-aw/threat-detection/ 2>/dev/null || true + done + for f in /tmp/gh-aw/aw-*.bundle; do + [ -f "$f" ] && cp "$f" /tmp/gh-aw/threat-detection/ 2>/dev/null || true + done + echo "Prepared threat detection files:" + ls -la /tmp/gh-aw/threat-detection/ 2>/dev/null || true + - name: Setup threat detection + if: always() && steps.detection_guard.outputs.run_detection == 'true' + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + WORKFLOW_NAME: "Add Tactics Template On Comment" + WORKFLOW_DESCRIPTION: "No description provided" + HAS_PATCH: ${{ needs.agent.outputs.has_patch }} + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/setup_threat_detection.cjs'); + await main(); + - name: Ensure threat-detection directory and log + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + mkdir -p /tmp/gh-aw/threat-detection + touch /tmp/gh-aw/threat-detection/detection.log + - name: Install GitHub Copilot CLI + run: bash "${RUNNER_TEMP}/gh-aw/actions/install_copilot_cli.sh" 1.0.21 + env: + GH_HOST: github.com + - name: Install AWF binary + run: bash "${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh" v0.25.20 + - name: Execute GitHub Copilot CLI + if: always() && steps.detection_guard.outputs.run_detection == 'true' + id: detection_agentic_execution + # Copilot CLI tool arguments (sorted): + timeout-minutes: 20 + run: | + set -o pipefail + touch /tmp/gh-aw/agent-step-summary.md + (umask 177 && touch /tmp/gh-aw/threat-detection/detection.log) + # shellcheck disable=SC1003 + sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --exclude-env COPILOT_GITHUB_TOKEN 
--allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,github.com,host.docker.internal,telemetry.enterprise.githubcopilot.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.20 --skip-pull --enable-api-proxy \ + -- /bin/bash -c 'node ${RUNNER_TEMP}/gh-aw/actions/copilot_driver.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-all-tools --add-dir "${GITHUB_WORKSPACE}" --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ case(needs.pre_activation.outputs.copilot_pat_number == '0', secrets.COPILOT_PAT_0, needs.pre_activation.outputs.copilot_pat_number == '1', secrets.COPILOT_PAT_1, needs.pre_activation.outputs.copilot_pat_number == '2', secrets.COPILOT_PAT_2, needs.pre_activation.outputs.copilot_pat_number == '3', secrets.COPILOT_PAT_3, needs.pre_activation.outputs.copilot_pat_number == '4', secrets.COPILOT_PAT_4, needs.pre_activation.outputs.copilot_pat_number == '5', secrets.COPILOT_PAT_5, needs.pre_activation.outputs.copilot_pat_number == '6', secrets.COPILOT_PAT_6, needs.pre_activation.outputs.copilot_pat_number == '7', secrets.COPILOT_PAT_7, needs.pre_activation.outputs.copilot_pat_number == '8', secrets.COPILOT_PAT_8, needs.pre_activation.outputs.copilot_pat_number == '9', secrets.COPILOT_PAT_9, secrets.COPILOT_GITHUB_TOKEN) }} + COPILOT_MODEL: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || '' }} + GH_AW_PHASE: detection + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_VERSION: v0.68.3 + GITHUB_API_URL: ${{ github.api_url }} + GITHUB_AW: true + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + 
GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md + GITHUB_WORKSPACE: ${{ github.workspace }} + GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_AUTHOR_NAME: github-actions[bot] + GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_COMMITTER_NAME: github-actions[bot] + XDG_CONFIG_HOME: /home/runner + - name: Upload threat detection log + if: always() && steps.detection_guard.outputs.run_detection == 'true' + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: detection + path: /tmp/gh-aw/threat-detection/detection.log + if-no-files-found: ignore + - name: Parse and conclude threat detection + id: detection_conclusion + if: always() + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + RUN_DETECTION: ${{ steps.detection_guard.outputs.run_detection }} + GH_AW_DETECTION_CONTINUE_ON_ERROR: "true" + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_threat_detection_results.cjs'); + await main(); + + pre_activation: + if: github.event.repository.fork == false + runs-on: ubuntu-slim + outputs: + activated: ${{ steps.check_membership.outputs.is_team_member == 'true' && steps.check_command_position.outputs.command_position_ok == 'true' }} + copilot_pat_number: ${{ steps.select-copilot-pat.outputs.copilot_pat_number }} + matched_command: ${{ steps.check_command_position.outputs.matched_command }} + select-copilot-pat_result: ${{ steps.select-copilot-pat.outcome }} + setup-trace-id: ${{ steps.setup.outputs.trace-id }} + steps: + - name: Setup Scripts + id: setup + uses: github/gh-aw-actions/setup@ba90f2186d7ad780ec640f364005fa24e797b360 # v0.68.3 + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} 
+ - name: Check team membership for command workflow + id: check_membership + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_REQUIRED_ROLES: "admin,maintainer,write" + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/check_membership.cjs'); + await main(); + - name: Check command position + id: check_command_position + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_COMMANDS: "[\"tactics\"]" + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/check_command_position.cjs'); + await main(); + - name: Checkout the select-copilot-pat action folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + fetch-depth: 1 + persist-credentials: false + sparse-checkout: .github/actions/select-copilot-pat + sparse-checkout-cone-mode: true + - name: Select Copilot token from pool + id: select-copilot-pat + uses: ./.github/actions/select-copilot-pat + env: + SECRET_0: ${{ secrets.COPILOT_PAT_0 }} + SECRET_1: ${{ secrets.COPILOT_PAT_1 }} + SECRET_2: ${{ secrets.COPILOT_PAT_2 }} + SECRET_3: ${{ secrets.COPILOT_PAT_3 }} + SECRET_4: ${{ secrets.COPILOT_PAT_4 }} + SECRET_5: ${{ secrets.COPILOT_PAT_5 }} + SECRET_6: ${{ secrets.COPILOT_PAT_6 }} + SECRET_7: ${{ secrets.COPILOT_PAT_7 }} + SECRET_8: ${{ secrets.COPILOT_PAT_8 }} + SECRET_9: ${{ secrets.COPILOT_PAT_9 }} + + safe_outputs: + needs: + - activation + - agent + - detection + if: (!cancelled()) && needs.agent.result != 'skipped' && needs.detection.result == 'success' + runs-on: ubuntu-slim + permissions: + contents: read + 
pull-requests: write + timeout-minutes: 15 + env: + GH_AW_CALLER_WORKFLOW_ID: "${{ github.repository }}/add-tactics-template-on-comment" + GH_AW_DETECTION_CONCLUSION: ${{ needs.detection.outputs.detection_conclusion }} + GH_AW_DETECTION_REASON: ${{ needs.detection.outputs.detection_reason }} + GH_AW_EFFECTIVE_TOKENS: ${{ needs.agent.outputs.effective_tokens }} + GH_AW_ENGINE_ID: "copilot" + GH_AW_ENGINE_MODEL: ${{ needs.agent.outputs.model }} + GH_AW_WORKFLOW_ID: "add-tactics-template-on-comment" + GH_AW_WORKFLOW_NAME: "Add Tactics Template On Comment" + outputs: + code_push_failure_count: ${{ steps.process_safe_outputs.outputs.code_push_failure_count }} + code_push_failure_errors: ${{ steps.process_safe_outputs.outputs.code_push_failure_errors }} + create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} + create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} + process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} + process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} + steps: + - name: Setup Scripts + id: setup + uses: github/gh-aw-actions/setup@ba90f2186d7ad780ec640f364005fa24e797b360 # v0.68.3 + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} + trace-id: ${{ needs.activation.outputs.setup-trace-id }} + - name: Download agent output artifact + id: download-agent-output + continue-on-error: true + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: agent + path: /tmp/gh-aw/ + - name: Setup agent output environment variable + id: setup-agent-output-env + if: steps.download-agent-output.outcome == 'success' + run: | + mkdir -p /tmp/gh-aw/ + find "/tmp/gh-aw/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT" + - name: Configure GH_HOST for enterprise compatibility + id: 
ghes-host-config + shell: bash + run: | + # Derive GH_HOST from GITHUB_SERVER_URL so the gh CLI targets the correct + # GitHub instance (GHES/GHEC). On github.com this is a harmless no-op. + GH_HOST="${GITHUB_SERVER_URL#https://}" + GH_HOST="${GH_HOST#http://}" + echo "GH_HOST=${GH_HOST}" >> "$GITHUB_ENV" + - name: Process Safe Outputs + id: process_safe_outputs + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_report_incomplete_issue\":{},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"false\"},\"report_incomplete\":{},\"update_pull_request\":{\"allow_body\":true,\"allow_title\":false,\"footer\":false,\"max\":1,\"target\":\"${{ github.event.issue.number }}\"}}" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = 
require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/safe_output_handler_manager.cjs'); + await main(); + - name: Upload Safe Outputs Items + if: always() + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: safe-outputs-items + path: | + /tmp/gh-aw/safe-output-items.jsonl + /tmp/gh-aw/temporary-id-map.json + if-no-files-found: ignore + diff --git a/.github/workflows/add-tactics-template-on-comment.md b/.github/workflows/add-tactics-template-on-comment.md new file mode 100644 index 000000000000..185773848957 --- /dev/null +++ b/.github/workflows/add-tactics-template-on-comment.md @@ -0,0 +1,159 @@ +--- +permissions: + contents: read + issues: read + pull-requests: read + +network: defaults + +safe-outputs: + update-pull-request: + max: 1 + body: true + title: false + footer: false + target: "${{ github.event.issue.number }}" + noop: + report-as-issue: false +if: "github.event.repository.fork == false" +on: + slash_command: + name: tactics + events: [pull_request_comment] + reaction: "+1" + status-comment: false + +# ############################################################### +# Override the COPILOT_GITHUB_TOKEN secret usage for the workflow +# with a randomly-selected token from a pool of secrets. +# +# As soon as organization-level billing is offered for Agentic +# Workflows, this stop-gap approach will be removed. 
+# +# See: /.github/actions/select-copilot-pat/README.md +# ############################################################### + + # Add the pre-activation step of selecting a random PAT from the supplied secrets + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + name: Checkout the select-copilot-pat action folder + with: + persist-credentials: false + sparse-checkout: .github/actions/select-copilot-pat + sparse-checkout-cone-mode: true + fetch-depth: 1 + + - id: select-copilot-pat + name: Select Copilot token from pool + uses: ./.github/actions/select-copilot-pat + env: + SECRET_0: ${{ secrets.COPILOT_PAT_0 }} + SECRET_1: ${{ secrets.COPILOT_PAT_1 }} + SECRET_2: ${{ secrets.COPILOT_PAT_2 }} + SECRET_3: ${{ secrets.COPILOT_PAT_3 }} + SECRET_4: ${{ secrets.COPILOT_PAT_4 }} + SECRET_5: ${{ secrets.COPILOT_PAT_5 }} + SECRET_6: ${{ secrets.COPILOT_PAT_6 }} + SECRET_7: ${{ secrets.COPILOT_PAT_7 }} + SECRET_8: ${{ secrets.COPILOT_PAT_8 }} + SECRET_9: ${{ secrets.COPILOT_PAT_9 }} + +# Add the pre-activation output of the randomly selected PAT +jobs: + pre-activation: + outputs: + copilot_pat_number: ${{ steps.select-copilot-pat.outputs.copilot_pat_number }} + +# Override the COPILOT_GITHUB_TOKEN expression used in the activation job +# Consume the PAT number from the pre-activation step and select the corresponding secret +engine: + id: copilot + env: + # We cannot use line breaks in this expression as it leads to a syntax error in the compiled workflow + # If none of the `COPILOT_PAT_#` secrets were selected, then the default COPILOT_GITHUB_TOKEN is used + COPILOT_GITHUB_TOKEN: ${{ case(needs.pre_activation.outputs.copilot_pat_number == '0', secrets.COPILOT_PAT_0, needs.pre_activation.outputs.copilot_pat_number == '1', secrets.COPILOT_PAT_1, needs.pre_activation.outputs.copilot_pat_number == '2', secrets.COPILOT_PAT_2, needs.pre_activation.outputs.copilot_pat_number == '3', secrets.COPILOT_PAT_3, 
needs.pre_activation.outputs.copilot_pat_number == '4', secrets.COPILOT_PAT_4, needs.pre_activation.outputs.copilot_pat_number == '5', secrets.COPILOT_PAT_5, needs.pre_activation.outputs.copilot_pat_number == '6', secrets.COPILOT_PAT_6, needs.pre_activation.outputs.copilot_pat_number == '7', secrets.COPILOT_PAT_7, needs.pre_activation.outputs.copilot_pat_number == '8', secrets.COPILOT_PAT_8, needs.pre_activation.outputs.copilot_pat_number == '9', secrets.COPILOT_PAT_9, secrets.COPILOT_GITHUB_TOKEN) }} +--- + +## Add Tactics Template on Comment + +You are an expert .NET SDK engineer who helps fill in "tactics" for servicing pull requests in the dotnet/sdk repository. A servicing PR targets a stable release branch (e.g. `release/9.0.1xx`) and undergoes extra scrutiny before merging. Your job is to produce accurate, specific, and informative tactics based on the PR context. + +### Trigger Context + +- **Issue/PR number**: `${{ github.event.issue.number }}` +- **Repository**: `${{ github.repository }}` +- **Triggering actor**: `${{ github.actor }}` +- **Run URL**: `https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}` +- **Sanitized trigger context**: "${{ steps.sanitized.outputs.text }}" + +### Your Task + +Follow these steps precisely: + +#### Step 1: Gather PR context + +The `/tactics` command may optionally include an issue number (e.g. `/tactics #12345` or `/tactics 12345`). Parse the sanitized trigger context above to extract any issue number provided after the command. + +For PR #`${{ github.event.issue.number }}`, gather the following information: + +1. **PR details**: title, description/body, author, target (base) branch +2. **Files changed**: list filenames with addition/deletion counts (up to 20 files) +3. **PR comments**: all non-bot issue-level comments (exclude the triggering `/tactics` comment) +4. **Review comments**: all non-bot code-level review comments, including which file they reference +5. 
**Linked issue**: If the `/tactics` command included an issue number, use that. Otherwise, parse the PR body for keywords like `fixes #N`, `closes #N`, or `resolves #N` to find a linked issue. If found, fetch the issue title and body. + +#### Step 2: Generate tactics + +Based on all the gathered context, produce a tactics analysis following this exact template. Be specific and detailed—avoid vague statements. Do not speculate or invent details not present in the context. If information for a section is genuinely unavailable, say so clearly. + +**Guidelines per section:** + +- **Summary**: 2-4 sentences. State the root cause of the bug or regression being fixed, describe the exact code change made to address it, and explain why this fix is appropriate for a servicing release. +- **Customer Impact**: Describe concretely how customers are affected: the symptom (e.g. build error, runtime crash, incorrect output), the exact SDK version(s) impacted, the frequency/severity (all users vs. specific scenario), and any known workarounds. +- **Regression?**: Was this introduced by a specific PR or SDK version? Answer "Yes, introduced in #N (description)" or "Yes, introduced in vX.Y.Z" if known. If not a regression or unclear, say "No" or "Unknown — not enough information to determine origin". +- **Testing**: List all forms of validation: unit tests added or modified, integration tests, manual repro steps from PR comments, private/lab testing, and CI results. If only CI ran, say so explicitly. +- **Risk**: Rate as Low, Medium, or High. Justify the rating by referencing the scope of the change (e.g. lines changed, components touched), test coverage, and any known edge cases or risks. + +The output format must be exactly: + +``` +### Summary + +[your text] + +### Customer Impact + +[your text] + +### Regression? + +[your text] + +### Testing + +[your text] + +### Risk + +[your text] +``` + +#### Step 3: Apply tactics and report status + +1. 
Use the `update_pull_request` tool to update the **PR description** by adding (or replacing) a tactics section delimited by `<!-- TACTICS START -->` and `<!-- TACTICS END -->` markers. The section should contain: + ``` + <!-- TACTICS START --> + + ## Tactics + + [generated tactics content] + + <!-- TACTICS END --> + ``` + If the markers already exist in the body, replace the content between them. Otherwise, append the block at the end. + +2. If a linked issue was found and used as input, call the `noop` tool with a markdown summary: "✅ Tactics have been added to the PR description using context from issue #[issue number]." Include the PR number and a brief snippet of the generated tactics summary. + +3. If no linked issue was found, call the `noop` tool with a markdown summary: "✅ Tactics have been added to the PR description." Include the PR number and a brief snippet of the generated tactics summary. + +### Error Handling + +If any step fails unexpectedly, call the `noop` tool with a markdown summary including "❌ Failed to generate or apply tactics" and the error details. The framework will post the failure status automatically. diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index fffd778b9d48..5efe762cf00f 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -1,3 +1,9 @@ +# This workflow creates a "backport" pull request by cherry-picking changes +# from the source pull request to the target branch when /backport to +# is commented on a pull request. +# Action: https://github.com/dotnet/sdk/actions/workflows/backport.yml +# Keep documentation/project-docs/developer-guide.md up to date with changes in +# this workflow. 
name: Backport PR to branch on: issue_comment: @@ -17,4 +23,4 @@ jobs: pr_description_template: | Backport of #%source_pr_number% to %target_branch% - /cc %cc_users% \ No newline at end of file + /cc %cc_users% diff --git a/.github/workflows/fix-completions-on-comment.yml b/.github/workflows/fix-completions-on-comment.yml index 29f4bff3f363..e1a8c5051085 100644 --- a/.github/workflows/fix-completions-on-comment.yml +++ b/.github/workflows/fix-completions-on-comment.yml @@ -1,3 +1,8 @@ +# This workflow updates snapshots for CLI completion tests when +# /fixcompletions or /completions is commented on a pull request. +# Action: https://github.com/dotnet/sdk/actions/workflows/fix-completions-on-comment.yml +# Keep documentation/project-docs/developer-guide.md up to date with changes in +# this workflow. name: Fix completion snapshots on command on: diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 62de55321f6f..35ae30c4a8ab 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -3,6 +3,11 @@ on: schedule: - cron: '19 4,16 * * *' # Twice daily at 19 minutes after the hour (random/uncommon time) +env: + KBE_LABEL: 'Known Build Error' + STALE_LABEL: 'stale' + KEEP_OPEN_LABEL: 'keep-open' + permissions: actions: write # For managing the operation state cache issues: write @@ -17,10 +22,72 @@ jobs: - uses: actions/stale@v9 # https://github.com/actions/stale/blob/v9/README.md with: ascending: true # Process the oldest issues first - stale-issue-message: "Due to lack of recent activity, this issue has been labeled as 'Stale'. It will be closed if no further activity occurs within 30 more days. Any new comment will remove the label." + stale-issue-message: "Due to lack of recent activity, this issue has been labeled as 'Stale'. It will be closed if no further activity occurs within 14 more days. Any new comment will remove the label." stale-pr-message: "Due to lack of recent activity, this PR has been labeled as 'Stale'. 
It will be closed if no further activity occurs within 7 more days. Any new comment will remove the label." - days-before-issue-stale: 1827 # ~5 years - days-before-issue-close: 30 - days-before-pr-stale: 180 # 6 months + days-before-issue-stale: 1644 # ~4.5 years + days-before-issue-close: 14 + days-before-pr-stale: 120 # ~4 months days-before-pr-close: 7 + exempt-issue-labels: ${{ env.KBE_LABEL }} operations-per-run: 4000 + + # Known Build Error issues with 0 hits for a month get labeled Stale, + # then actions/stale closes them after 7 days. + stale-known-build-errors: + if: github.repository_owner == 'dotnet' # Do not run on forks + + runs-on: ubuntu-latest + steps: + # Step 1: Label/unlabel based on hit counts + - uses: actions/github-script@v7 + with: + script: | + const issues = await github.paginate( + github.rest.issues.listForRepo, + { owner: context.repo.owner, repo: context.repo.repo, + labels: process.env.KBE_LABEL, state: 'open', per_page: 100 }); + + for (const issue of issues) { + if (issue.pull_request) continue; + if (issue.labels.some(l => l.name === process.env.KEEP_OPEN_LABEL)) continue; + + // Parse the hit count summary table from the issue body. + // Format: |24-Hour Hit Count|7-Day Hit Count|1-Month Count| + // |---|---|---| + // |N|N|N| + const match = issue.body?.match( + /\|24-Hour Hit Count\|.*\n\|[-| ]+\n\|(\d+)\|(\d+)\|(\d+)\|/); + if (!match) continue; + const monthCount = parseInt(match[3], 10); + + const hasStale = issue.labels.some(l => l.name === process.env.STALE_LABEL); + + if (monthCount === 0 && !hasStale) { + // Zero hits for a month: mark as stale + await github.rest.issues.addLabels({ + ...context.repo, issue_number: issue.number, + labels: [process.env.STALE_LABEL] }); + await github.rest.issues.createComment({ + ...context.repo, issue_number: issue.number, + body: 'This Known Build Error has had **0 hits in the past month** and has been labeled `stale`. 
' + + 'It will be auto-closed in 7 days if the hit count remains at 0. ' + + 'Add the `' + process.env.KEEP_OPEN_LABEL + '` label to prevent auto-closure.' }); + } else if (monthCount > 0 && hasStale) { + // Hits resumed: remove the stale label + await github.rest.issues.removeLabel({ + ...context.repo, issue_number: issue.number, + name: process.env.STALE_LABEL }).catch(() => {}); + } + } + + # Step 2: Close KBE issues that have been Stale for 7+ days + - uses: actions/stale@v9 + with: + only-labels: ${{ env.KBE_LABEL }} + stale-issue-label: ${{ env.STALE_LABEL }} + close-issue-message: 'Closed automatically — this Known Build Error has had 0 hits for over a month.' + days-before-issue-stale: -1 # Don't auto-stale; step 1 handles that + days-before-issue-close: 7 + days-before-pr-stale: -1 + days-before-pr-close: -1 + operations-per-run: 100 diff --git a/.github/workflows/update-xlf-on-comment.yml b/.github/workflows/update-xlf-on-comment.yml index 5f67a325130a..98196608b97d 100644 --- a/.github/workflows/update-xlf-on-comment.yml +++ b/.github/workflows/update-xlf-on-comment.yml @@ -1,3 +1,8 @@ +# This workflow updates XLF files when /updatexlf or /xlf is commented on a +# pull request. +# Action: https://github.com/dotnet/sdk/actions/workflows/update-xlf-on-comment.yml +# Keep documentation/project-docs/developer-guide.md up to date with changes in +# this workflow. 
name: Update XLF files on command on: diff --git a/.vsts-pr.yml b/.vsts-pr.yml index ca91eabf32b6..3b7249dcc048 100644 --- a/.vsts-pr.yml +++ b/.vsts-pr.yml @@ -17,6 +17,7 @@ pr: - eng - build - src + - template_feed - test - '*.yml' - '*.props' diff --git a/cli.slnf b/cli.slnf index 82fdbf143408..55b6e2eec7c2 100644 --- a/cli.slnf +++ b/cli.slnf @@ -3,6 +3,8 @@ "path": "sdk.slnx", "projects": [ "src\\Dotnet.Watch\\dotnet-watch\\dotnet-watch.csproj", + "src\\Cli\\dn\\dn.csproj", + "src\\Cli\\dn\\dn-native-debug.vcxproj", "src\\Cli\\dotnet\\dotnet.csproj", "src\\Cli\\Microsoft.DotNet.Cli.Utils\\Microsoft.DotNet.Cli.Utils.csproj", "test\\dotnet-new.IntegrationTests\\dotnet-new.IntegrationTests.csproj", diff --git a/documentation/general/dotnet-run-file.md b/documentation/general/dotnet-run-file.md index bdaf0176fdaa..49a4946936d5 100644 --- a/documentation/general/dotnet-run-file.md +++ b/documentation/general/dotnet-run-file.md @@ -52,6 +52,8 @@ Additionally, the implicit project file has the following customizations: string? directoryPath = AppContext.GetData("EntryPointFileDirectoryPath") as string; ``` + - `EntryPointFilePath` property is set to the entry-point file path and is made visible to analyzers via `CompilerVisibleProperty`. + - `FileBasedProgram` property is set to `true` and can be used by SDK targets to detect file-based apps. - `DisableDefaultItemsInProjectFolder` property is set to `true` which results in `EnableDefaultItems=false` by default @@ -110,8 +112,8 @@ If a dash (`-`) is given instead of the target path (i.e., `dotnet run -`), the In this case, the current working directory is not used to search for other files (launch profiles, other sources in case of multi-file apps); the compilation consists solely of the single file read from the standard input. However, the current working directory is still used as the working directory for building and executing the program. 
-To reference projects relative to the current working directory (instead of relative to the temporary directory the file is isolated in), -you can use something like `#:project $(MSBuildStartupDirectory)/relative/path`. +To reference projects or files relative to the current working directory (instead of relative to the temporary directory the file is isolated in), +you can use something like `#:project $(MSBuildStartupDirectory)/relative/path` or `#:ref $(MSBuildStartupDirectory)/relative/lib.cs`. `dotnet path.cs` is a shortcut for `dotnet run --file path.cs` provided that `path.cs` is a valid [target path](#target-path) (`dotnet -` is currently not supported) and it is not a DLL path, built-in command, or a NuGet tool (e.g., `dotnet watch` invokes the `dotnet-watch` tool @@ -165,7 +167,7 @@ removes current user's `dotnet run` build outputs that haven't been used in 30 d They are not cleaned immediately because they can be re-used on subsequent runs for better performance. The automatic cleanup can be disabled by environment variable `DOTNET_CLI_DISABLE_FILE_BASED_APP_ARTIFACTS_AUTOMATIC_CLEANUP=true`, but other parameters of the automatic cleanup are currently not configurable. -The same cleanup can be performed manually via command `dotnet clean-file-based-app-artifacts`. +The same cleanup can be performed manually via command `dotnet clean file-based-apps`. ## Directives for project metadata @@ -178,11 +180,12 @@ which are [ignored][ignored-directives] by the C# language but recognized by the #:property LangVersion=preview #:package System.CommandLine@2.0.0-* #:project ../MyLibrary +#:ref ../lib/lib.cs #:include ./**/*.cs ``` Each directive has a kind (e.g., `package`), a name (e.g., `System.CommandLine`), a separator (e.g., `@`), and a value (e.g., the package version). -The value is required for `#:property`, optional for `#:package`/`#:sdk`, and disallowed for `#:project`/`#:include`. 
+The value is required for `#:property`, optional for `#:package`/`#:sdk`, and disallowed for `#:project`/`#:ref`/`#:include`. The name must be separated from the kind of the directive by whitespace and any leading and trailing white space is not considered part of the name and value. @@ -209,28 +212,43 @@ The directives are processed as follows: (because `ProjectReference` items don't support directory paths). An error is reported if zero or more than one projects are found in the directory, just like `dotnet reference add` would do. +- Each `#:ref` references another `.cs` file as a separate project reference. + A virtual project is created for the referenced file (e.g., `lib.cs` produces a virtual `lib.cs.csproj`), + and a `<ProjectReference>` is injected in an `<ItemGroup>`. + It is an error if the name is empty or if the referenced file does not exist. + Unlike `#:project`, `#:ref` points to a `.cs` file (not a `.csproj` file or directory). + + The referenced file is itself a file-based program with its own virtual project (defaulting to `OutputType=Exe`). + Library files without an entry point should use `#:property OutputType=Library` to avoid compilation errors. + Because the referenced file is compiled as a separate assembly, internal members of the referenced file are not accessible from the referencing file. + The `#:ref` directive is transitive: a referenced file can itself contain `#:ref` directives (or any other directives). + + Relative paths are resolved relative to the file containing the directive. + MSBuild variables (like `$(MSBuildProjectDirectory)`) can be used in the path. + + During [conversion](#grow-up), each `#:ref` directive creates a separate library project in a sibling directory + and a corresponding `<ProjectReference>` entry is added to the converted project. + The conversion is recursive: any `#:ref` directives in the referenced files are also converted in the same way. 
+ + This directive is currently gated under a feature flag that can be enabled by setting the MSBuild property `ExperimentalFileBasedProgramEnableRefDirective=true`. + - Each `#:include` is injected as `<{1} Include="{0}" />` in an `<ItemGroup>` where `{0}` is the directive's value and `{1}` is determined by its extension. The mapping can be customized by setting the MSBuild property `FileBasedProgramsItemMapping` which is by default set to `.cs=Compile;.resx=EmbeddedResource;.json=None;.razor=Content`. - (The mapping customization is currently gated under a feature flag that can be enabled by setting the MSBuild property `ExperimentalFileBasedProgramEnableItemMapping=true`.) It is an error if the value is empty. Relative paths are resolved relative to the file containing the directive. - This directive is currently gated under a feature flag that can be enabled by setting the MSBuild property `ExperimentalFileBasedProgramEnableIncludeDirective=true`. - - Each `#:exclude` is injected similarly to `#:include` but with `Remove="{0}"` instead of `Include="{0}"`. - This directive is currently gated under a feature flag that can be enabled by setting the MSBuild property `ExperimentalFileBasedProgramEnableExcludeDirective=true`. - - Other directive kinds result in an error, reserving them for future use. Directive values support MSBuild variables (like `$(..)`) normally as they are translated literally and left to MSBuild engine to process. 
-However, in `#:project` directives, variables might not be preserved during [grow up](#grow-up), +However, in `#:project` and `#:ref` directives, variables might not be preserved during [grow up](#grow-up), because there is additional processing of those directives that makes it technically challenging to preserve variables in all cases -(project directive values need to be resolved to be relative to the target directory +(project and ref directive values need to be resolved to be relative to the target directory and also to point to a project file rather than a directory). Note that it is not expected that variables inside the path change their meaning during the conversion, so for example `#:project ../$(LibName)` is translated to `<ProjectReference Include="../$(LibName)" />` (i.e., the variable is preserved). @@ -243,7 +261,6 @@ and can do that efficiently by stopping the search when it sees the first "C# to For a given `dotnet run file.cs`, we include directives from the current entry point file (`file.cs`) and all other non-entry-point C# files, specifically from all `Compile` items included in the project, no matter whether the `Compile` items are specified in some MSBuild code or inferred from `#:include`. -(Processing directives from other files is currently gated under a feature flag that can be enabled by setting the MSBuild property `ExperimentalFileBasedProgramEnableTransitiveDirectives=true`.) The order in which other files are processed is currently unspecified (can change across SDK versions) but deterministic (stable in a given SDK version). 
We do not limit these directives to appear only in entry point files because it allows: - a non-entry-point file like `Util.cs` to be self-contained and have all the `#:package`s it needs specified in it, @@ -270,6 +287,11 @@ Along with `#:`, the language also ignores `#!` which could be then used for [sh Console.WriteLine("Hello"); ``` +When a file-based program uses [`#:include`](#multiple-files) directives to include additional files, +the entry point file should start with `#!` to clearly distinguish it from included files. +This helps IDEs to properly handle multi-file scenarios and discover entry points. +The analyzer **CA2266** reports a warning if the entry point file is missing the shebang line in this scenario. + ## Implementation The build is performed using MSBuild APIs on in-memory project files. diff --git a/documentation/project-docs/developer-guide.md b/documentation/project-docs/developer-guide.md index e1bd2e70a080..6ab85c6deca7 100644 --- a/documentation/project-docs/developer-guide.md +++ b/documentation/project-docs/developer-guide.md @@ -180,9 +180,12 @@ taskkill /F /IM msbuild.exe /T The SDK repository includes GitHub Actions workflows that automate common maintenance tasks directly from pull requests. -### `/xlf or /updatexlf` - Update Translation Files +### `/xlf` or `/updatexlf` - Update Translation Files -When you modify `.resx` resource files, the corresponding `.xlf` translation files need to be updated. Instead of manually running the build locally, comment `/updatexlf` on the PR and the GitHub Action will: +- Workflow: [Update XLF files on command](https://github.com/dotnet/sdk/actions/workflows/update-xlf-on-comment.yml) +- Source: [update-xlf-on-comment.yml](../../.github/workflows/update-xlf-on-comment.yml) + +When you modify `.resx` resource files, the corresponding `.xlf` translation files need to be updated. Instead of manually running the build locally, comment `/xlf` or `/updatexlf` on the PR and the GitHub Action will: 1. 
Check out the PR branch 2. Run `./build.sh /t:UpdateXlf` (or full build if needed) @@ -192,9 +195,12 @@ This is useful when you've changed localized strings and the CI build is failing See also: [Localization documentation](Localization.md) -### `/completions or /fixcompletions` - Update CLI Completion Snapshots +### `/completions` or `/fixcompletions` - Update CLI Completion Snapshots + +- Workflow: [Fix completion snapshots on command](https://github.com/dotnet/sdk/actions/workflows/fix-completions-on-comment.yml) +- Source: [fix-completions-on-comment.yml](../../.github/workflows/fix-completions-on-comment.yml) -The CLI includes snapshot-based tests for shell completions (bash, zsh, pwsh, etc.). When you add or modify CLI commands, these snapshots need to be updated. Comment `/fixcompletions` on the PR and the GitHub Action will: +The CLI includes snapshot-based tests for shell completions (bash, zsh, pwsh, etc.). When you add or modify CLI commands, these snapshots need to be updated. Comment `/completions` or `/fixcompletions` on the PR and the GitHub Action will: 1. Build the repository 2. Run the completion tests @@ -205,12 +211,34 @@ This is useful when you've added new commands or options and the completion snap See also: [Snapshot-based testing documentation](snapshot-based-testing.md) +### `/backport to <branch>` + +- Workflow: [Backport PR to branch](https://github.com/dotnet/sdk/actions/workflows/backport.yml) + +- Source: [backport.yml](../../.github/workflows/backport.yml) + +Comment `/backport to <branch>` on a merged PR to create a new pull request that cherry-picks the changes onto the target branch. The backport PR will be labeled with `backport` and will CC the original PR participants. + +### `/ba-g <reason>` - Bypass Build Analysis + +Comment `/ba-g <reason>` on a PR to unconditionally turn the Build Analysis check green. The reason +is captured by telemetry and should be descriptive - avoid non-specific justifications like +"unrelated issues". 
This is useful when CI failures are caused by known flaky tests or +infrastructure issues unrelated to the PR. -### `/backport to {branch}` Example reasons: -The SDK team manages many branches. -You may use /backport to {branch_name} to mimic the process of cherry-picking a PR onto another branch. +- `/ba-g deadletter` - Helix work item crashed with "DeadLetter" status. +- `/ba-g docs-only change` - PR did not change any source code. +- `/ba-g insufficient info in logs` - No good unique pattern in the logs to open a known issue. +- `/ba-g recently fixed known issue #<issue-number>` - The known issue fix was already merged, but CI ran + before it. +- `/ba-g failures are from known issues #<issue-number>, #<issue-number>` - All failures have known issues + filed, but Build Analysis isn't turning green. +For details on triaging CI failures and filing known issues, see the [runtime failure analysis +documentation](https://github.com/dotnet/runtime/blob/main/docs/workflow/ci/failure-analysis.md). +The [Build Analysis Known Issue Helper](https://helix.dot.net/BuildAnalysis/CreateKnownIssues) can +assist in creating known issue reports with the correct labels and JSON format. 
## Adding a Command diff --git a/dotnet-tools.json b/dotnet-tools.json new file mode 100644 index 000000000000..b0e38abdace3 --- /dev/null +++ b/dotnet-tools.json @@ -0,0 +1,5 @@ +{ + "version": 1, + "isRoot": true, + "tools": {} +} \ No newline at end of file diff --git a/eng/Versions.props b/eng/Versions.props index 3e37a6104252..bf010a0af66e 100644 --- a/eng/Versions.props +++ b/eng/Versions.props @@ -34,9 +34,9 @@ 36 20 - 25 - 14 - 5 + 26 + 15 + 7 <_NET70ILLinkPackVersion>7.0.100-1.23211.1 @@ -61,7 +61,6 @@ 1.0.52 1.4.0 1.12.0 - 6.13.2 diff --git a/eng/dotnet-format/dotnet-format-integration.yml b/eng/dotnet-format/dotnet-format-integration.yml index 26ecd59edc46..8df9e7fe7b05 100644 --- a/eng/dotnet-format/dotnet-format-integration.yml +++ b/eng/dotnet-format/dotnet-format-integration.yml @@ -8,19 +8,21 @@ parameters: - name: TestArguments type: object default: + # Commit SHAs align with the .NET 10.0 GA release (v10.0.100). + # See https://github.com/dotnet/dotnet/blob/v10.0.100/src/source-manifest.json - Name: Roslyn _repo: "https://github.com/dotnet/roslyn" _repoName: "dotnet/roslyn" _targetSolution: "Compilers.slnf" _branchName: "main" - _sha: "a3bb37003aeccad012a6e7dd220977599e8b8e65" + _sha: "739dc0e352a331e8a41cd66c09d2edf359255365" _useParentSdk: 0 - Name: sdk _repo: "https://github.com/dotnet/sdk" _repoName: "dotnet/sdk" _targetSolution: "sdk.slnx" _branchName: "main" - _sha: "be25db95c376bffd508a023399ddd34392fe6458" + _sha: "e6bc966cc3d1348265b0831c6daca23267169d8f" _useParentSdk: 0 - Name: project_system _repo: "https://github.com/dotnet/project-system" @@ -34,21 +36,21 @@ parameters: _repoName: "dotnet/msbuild" _targetSolution: "MSBuild.sln" _branchName: "main" - _sha: "f4fa6bde775a3f7cbb2bb90a349ee5fc759114f3" + _sha: "995a3dce41788caebf2b8ca6602a7431f08bfd06" _useParentSdk: 0 - Name: aspnetcore _repo: "https://github.com/dotnet/aspnetcore" _repoName: "dotnet/aspnetcore" - _targetSolution: "AspNetCore.sln" + _targetSolution: "AspNetCore.slnx" 
_branchName: "main" - _sha: "d765d7ba4871a8c2cb38d4134553d3be9a7370d7" + _sha: "7387de91234d3ef751fa50b3d1bfede4130213ff" _useParentSdk: 0 - Name: efcore _repo: "https://github.com/dotnet/efcore" _repoName: "dotnet/efcore" - _targetSolution: "All.sln" + _targetSolution: "EFCore.sln" _branchName: "main" - _sha: "1b2ff365399ab6736a9ea4c98ab1b60acda5d917" + _sha: "12b8d44bf691d2e6933a6d1003647cce4f13c3d3" _useParentSdk: 0 - name: timeoutInMinutes diff --git a/eng/dotnet-format/format-verifier.ps1 b/eng/dotnet-format/format-verifier.ps1 index eba185f3188c..c165a5dd7f22 100644 --- a/eng/dotnet-format/format-verifier.ps1 +++ b/eng/dotnet-format/format-verifier.ps1 @@ -56,8 +56,9 @@ try { if ($stage -eq "prepare" -or $stage -eq "format-workspace") { Write-Output "$(Get-Date) - Finding solutions." - $solutions = Get-ChildItem -Filter *.sln -Recurse -Depth 2 | Select-Object -ExpandProperty FullName | Where-Object { $_ -match '.sln$' } + $solutions = Get-ChildItem -Include *.sln,*.slnf,*.slnx -Recurse -Depth 2 | Select-Object -ExpandProperty FullName + $solutionFound = $false foreach ($solution in $solutions) { $solutionPath = Split-Path $solution $solutionFile = Split-Path $solution -leaf @@ -66,6 +67,7 @@ try { continue } + $solutionFound = $true Set-Location $solutionPath if ($stage -eq "prepare") { @@ -99,6 +101,12 @@ try { Write-Output "$(Get-Date) - $solutionFile - Complete" } + + if (-not $solutionFound) { + $solutionNames = $solutions | ForEach-Object { Split-Path $_ -Leaf } + Write-Output "$(Get-Date) - Target solution '$targetSolution' was not found. 
Available solutions ($($solutionNames.Count)): $($solutionNames -join ', ')" + exit -1 + } } if ($stage -eq "format-folder") { diff --git a/eng/pipelines/templates/jobs/sdk-build.yml b/eng/pipelines/templates/jobs/sdk-build.yml index 7d395c946674..3c9f292cc157 100644 --- a/eng/pipelines/templates/jobs/sdk-build.yml +++ b/eng/pipelines/templates/jobs/sdk-build.yml @@ -100,7 +100,6 @@ jobs: ${{ parameters.runtimeSourceProperties }} ${{ parameters.officialBuildProperties }} /p:DotNetSignType=$(_SignType) /p:TeamName=$(_TeamName) - /graph displayName: 🟣 Build env: BuildConfig: $(buildConfiguration) @@ -125,8 +124,7 @@ jobs: ${{ parameters.osProperties }} \ ${{ parameters.runtimeSourceProperties }} \ ${{ parameters.officialBuildProperties }} \ - /p:DotNetSignType=$(_SignType) /p:TeamName=$(_TeamName) \ - /graph + /p:DotNetSignType=$(_SignType) /p:TeamName=$(_TeamName) displayName: 🟣 Build env: BuildConfig: $(buildConfiguration) diff --git a/sdk.slnx b/sdk.slnx index c1a01bb05837..a99095280303 100644 --- a/sdk.slnx +++ b/sdk.slnx @@ -67,6 +67,9 @@ + + + diff --git a/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListCommandDefinition.cs b/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListCommandDefinition.cs index ec63176267cd..cefd8ab10014 100644 --- a/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListCommandDefinition.cs +++ b/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListCommandDefinition.cs @@ -20,7 +20,7 @@ public static Argument CreateSlnOrProjectArgument(string name, string de Arity = ArgumentArity.ZeroOrOne }.DefaultToCurrentDirectory(); - public readonly Argument SlnOrProjectArgument = CreateSlnOrProjectArgument(CommandDefinitionStrings.SolutionOrProjectArgumentName, CommandDefinitionStrings.SolutionOrProjectArgumentDescription); + public readonly Argument SlnOrProjectOrFileArgument = CreateSlnOrProjectArgument(CommandDefinitionStrings.SolutionOrProjectOrFileArgumentName, 
CommandDefinitionStrings.SolutionOrProjectOrFileArgumentDescription); public readonly ListPackageCommandDefinition PackageCommand = new(); public readonly ListReferenceCommandDefinition ReferenceCommand = new(); @@ -31,7 +31,7 @@ public ListCommandDefinition() Hidden = true; this.DocsLink = Link; - Arguments.Add(SlnOrProjectArgument); + Arguments.Add(SlnOrProjectOrFileArgument); Subcommands.Add(PackageCommand); Subcommands.Add(ReferenceCommand); } diff --git a/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListPackageCommandDefinition.cs b/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListPackageCommandDefinition.cs index f79679563733..6e9d02b473c8 100644 --- a/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListPackageCommandDefinition.cs +++ b/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListPackageCommandDefinition.cs @@ -12,6 +12,6 @@ internal sealed class ListPackageCommandDefinition() : PackageListCommandDefinit public ListCommandDefinition Parent => (ListCommandDefinition)Parents.Single(); - public override string? GetFileOrDirectory(ParseResult parseResult) - => parseResult.GetValue(Parent.SlnOrProjectArgument); + public override Argument? GetProjectOrFileArgument() + => Parent.SlnOrProjectOrFileArgument; } diff --git a/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListReferenceCommandDefinition.cs b/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListReferenceCommandDefinition.cs index 748187d7f4fe..d49e703a2d42 100644 --- a/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListReferenceCommandDefinition.cs +++ b/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Hidden/List/ListReferenceCommandDefinition.cs @@ -19,7 +19,7 @@ public ListReferenceCommandDefinition() : base(Name) public ListCommandDefinition Parent => (ListCommandDefinition)Parents.Single(); internal override string? 
GetFileOrDirectory(ParseResult parseResult) - => parseResult.GetValue(Parent.SlnOrProjectArgument); + => parseResult.GetValue(Parent.SlnOrProjectOrFileArgument); } internal abstract class ListReferenceCommandDefinitionBase : Command diff --git a/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Package/PackageListCommandDefinition.cs b/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Package/PackageListCommandDefinition.cs index 9e0eb70520c9..2db06fe0fb97 100644 --- a/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Package/PackageListCommandDefinition.cs +++ b/src/Cli/Microsoft.DotNet.Cli.Definitions/Commands/Package/PackageListCommandDefinition.cs @@ -6,20 +6,12 @@ namespace Microsoft.DotNet.Cli.Commands.Package.List; -internal sealed class PackageListCommandDefinition : PackageListCommandDefinitionBase +internal sealed class PackageListCommandDefinition() : PackageListCommandDefinitionBase(Name) { public new const string Name = "list"; - public readonly Option ProjectOption = PackageCommandDefinition.CreateProjectOption(); - - public PackageListCommandDefinition() - : base(Name) - { - Options.Add(ProjectOption); - } - - public override string? GetFileOrDirectory(ParseResult parseResult) - => parseResult.GetValue(ProjectOption); + public override Argument? 
GetProjectOrFileArgument() + => null; } internal abstract class PackageListCommandDefinitionBase : Command @@ -110,6 +102,9 @@ internal abstract class PackageListCommandDefinitionBase : Command Description = CommandDefinitionStrings.CmdOutputVersionDescription }.ForwardAsSingle(o => $"--output-version:{o}"); + public readonly Option ProjectOption = PackageCommandDefinition.CreateProjectOption(); + public readonly Option FileOption = PackageCommandDefinition.CreateFileOption(); + public PackageListCommandDefinitionBase(string name) : base(name, CommandDefinitionStrings.PackageListAppFullName) { @@ -128,9 +123,11 @@ public PackageListCommandDefinitionBase(string name) Options.Add(FormatOption); Options.Add(OutputVersionOption); Options.Add(NoRestore); + Options.Add(ProjectOption); + Options.Add(FileOption); } - public abstract string? GetFileOrDirectory(ParseResult parseResult); + public abstract Argument? GetProjectOrFileArgument(); public void EnforceOptionRules(ParseResult parseResult) { diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/FileBasedProgramsResources.resx b/src/Cli/Microsoft.DotNet.FileBasedPrograms/FileBasedProgramsResources.resx index cbe3d3838aa3..57a847ae25f7 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/FileBasedProgramsResources.resx +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/FileBasedProgramsResources.resx @@ -165,6 +165,14 @@ The '#:project' directive is invalid: {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + {Locked="#:ref"}{0} is the inner error message. + + + Could not find file '{0}'. + {0} is the file path. + Missing name of '{0}'. {0} is the directive name like 'package' or 'sdk'. 
diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/FileLevelDirectiveHelpers.cs b/src/Cli/Microsoft.DotNet.FileBasedPrograms/FileLevelDirectiveHelpers.cs index 89413dc860cc..2303d48b4c2e 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/FileLevelDirectiveHelpers.cs +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/FileLevelDirectiveHelpers.cs @@ -323,6 +323,7 @@ public void ReportError(TextSpan span, string message) case "property": return Property.Parse(context); case "package": return Package.Parse(context); case "project": return Project.Parse(context); + case "ref": return Ref.Parse(context); case "include" or "exclude": return IncludeOrExclude.Parse(context); default: context.ReportError(string.Format(FileBasedProgramsResources.UnrecognizedDirective, context.DirectiveKind)); @@ -587,6 +588,100 @@ void ReportError(string message) public override string ToString() => $"#:project {Name}"; } + /// + /// #:ref directive. References another file-based app as a library. + /// + public sealed class Ref : Named + { + public const string ExperimentalFileBasedProgramEnableRefDirective = nameof(ExperimentalFileBasedProgramEnableRefDirective); + + [SetsRequiredMembers] + public Ref(in ParseInfo info, string name) : base(info) + { + Name = name; + OriginalName = name; + } + + /// + /// Preserved across calls, i.e., + /// this is the original directive text as entered by the user. + /// + public string OriginalName { get; init; } + + /// + /// This is the with MSBuild $(..) vars expanded. + /// + public string? ExpandedName { get; init; } + + /// + /// The resolved full path to the referenced .cs file. + /// + public string? ResolvedPath { get; init; } + + public static new Ref? 
Parse(in ParseContext context) + { + var directiveText = context.DirectiveText; + if (directiveText.IsWhiteSpace()) + { + context.ReportError(string.Format(FileBasedProgramsResources.MissingDirectiveName, context.DirectiveKind)); + return null; + } + + return new Ref(context.Info, directiveText); + } + + public enum NameKind + { + /// + /// Change and . + /// + Expanded = 1, + + /// + /// Change and . + /// + Resolved = 2, + + /// + /// Change only . + /// + Final = 3, + } + + public Ref WithName(string name, NameKind kind) + { + return new Ref(Info, name) + { + OriginalName = OriginalName, + ExpandedName = kind == NameKind.Expanded ? name : ExpandedName, + ResolvedPath = kind == NameKind.Resolved ? name : ResolvedPath, + }; + } + + /// + /// Resolves the path relative to the source file's directory. + /// + public Ref EnsureResolvedPath(ErrorReporter errorReporter) + { + var sourcePath = Info.SourceFile.Path; + var sourceDirectory = Path.GetDirectoryName(sourcePath) + ?? throw new InvalidOperationException($"Source file path '{sourcePath}' does not have a containing directory."); + + var resolvedFilePath = Path.GetFullPath(Path.Combine(sourceDirectory, Name.Replace('\\', '/'))); + + if (!File.Exists(resolvedFilePath)) + { + errorReporter(Info.SourceFile.Text, sourcePath, Info.Span, + string.Format(FileBasedProgramsResources.InvalidRefDirective, + string.Format(FileBasedProgramsResources.CouldNotFindRefFile, resolvedFilePath))); + } + + return WithName(resolvedFilePath, NameKind.Resolved); + } + + public override string ToString() => $"#:ref {Name}"; + } + public enum IncludeOrExcludeKind { Include, @@ -598,11 +693,6 @@ public enum IncludeOrExcludeKind /// public sealed class IncludeOrExclude(in ParseInfo info) : Named(info) { - public const string ExperimentalFileBasedProgramEnableIncludeDirective = nameof(ExperimentalFileBasedProgramEnableIncludeDirective); - public const string ExperimentalFileBasedProgramEnableExcludeDirective = 
nameof(ExperimentalFileBasedProgramEnableExcludeDirective); - public const string ExperimentalFileBasedProgramEnableTransitiveDirectives = nameof(ExperimentalFileBasedProgramEnableTransitiveDirectives); - public const string ExperimentalFileBasedProgramEnableItemMapping = nameof(ExperimentalFileBasedProgramEnableItemMapping); - public const string MappingPropertyName = "FileBasedProgramsItemMapping"; public static string DefaultMappingString => ".cs=Compile;.resx=EmbeddedResource;.json=None;.razor=Content"; diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/InternalAPI.Unshipped.txt b/src/Cli/Microsoft.DotNet.FileBasedPrograms/InternalAPI.Unshipped.txt index 8beab97ae92c..cb463f26bf99 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/InternalAPI.Unshipped.txt +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/InternalAPI.Unshipped.txt @@ -1,8 +1,5 @@ -const Microsoft.DotNet.FileBasedPrograms.CSharpDirective.IncludeOrExclude.ExperimentalFileBasedProgramEnableExcludeDirective = "ExperimentalFileBasedProgramEnableExcludeDirective" -> string! -const Microsoft.DotNet.FileBasedPrograms.CSharpDirective.IncludeOrExclude.ExperimentalFileBasedProgramEnableIncludeDirective = "ExperimentalFileBasedProgramEnableIncludeDirective" -> string! -const Microsoft.DotNet.FileBasedPrograms.CSharpDirective.IncludeOrExclude.ExperimentalFileBasedProgramEnableItemMapping = "ExperimentalFileBasedProgramEnableItemMapping" -> string! -const Microsoft.DotNet.FileBasedPrograms.CSharpDirective.IncludeOrExclude.ExperimentalFileBasedProgramEnableTransitiveDirectives = "ExperimentalFileBasedProgramEnableTransitiveDirectives" -> string! const Microsoft.DotNet.FileBasedPrograms.CSharpDirective.IncludeOrExclude.MappingPropertyName = "FileBasedProgramsItemMapping" -> string! +const Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.ExperimentalFileBasedProgramEnableRefDirective = "ExperimentalFileBasedProgramEnableRefDirective" -> string! 
Microsoft.DotNet.FileBasedPrograms.CSharpDirective Microsoft.DotNet.FileBasedPrograms.CSharpDirective.CSharpDirective(in Microsoft.DotNet.FileBasedPrograms.CSharpDirective.ParseInfo info) -> void Microsoft.DotNet.FileBasedPrograms.CSharpDirective.IncludeOrExclude @@ -71,6 +68,20 @@ Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Property Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Property.Property(in Microsoft.DotNet.FileBasedPrograms.CSharpDirective.ParseInfo info) -> void Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Property.Value.get -> string! Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Property.Value.init -> void +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.EnsureResolvedPath(Microsoft.DotNet.FileBasedPrograms.ErrorReporter! errorReporter) -> Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref! +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.ExpandedName.get -> string? +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.ExpandedName.init -> void +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.NameKind +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.NameKind.Expanded = 1 -> Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.NameKind +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.NameKind.Final = 3 -> Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.NameKind +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.NameKind.Resolved = 2 -> Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.NameKind +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.OriginalName.get -> string! +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.OriginalName.init -> void +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.Ref(in Microsoft.DotNet.FileBasedPrograms.CSharpDirective.ParseInfo info, string! name) -> void +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.ResolvedPath.get -> string? 
+Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.ResolvedPath.init -> void +Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.WithName(string! name, Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.NameKind kind) -> Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref! Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Sdk Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Sdk.Sdk(in Microsoft.DotNet.FileBasedPrograms.CSharpDirective.ParseInfo info) -> void Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Sdk.Version.get -> string? @@ -123,6 +134,7 @@ override Microsoft.DotNet.FileBasedPrograms.CSharpDirective.IncludeOrExclude.ToS override Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Package.ToString() -> string! override Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Project.ToString() -> string! override Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Property.ToString() -> string! +override Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.ToString() -> string! override Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Sdk.ToString() -> string! override Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Shebang.ToString() -> string! override Microsoft.DotNet.FileBasedPrograms.SourceFile.GetHashCode() -> int @@ -134,6 +146,7 @@ static Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Package.Parse(in Micro static Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Parse(in Microsoft.DotNet.FileBasedPrograms.CSharpDirective.ParseContext context) -> Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Named? static Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Project.Parse(in Microsoft.DotNet.FileBasedPrograms.CSharpDirective.ParseContext context) -> Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Project? static Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Property.Parse(in Microsoft.DotNet.FileBasedPrograms.CSharpDirective.ParseContext context) -> Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Property? 
+static Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref.Parse(in Microsoft.DotNet.FileBasedPrograms.CSharpDirective.ParseContext context) -> Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Ref? static Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Sdk.Parse(in Microsoft.DotNet.FileBasedPrograms.CSharpDirective.ParseContext context) -> Microsoft.DotNet.FileBasedPrograms.CSharpDirective.Sdk? static Microsoft.DotNet.FileBasedPrograms.ErrorReporters.CreateCollectingReporter(out System.Collections.Immutable.ImmutableArray.Builder! builder) -> Microsoft.DotNet.FileBasedPrograms.ErrorReporter! static Microsoft.DotNet.FileBasedPrograms.ExternalHelpers.CombineHashCodes(int value1, int value2) -> int diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.cs.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.cs.xlf index fccd6ec81449..a5cc8144f6ea 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.cs.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.cs.xlf @@ -17,6 +17,11 @@ Nenašel se projekt ani adresář {0}. + + Could not find file '{0}'. + Soubor {0} nebyl nalezen. + {0} is the file path. + error chyba @@ -57,6 +62,11 @@ Direktiva #:project je neplatná: {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + Direktiva #:ref je neplatná: {0}. + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. Chybí název pro: {0}. diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.de.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.de.xlf index acfc69549e69..0a2844c607d6 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.de.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.de.xlf @@ -17,6 +17,11 @@ Das Projekt oder Verzeichnis "{0}" wurde nicht gefunden. 
+ + Could not find file '{0}'. + Die Datei "{0}" konnte nicht gefunden werden. + {0} is the file path. + error Fehler @@ -57,6 +62,11 @@ Die Anweisung „#:p roject“ ist ungültig: {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + Die „#:ref“-Direktive ist ungültig: {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. Fehlender Name der Anweisung „{0}“. diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.es.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.es.xlf index 7fcfcf9a443e..1e8f0b7167a2 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.es.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.es.xlf @@ -17,6 +17,11 @@ No se encuentra el proyecto o directorio "{0}". + + Could not find file '{0}'. + No se pudo encontrar el archivo '{0}'. + {0} is the file path. + error error @@ -57,6 +62,11 @@ La directiva "#:project" no es válida: {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + La directiva "#:ref" no es válida: {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. Falta el nombre de "{0}". diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.fr.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.fr.xlf index 88c34c93ba8a..61d4881186ad 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.fr.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.fr.xlf @@ -17,6 +17,11 @@ Projet ou répertoire '{0}' introuvable. + + Could not find file '{0}'. + Impossible de trouver le fichier '{0}'. + {0} is the file path. + error erreur @@ -57,6 +62,11 @@ La directive « #:project » n’est pas valide : {0} {0} is the inner error message. 
+ + The '#:ref' directive is invalid: {0} + La directive « #:ref » est invalide : {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. Nom manquant pour « {0} ». diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.it.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.it.xlf index 655d5b367356..08aa5428a90b 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.it.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.it.xlf @@ -17,6 +17,11 @@ Non sono stati trovati progetti o directory `{0}`. + + Could not find file '{0}'. + Il file '{0}' non è stato trovato. + {0} is the file path. + error errore @@ -57,6 +62,11 @@ La direttiva '#:project' non è valida: {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + La direttiva "#:ref" non è valida: {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. Manca il nome di '{0}'. diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ja.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ja.xlf index 2d2cc195c23a..259a6b3fb80c 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ja.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ja.xlf @@ -17,6 +17,11 @@ プロジェクトまたはディレクトリ `{0}` が見つかりませんでした。 + + Could not find file '{0}'. + ファイル '{0}' が見つかりませんでした。 + {0} is the file path. + error エラー @@ -57,6 +62,11 @@ '#:p roject' ディレクティブが無効です: {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + '#:ref' ディレクティブが無効です: {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. 
'{0}' の名前がありません。 diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ko.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ko.xlf index 7082b47f9aa8..5226d09c601e 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ko.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ko.xlf @@ -17,6 +17,11 @@ 프로젝트 또는 디렉터리 {0}을(를) 찾을 수 없습니다. + + Could not find file '{0}'. + '{0}' 파일을 찾을 수 없습니다. + {0} is the file path. + error 오류 @@ -57,6 +62,11 @@ '#:p roject' 지시문이 잘못되었습니다. {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + ‘#:ref’ 지시문이 잘못되었습니다: {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. '{0}' 이름이 없습니다. diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.pl.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.pl.xlf index 4cd99b2c63c3..206da655e6ed 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.pl.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.pl.xlf @@ -17,6 +17,11 @@ Nie można odnaleźć projektu ani katalogu „{0}”. + + Could not find file '{0}'. + Nie można odnaleźć pliku '{0}'. + {0} is the file path. + error błąd @@ -57,6 +62,11 @@ Dyrektywa „#:project” jest nieprawidłowa: {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + Dyrektywa „#:ref” jest nieprawidłowa: {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. Brak nazwy „{0}”. 
diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.pt-BR.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.pt-BR.xlf index a5ef266abb7e..b389de338664 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.pt-BR.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.pt-BR.xlf @@ -17,6 +17,11 @@ Não foi possível encontrar o projeto ou diretório ‘{0}’. + + Could not find file '{0}'. + Não foi possível encontrar arquivo "{0}". + {0} is the file path. + error erro @@ -57,6 +62,11 @@ A diretiva '#:project' é inválida:{0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + A diretiva '#:ref' é inválida: {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. Nome de '{0}' ausente. diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ru.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ru.xlf index 3405fe15916b..bc0686e203f6 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ru.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.ru.xlf @@ -17,6 +17,11 @@ Не удалось найти проект или каталог "{0}". + + Could not find file '{0}'. + Не удалось найти файл "{0}". + {0} is the file path. + error ошибка @@ -57,6 +62,11 @@ Недопустимая директива "#:project": {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + Недопустимая директива "#:ref": {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. Отсутствует имя "{0}". 
diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.tr.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.tr.xlf index 40ee3f703b98..5db8e086b974 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.tr.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.tr.xlf @@ -17,6 +17,11 @@ `{0}` projesi veya dizini bulunamadı. + + Could not find file '{0}'. + '{0}' dosyası bulunamadı. + {0} is the file path. + error hata @@ -57,6 +62,11 @@ ‘#:project’ yönergesi geçersizdir: {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + '#:ref' yönergesi geçersiz: {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. '{0}' adı eksik. diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.zh-Hans.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.zh-Hans.xlf index b5c92edfd759..9abb7769ed62 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.zh-Hans.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.zh-Hans.xlf @@ -17,6 +17,11 @@ 找不到项目或目录“{0}”。 + + Could not find file '{0}'. + 找不到文件“{0}”。 + {0} is the file path. + error 错误 @@ -57,6 +62,11 @@ '#:project' 指令无效: {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + "#:ref" 指令无效: {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. 
缺少 '{0}' 的名称。 diff --git a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.zh-Hant.xlf b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.zh-Hant.xlf index d65f38fe59f2..a6aa49ad64fd 100644 --- a/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.zh-Hant.xlf +++ b/src/Cli/Microsoft.DotNet.FileBasedPrograms/xlf/FileBasedProgramsResources.zh-Hant.xlf @@ -17,6 +17,11 @@ 找不到專案或目錄 `{0}`。 + + Could not find file '{0}'. + 找不到檔案 '{0}'。 + {0} is the file path. + error 錯誤 @@ -57,6 +62,11 @@ '#:project' 指示詞無效: {0} {0} is the inner error message. + + The '#:ref' directive is invalid: {0} + '#:ref' 指示詞無效: {0} + {Locked="#:ref"}{0} is the inner error message. + Missing name of '{0}'. 缺少 '{0}' 的名稱。 diff --git a/src/Cli/Microsoft.TemplateEngine.Cli/Alias/AliasModel.cs b/src/Cli/Microsoft.TemplateEngine.Cli/Alias/AliasModel.cs index 2228284f581c..ec8b35dd3af8 100644 --- a/src/Cli/Microsoft.TemplateEngine.Cli/Alias/AliasModel.cs +++ b/src/Cli/Microsoft.TemplateEngine.Cli/Alias/AliasModel.cs @@ -1,7 +1,7 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
-using Newtonsoft.Json; +using System.Text.Json.Serialization; namespace Microsoft.TemplateEngine.Cli.Alias { @@ -17,7 +17,7 @@ internal AliasModel(IReadOnlyDictionary> commandAl CommandAliases = new Dictionary>(commandAliases.ToDictionary(x => x.Key, x => x.Value), StringComparer.OrdinalIgnoreCase); } - [JsonProperty] + [JsonInclude] internal Dictionary> CommandAliases { get; set; } internal void AddCommandAlias(string aliasName, IReadOnlyList aliasTokens) diff --git a/src/Cli/Microsoft.TemplateEngine.Cli/Alias/AliasRegistry.cs b/src/Cli/Microsoft.TemplateEngine.Cli/Alias/AliasRegistry.cs index faa3c0f34179..05b2f9d7c8e0 100644 --- a/src/Cli/Microsoft.TemplateEngine.Cli/Alias/AliasRegistry.cs +++ b/src/Cli/Microsoft.TemplateEngine.Cli/Alias/AliasRegistry.cs @@ -3,7 +3,8 @@ using Microsoft.TemplateEngine.Abstractions; using Microsoft.TemplateEngine.Utils; -using Newtonsoft.Json.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; namespace Microsoft.TemplateEngine.Cli.Alias { @@ -135,7 +136,7 @@ private void EnsureLoaded() _aliases = new AliasModel(); return; } - JObject parsed = _environmentSettings.Host.FileSystem.ReadObject(_aliasesFilePath); + JsonObject parsed = _environmentSettings.Host.FileSystem.ReadObject(_aliasesFilePath); IReadOnlyDictionary> commandAliases = ToStringListDictionary(parsed, StringComparer.OrdinalIgnoreCase, "CommandAliases"); _aliases = new AliasModel(commandAliases); @@ -145,7 +146,19 @@ private void Save() { if (_aliases is AliasModel { CommandAliases: { Count: > 0 } }) { - _environmentSettings.Host.FileSystem.WriteObject(_aliasesFilePath, _aliases); + JsonObject root = new(); + JsonObject commandAliases = new(); + foreach (var kvp in _aliases.CommandAliases) + { + JsonArray arr = new(); + foreach (string item in kvp.Value) + { + arr.Add((JsonNode)JsonValue.Create(item)!); + } + commandAliases[kvp.Key] = arr; + } + root["CommandAliases"] = commandAliases; + _environmentSettings.Host.FileSystem.WriteObject(_aliasesFilePath, 
root); } else { @@ -154,50 +167,52 @@ private void Save() } // reads a dictionary whose values can either be string literals, or arrays of strings. - private IReadOnlyDictionary> ToStringListDictionary(JToken token, StringComparer? comparer = null, string? propertyName = null) + private IReadOnlyDictionary> ToStringListDictionary(JsonObject token, StringComparer? comparer = null, string? propertyName = null) { Dictionary> result = new(comparer ?? StringComparer.Ordinal); - JObject? jObj = token as JObject; - if (jObj == null || propertyName == null || !jObj.TryGetValue(propertyName, StringComparison.OrdinalIgnoreCase, out JToken? element)) + + if (propertyName == null) { return result; } - jObj = element as JObject; - if (jObj == null) + // Case-insensitive property lookup for compatibility with Newtonsoft.Json behavior + JsonNode? element = null; + foreach (var prop in token) + { + if (string.Equals(prop.Key, propertyName, StringComparison.OrdinalIgnoreCase)) + { + element = prop.Value; + break; + } + } + + if (element is not JsonObject jObj) { return result; } - foreach (JProperty property in jObj.Properties()) + foreach (KeyValuePair property in jObj) { if (property.Value == null) { continue; } - else if (property.Value.Type == JTokenType.String) + else if (property.Value.GetValueKind() == JsonValueKind.String) { - result[property.Name] = new List() { property.Value.ToString() }; + result[property.Key] = new List() { property.Value.GetValue() }; } - else if (property.Value.Type == JTokenType.Array) + else if (property.Value is JsonArray arr) { - JArray? arr = property.Value as JArray; - if (arr == null) - { - result[property.Name] = Array.Empty(); - } - else + List values = new(); + foreach (JsonNode? 
item in arr) { - List values = new(); - foreach (JToken item in arr) + if (item != null && item.GetValueKind() == JsonValueKind.String) { - if (item != null && item.Type == JTokenType.String) - { - values.Add(item.ToString()); - } + values.Add(item.GetValue()); } - result[property.Name] = values; } + result[property.Key] = values; } } diff --git a/src/Cli/Microsoft.TemplateEngine.Cli/HostSpecificDataLoader.cs b/src/Cli/Microsoft.TemplateEngine.Cli/HostSpecificDataLoader.cs index 3f421e11b12a..5ba02797f1d3 100644 --- a/src/Cli/Microsoft.TemplateEngine.Cli/HostSpecificDataLoader.cs +++ b/src/Cli/Microsoft.TemplateEngine.Cli/HostSpecificDataLoader.cs @@ -2,13 +2,13 @@ // The .NET Foundation licenses this file to you under the MIT license. using System.Collections.Concurrent; +using System.Text.Json; +using System.Text.Json.Nodes; using Microsoft.Extensions.Logging; using Microsoft.TemplateEngine.Abstractions; using Microsoft.TemplateEngine.Abstractions.Mount; using Microsoft.TemplateEngine.Edge.Settings; using Microsoft.TemplateEngine.Utils; -using Newtonsoft.Json; -using Newtonsoft.Json.Linq; namespace Microsoft.TemplateEngine.Cli { @@ -16,6 +16,12 @@ public class HostSpecificDataLoader : IHostSpecificDataLoader { private readonly IEngineEnvironmentSettings _engineEnvironment; + private static readonly JsonDocumentOptions s_jsonDocumentOptions = new() + { + CommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true + }; + private readonly ConcurrentDictionary _cache = new(); @@ -39,7 +45,7 @@ private HostSpecificTemplateData ReadHostSpecificTemplateDataUncached(ITemplateI { if (!string.IsNullOrWhiteSpace(hostData)) { - JObject jObject = JObject.Parse(hostData); + JsonObject? 
jObject = JsonNode.Parse(hostData, nodeOptions: null, s_jsonDocumentOptions)?.AsObject(); return new HostSpecificTemplateData(jObject); } } @@ -60,12 +66,10 @@ private HostSpecificTemplateData ReadHostSpecificTemplateDataUncached(ITemplateI file = mountPoint.FileInfo(templateInfo.HostConfigPlace); if (file != null && file.Exists) { - JObject jsonData; + JsonObject? jsonData; using (Stream stream = file.OpenRead()) - using (TextReader textReader = new StreamReader(stream, true)) - using (JsonReader jsonReader = new JsonTextReader(textReader)) { - jsonData = JObject.Load(jsonReader); + jsonData = JsonNode.Parse(stream, nodeOptions: null, s_jsonDocumentOptions)?.AsObject(); } return new HostSpecificTemplateData(jsonData); diff --git a/src/Cli/Microsoft.TemplateEngine.Cli/HostSpecificTemplateData.cs b/src/Cli/Microsoft.TemplateEngine.Cli/HostSpecificTemplateData.cs index c5f3f3204546..3ed21b27aac6 100644 --- a/src/Cli/Microsoft.TemplateEngine.Cli/HostSpecificTemplateData.cs +++ b/src/Cli/Microsoft.TemplateEngine.Cli/HostSpecificTemplateData.cs @@ -1,8 +1,9 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. -using Newtonsoft.Json; -using Newtonsoft.Json.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; namespace Microsoft.TemplateEngine.Cli { @@ -14,7 +15,7 @@ public class HostSpecificTemplateData private const string ShortNameKey = "shortName"; private const string AlwaysShowKey = "alwaysShow"; - internal HostSpecificTemplateData(JObject? jObject) + internal HostSpecificTemplateData(JsonObject? jObject) { var symbolsInfo = new Dictionary>(); @@ -24,34 +25,63 @@ internal HostSpecificTemplateData(JObject? jObject) return; } - if (jObject.GetValue(nameof(UsageExamples), StringComparison.OrdinalIgnoreCase) is JArray usagesArray) + JsonNode? 
usagesNode = GetPropertyCaseInsensitive(jObject, nameof(UsageExamples)); + if (usagesNode is JsonArray usagesArray) { - UsageExamples = new List(usagesArray.Values().Where(v => v != null).OfType()); + UsageExamples = new List(usagesArray + .Where(v => v != null && v.GetValueKind() == JsonValueKind.String) + .Select(v => v!.GetValue())); } - if (jObject.GetValue(nameof(SymbolInfo), StringComparison.OrdinalIgnoreCase) is JObject symbols) + JsonNode? symbolsNode = GetPropertyCaseInsensitive(jObject, nameof(SymbolInfo)); + if (symbolsNode is JsonObject symbols) { - foreach (var symbolInfo in symbols.Properties()) + foreach (var symbolInfo in symbols) { - if (!(symbolInfo.Value is JObject symbol)) + if (symbolInfo.Value is not JsonObject symbol) { continue; } var symbolProperties = new Dictionary(); - foreach (var symbolProperty in symbol.Properties()) + foreach (var symbolProperty in symbol) { - symbolProperties[symbolProperty.Name] = symbolProperty.Value.Value() ?? ""; + if (symbolProperty.Value is null) + { + symbolProperties[symbolProperty.Key] = ""; + } + else + { + var kind = symbolProperty.Value.GetValueKind(); + symbolProperties[symbolProperty.Key] = kind switch + { + JsonValueKind.String => symbolProperty.Value.GetValue(), + JsonValueKind.True => "true", + JsonValueKind.False => "false", + _ => symbolProperty.Value.ToJsonString() + }; + } } - symbolsInfo[symbolInfo.Name] = symbolProperties; + symbolsInfo[symbolInfo.Key] = symbolProperties; } } SymbolInfo = symbolsInfo; - IsHidden = jObject.Value(nameof(IsHidden)); - + JsonNode? 
isHiddenNode = GetPropertyCaseInsensitive(jObject, nameof(IsHidden)); + if (isHiddenNode != null) + { + var kind = isHiddenNode.GetValueKind(); + if (kind == JsonValueKind.True) + { + IsHidden = true; + } + else if (kind == JsonValueKind.String && bool.TryParse(isHiddenNode.GetValue(), out bool hidden)) + { + IsHidden = hidden; + } + } } internal HostSpecificTemplateData( @@ -144,7 +174,7 @@ public Dictionary ShortNameOverrides } } - internal static HostSpecificTemplateData Default { get; } = new HostSpecificTemplateData((JObject?)null); + internal static HostSpecificTemplateData Default { get; } = new HostSpecificTemplateData((JsonObject?)null); internal string DisplayNameForParameter(string parameterName) { @@ -157,26 +187,50 @@ internal string DisplayNameForParameter(string parameterName) return parameterName; } - private class HostSpecificTemplateDataJsonConverter : JsonConverter + private static JsonNode? GetPropertyCaseInsensitive(JsonObject obj, string key) { - public override HostSpecificTemplateData ReadJson(JsonReader reader, Type objectType, HostSpecificTemplateData? existingValue, bool hasExistingValue, JsonSerializer serializer) => throw new NotImplementedException(); + if (obj.TryGetPropertyValue(key, out JsonNode? result)) + { + return result; + } - public override void WriteJson(JsonWriter writer, HostSpecificTemplateData? 
value, JsonSerializer serializer) + foreach (var kvp in obj) { - if (value == null) + if (string.Equals(kvp.Key, key, StringComparison.OrdinalIgnoreCase)) { - return; + return kvp.Value; } + } + + return null; + } + + private class HostSpecificTemplateDataJsonConverter : JsonConverter + { + public override HostSpecificTemplateData Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) => throw new NotImplementedException(); + + public override void Write(Utf8JsonWriter writer, HostSpecificTemplateData value, JsonSerializerOptions options) + { writer.WriteStartObject(); if (value.IsHidden) { - writer.WritePropertyName(nameof(IsHidden)); - writer.WriteValue(value.IsHidden); + writer.WriteBoolean(nameof(IsHidden), value.IsHidden); } if (value.SymbolInfo.Any()) { writer.WritePropertyName(nameof(SymbolInfo)); - serializer.Serialize(writer, value.SymbolInfo); + writer.WriteStartObject(); + foreach (var symbol in value.SymbolInfo) + { + writer.WritePropertyName(symbol.Key); + writer.WriteStartObject(); + foreach (var prop in symbol.Value) + { + writer.WriteString(prop.Key, prop.Value); + } + writer.WriteEndObject(); + } + writer.WriteEndObject(); } if (value.UsageExamples != null && value.UsageExamples.Any(e => !string.IsNullOrWhiteSpace(e))) @@ -187,7 +241,7 @@ public override void WriteJson(JsonWriter writer, HostSpecificTemplateData? valu { if (!string.IsNullOrWhiteSpace(example)) { - writer.WriteValue(example); + writer.WriteStringValue(example); } } writer.WriteEndArray(); diff --git a/src/Cli/Microsoft.TemplateEngine.Cli/JExtensions.cs b/src/Cli/Microsoft.TemplateEngine.Cli/JExtensions.cs index 5cd2d77b6f96..e9602d353c1e 100644 --- a/src/Cli/Microsoft.TemplateEngine.Cli/JExtensions.cs +++ b/src/Cli/Microsoft.TemplateEngine.Cli/JExtensions.cs @@ -1,45 +1,48 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
-#if !NET6_0_OR_GREATER -using System; -using System.Collections.Generic; -using System.IO; -#endif +using System.Text.Json; +using System.Text.Json.Nodes; using Microsoft.TemplateEngine.Abstractions.PhysicalFileSystem; -using Newtonsoft.Json; -using Newtonsoft.Json.Linq; namespace Microsoft.TemplateEngine { internal static class JExtensions { - internal static string? ToString(this JToken? token, string? key) + private static readonly JsonDocumentOptions DocOptions = new() { CommentHandling = JsonCommentHandling.Skip, AllowTrailingCommas = true }; + + internal static string? ToString(this JsonNode? token, string? key) { if (key == null) { - if (token == null || token.Type != JTokenType.String) + if (token == null) { return null; } - return token.ToString(); + if (token is JsonValue val && val.GetValueKind() == JsonValueKind.String) + { + return val.GetValue(); + } + + return null; } - if (token is not JObject obj) + if (token is not JsonObject obj) { return null; } - if (!obj.TryGetValue(key, StringComparison.OrdinalIgnoreCase, out JToken? element) || element.Type != JTokenType.String) + JsonNode? element = GetPropertyCaseInsensitive(obj, key); + if (element == null || element.GetValueKind() != JsonValueKind.String) { return null; } - return element.ToString(); + return element.GetValue(); } - internal static bool TryGetValue(this JToken? token, string? key, out JToken? result) + internal static bool TryGetValue(this JsonNode? token, string? key, out JsonNode? result) { result = null; @@ -52,25 +55,42 @@ internal static bool TryGetValue(this JToken? token, string? key, out JToken? 
re { result = token; } - else if (!((JObject)token).TryGetValue(key, StringComparison.OrdinalIgnoreCase, out result)) + else { - return false; + result = GetPropertyCaseInsensitive(token.AsObject(), key); + if (result == null) + { + return false; + } } return true; } - internal static bool TryParseBool(this JToken token, out bool result) + internal static bool TryParseBool(this JsonNode token, out bool result) { result = false; - return (token.Type == JTokenType.Boolean || token.Type == JTokenType.String) - && - bool.TryParse(token.ToString(), out result); + var kind = token.GetValueKind(); + if (kind == JsonValueKind.True) + { + result = true; + return true; + } + if (kind == JsonValueKind.False) + { + result = false; + return true; + } + if (kind == JsonValueKind.String) + { + return bool.TryParse(token.GetValue(), out result); + } + return false; } - internal static bool ToBool(this JToken? token, string? key = null, bool defaultValue = false) + internal static bool ToBool(this JsonNode? token, string? key = null, bool defaultValue = false) { - if (!token.TryGetValue(key, out JToken? checkToken)) + if (!token.TryGetValue(key, out JsonNode? checkToken)) { return defaultValue; } @@ -83,12 +103,11 @@ internal static bool ToBool(this JToken? token, string? key = null, bool default return result; } - internal static int ToInt32(this JToken? token, string? key = null, int defaultValue = 0) + internal static int ToInt32(this JsonNode? token, string? key = null, int defaultValue = 0) { - int value; if (key == null) { - if (token == null || token.Type != JTokenType.Integer || !int.TryParse(token.ToString(), out value)) + if (token == null || !token.TryParseInt(out int value)) { return defaultValue; } @@ -96,28 +115,21 @@ internal static int ToInt32(this JToken? token, string? 
key = null, int defaultV return value; } - if (token is not JObject obj) + if (token is not JsonObject obj) { return defaultValue; } - if (!obj.TryGetValue(key, StringComparison.OrdinalIgnoreCase, out JToken? element)) + JsonNode? element = GetPropertyCaseInsensitive(obj, key); + if (element == null || !element.TryParseInt(out int result)) { return defaultValue; } - else if (element.Type == JTokenType.Integer) - { - return element.ToInt32(); - } - else if (int.TryParse(element.ToString(), out value)) - { - return value; - } - return defaultValue; + return result; } - internal static T ToEnum(this JToken token, string? key = null, T defaultValue = default) + internal static T ToEnum(this JsonNode token, string? key = null, T defaultValue = default) where T : struct { string? val = token.ToString(key); @@ -129,7 +141,7 @@ internal static T ToEnum(this JToken token, string? key = null, T defaultValu return result; } - internal static Guid ToGuid(this JToken token, string? key = null, Guid defaultValue = default) + internal static Guid ToGuid(this JsonNode token, string? key = null, Guid defaultValue = default) { string? val = token.ToString(key); if (val == null || !Guid.TryParse(val, out Guid result)) @@ -140,97 +152,84 @@ internal static Guid ToGuid(this JToken token, string? key = null, Guid defaultV return result; } - internal static IEnumerable PropertiesOf(this JToken? token, string? key = null) + internal static IEnumerable> PropertiesOf(this JsonNode? token, string? key = null) { - JObject? currentJObj = token as JObject; - if (currentJObj == null) + if (token is not JsonObject currentJObj) { - return Array.Empty(); + return Array.Empty>(); } if (key != null) { - if (!currentJObj.TryGetValue(key, StringComparison.OrdinalIgnoreCase, out JToken? element)) + JsonNode? 
element = GetPropertyCaseInsensitive(currentJObj, key); + if (element is not JsonObject nested) { - return Array.Empty(); + return Array.Empty>(); } - currentJObj = element as JObject; - } - if (currentJObj == null) - { - return Array.Empty(); + return nested.ToList(); } - return currentJObj.Properties(); + return currentJObj.ToList(); } - internal static T? Get(this JToken? token, string? key) - where T : JToken + internal static T? Get(this JsonNode? token, string? key) + where T : JsonNode { - if (token is not JObject obj || key == null) - { - return default; - } - - if (!obj.TryGetValue(key, StringComparison.OrdinalIgnoreCase, out JToken? res)) + if (token is not JsonObject obj || key == null) { return default; } + JsonNode? res = GetPropertyCaseInsensitive(obj, key); return res as T; } - internal static IReadOnlyList ArrayAsStrings(this JToken? token, string? propertyName = null) + internal static IReadOnlyList ArrayAsStrings(this JsonNode? token, string? propertyName = null) { if (propertyName != null) { - token = token.Get(propertyName); + token = token.Get(propertyName); } - if (token is not JArray arr) + if (token is not JsonArray arr) { return Array.Empty(); } List values = new(); - foreach (JToken item in arr) + foreach (JsonNode? 
item in arr) { - if (item != null && item.Type == JTokenType.String) + if (item != null && item.GetValueKind() == JsonValueKind.String) { - values.Add(item.ToString()); + values.Add(item.GetValue()); } } return values; } - internal static JObject ReadObject(this IPhysicalFileSystem fileSystem, string path) + internal static JsonObject ReadObject(this IPhysicalFileSystem fileSystem, string path) { - using (Stream fileStream = fileSystem.OpenRead(path)) - using (var textReader = new StreamReader(fileStream, Encoding.UTF8, true)) - using (var jsonReader = new JsonTextReader(textReader)) - { - return JObject.Load(jsonReader); - } + using Stream fileStream = fileSystem.OpenRead(path); + using var textReader = new StreamReader(fileStream, Encoding.UTF8, true); + string json = textReader.ReadToEnd(); + return (JsonObject?)JsonNode.Parse(json, null, DocOptions) + ?? throw new InvalidOperationException($"Failed to parse JSON from '{path}'."); } - internal static void WriteObject(this IPhysicalFileSystem fileSystem, string path, object obj) + internal static void WriteObject(this IPhysicalFileSystem fileSystem, string path, JsonNode obj) { - using (Stream fileStream = fileSystem.CreateFile(path)) - using (var textWriter = new StreamWriter(fileStream, Encoding.UTF8)) - using (var jsonWriter = new JsonTextWriter(textWriter)) - { - var serializer = new JsonSerializer(); - serializer.Serialize(jsonWriter, obj); - } + using Stream fileStream = fileSystem.CreateFile(path); + using var writer = new Utf8JsonWriter(fileStream); + obj.WriteTo(writer); } - internal static bool TryParse(this string arg, out JToken? token) + internal static bool TryParse(this string arg, out JsonNode? token) { try { - token = JToken.Parse(arg); + token = JsonNode.Parse(arg, null, DocOptions); return true; } catch @@ -240,5 +239,42 @@ internal static bool TryParse(this string arg, out JToken? 
token) } } + private static bool TryParseInt(this JsonNode token, out int result) + { + result = default; + var kind = token.GetValueKind(); + if (kind == JsonValueKind.Number) + { + if (token is JsonValue jv && jv.TryGetValue(out int intVal)) + { + result = intVal; + return true; + } + return int.TryParse(token.ToJsonString(), out result); + } + if (kind == JsonValueKind.String) + { + return int.TryParse(token.GetValue(), out result); + } + return false; + } + + private static JsonNode? GetPropertyCaseInsensitive(JsonObject obj, string key) + { + if (obj.TryGetPropertyValue(key, out JsonNode? result)) + { + return result; + } + + foreach (var kvp in obj) + { + if (string.Equals(kvp.Key, key, StringComparison.OrdinalIgnoreCase)) + { + return kvp.Value; + } + } + + return null; + } } } diff --git a/src/Cli/Microsoft.TemplateEngine.Cli/PostActionProcessors/ChmodPostActionProcessor.cs b/src/Cli/Microsoft.TemplateEngine.Cli/PostActionProcessors/ChmodPostActionProcessor.cs index b3b919c87680..3fb827553386 100644 --- a/src/Cli/Microsoft.TemplateEngine.Cli/PostActionProcessors/ChmodPostActionProcessor.cs +++ b/src/Cli/Microsoft.TemplateEngine.Cli/PostActionProcessors/ChmodPostActionProcessor.cs @@ -4,7 +4,7 @@ using System.Diagnostics; using Microsoft.DotNet.Cli.Utils; using Microsoft.TemplateEngine.Abstractions; -using Newtonsoft.Json.Linq; +using System.Text.Json.Nodes; namespace Microsoft.TemplateEngine.Cli.PostActionProcessors { @@ -22,12 +22,12 @@ protected override bool ProcessInternal(IEngineEnvironmentSettings environment, string[] values; try { - JArray valueArray = JArray.Parse(entry.Value); + JsonArray valueArray = JsonNode.Parse(entry.Value)!.AsArray(); values = new string[valueArray.Count]; for (int i = 0; i < valueArray.Count; ++i) { - values[i] = valueArray[i].ToString(); + values[i] = valueArray[i]?.GetValue() ?? 
""; } } catch diff --git a/src/Cli/Microsoft.TemplateEngine.Cli/PostActionProcessors/PostActionProcessorBase.cs b/src/Cli/Microsoft.TemplateEngine.Cli/PostActionProcessors/PostActionProcessorBase.cs index 266cfb653e34..35e41b8652a3 100644 --- a/src/Cli/Microsoft.TemplateEngine.Cli/PostActionProcessors/PostActionProcessorBase.cs +++ b/src/Cli/Microsoft.TemplateEngine.Cli/PostActionProcessors/PostActionProcessorBase.cs @@ -3,7 +3,8 @@ using Microsoft.TemplateEngine.Abstractions; using Microsoft.TemplateEngine.Utils; -using Newtonsoft.Json.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; namespace Microsoft.TemplateEngine.Cli.PostActionProcessors { @@ -122,26 +123,26 @@ protected abstract bool ProcessInternal( private static bool TryParseAsJson(string targetFiles, out IReadOnlyList paths) { paths = new List(); - targetFiles.TryParse(out JToken? config); + targetFiles.TryParse(out JsonNode? config); if (config is null) { return false; } - if (config.Type == JTokenType.String) + if (config.GetValueKind() == JsonValueKind.String) { - paths = config.ToString().Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries); + paths = config.GetValue().Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries); return true; } - if (config is not JArray arr) + if (config is not JsonArray arr) { return false; } var parts = arr - .Where(token => token.Type == JTokenType.String) - .Select(token => token.ToString()).ToList(); + .Where(token => token != null && token.GetValueKind() == JsonValueKind.String) + .Select(token => token!.GetValue()).ToList(); if (parts.Count == 0) { diff --git a/src/Cli/Microsoft.TemplateEngine.Cli/TemplateSearch/CliHostSearchCacheData.cs b/src/Cli/Microsoft.TemplateEngine.Cli/TemplateSearch/CliHostSearchCacheData.cs index cc1a2d8a1f48..3df2d3d5ee9c 100644 --- a/src/Cli/Microsoft.TemplateEngine.Cli/TemplateSearch/CliHostSearchCacheData.cs +++ b/src/Cli/Microsoft.TemplateEngine.Cli/TemplateSearch/CliHostSearchCacheData.cs @@ -2,7 +2,7 @@ // 
The .NET Foundation licenses this file to you under the MIT license. using Microsoft.DotNet.Cli.Utils; -using Newtonsoft.Json.Linq; +using System.Text.Json.Nodes; namespace Microsoft.TemplateEngine.Cli.TemplateSearch { @@ -13,29 +13,30 @@ public static class CliHostSearchCacheData public static Func Reader => (obj) => { - JObject? cacheObject = obj as JObject; + JsonObject? cacheObject = obj as JsonObject; if (cacheObject == null) { return HostSpecificTemplateData.Default; } try { - if (_hostDataPropertyNames.Contains(cacheObject.Properties().First().Name, StringComparer.OrdinalIgnoreCase)) + var keys = new HashSet(cacheObject.Select(p => p.Key), StringComparer.OrdinalIgnoreCase); + if (_hostDataPropertyNames.Any(keys.Contains)) { return new HostSpecificTemplateData(cacheObject); } //fallback to old behavior Dictionary cliData = new(); - foreach (JProperty data in cacheObject.Properties()) + foreach (KeyValuePair data in cacheObject) { try { - cliData[data.Name] = new HostSpecificTemplateData(data.Value as JObject); + cliData[data.Key] = new HostSpecificTemplateData(data.Value as JsonObject); } catch (Exception ex) { - Reporter.Verbose.WriteLine($"Error deserializing the cli host specific template data for template {data.Name}, details:{ex}"); + Reporter.Verbose.WriteLine($"Error deserializing the cli host specific template data for template {data.Key}, details:{ex}"); } } return cliData; diff --git a/src/Cli/dn/.gitignore b/src/Cli/dn/.gitignore new file mode 100644 index 000000000000..c6adc278bc15 --- /dev/null +++ b/src/Cli/dn/.gitignore @@ -0,0 +1,5 @@ +# Generated by PublishAotForDebug MSBuild target with machine-specific paths +Properties/launchSettings.json +debug-dn.cmd +dn-native-debug.vcxproj.user +dn.csproj.Backup.tmp diff --git a/src/Cli/dn/Program.cs b/src/Cli/dn/Program.cs new file mode 100644 index 000000000000..6528c040f157 --- /dev/null +++ b/src/Cli/dn/Program.cs @@ -0,0 +1,178 @@ +// Licensed to the .NET Foundation under one or more agreements. 
+// The .NET Foundation licenses this file to you under the MIT license. + +using System.Runtime.InteropServices; + +namespace Microsoft.DotNet.Cli; + +partial class Program +{ + [LibraryImport("dotnet-aot", EntryPoint = "dotnet_execute")] + private static partial int DotnetExecute( + nint hostPath, + nint dotnetRoot, + nint sdkDir, + nint hostfxrPath, + int argc, + nint argv); + + static unsafe int Main(string[] args) + { + string hostPath = Environment.ProcessPath!; + string baseDir = AppContext.BaseDirectory.TrimEnd(Path.DirectorySeparatorChar); + string dotnetRoot = ResolveDotnetRoot(); + string sdkDir = baseDir; + string hostfxrPath = ResolveHostfxrPath(dotnetRoot); + + // Marshal argv to native platform strings (UTF-16 on Windows, UTF-8 on Unix) + // to match hostfxr's char_t definition used by PlatformStringMarshaller + // in dotnet-aot.dll. + nint* nativeArgv = stackalloc nint[args.Length]; + try + { + for (int i = 0; i < args.Length; i++) + { + nativeArgv[i] = MarshalStringToNative(args[i]); + } + + nint hpNative = MarshalStringToNative(hostPath); + nint drNative = MarshalStringToNative(dotnetRoot); + nint sdNative = MarshalStringToNative(sdkDir); + nint hfNative = MarshalStringToNative(hostfxrPath); + + try + { + return DotnetExecute( + hpNative, + drNative, + sdNative, + hfNative, + args.Length, + (nint)nativeArgv); + } + finally + { + Marshal.FreeCoTaskMem(hpNative); + Marshal.FreeCoTaskMem(drNative); + Marshal.FreeCoTaskMem(sdNative); + Marshal.FreeCoTaskMem(hfNative); + } + } + finally + { + for (int i = 0; i < args.Length; i++) + { + if (nativeArgv[i] != 0) + { + Marshal.FreeCoTaskMem(nativeArgv[i]); + } + } + } + } + + /// + /// Resolves the .NET installation root directory, mimicking muxer behavior. + /// + private static string ResolveDotnetRoot() + { + // Check DOTNET_ROOT first (standard on all platforms) + string? 
dotnetRoot = Environment.GetEnvironmentVariable("DOTNET_ROOT"); + if (!string.IsNullOrEmpty(dotnetRoot) && Directory.Exists(dotnetRoot)) + { + return dotnetRoot; + } + + // On Windows, also check the architecture-specific variant + if (OperatingSystem.IsWindows()) + { + string archVar = RuntimeInformation.ProcessArchitecture switch + { + Architecture.X64 => "DOTNET_ROOT(x64)", + Architecture.X86 => "DOTNET_ROOT(x86)", + Architecture.Arm64 => "DOTNET_ROOT(ARM64)", + _ => "" + }; + + if (!string.IsNullOrEmpty(archVar)) + { + dotnetRoot = Environment.GetEnvironmentVariable(archVar); + if (!string.IsNullOrEmpty(dotnetRoot) && Directory.Exists(dotnetRoot)) + { + return dotnetRoot; + } + } + } + + // Fall back to resolving from the process path + string? processPath = Environment.ProcessPath; + if (processPath is not null) + { + string? processDir = Path.GetDirectoryName(processPath); + if (processDir is not null) + { + // Walk up looking for a directory with dotnet(.exe) + string? candidate = processDir; + while (candidate is not null) + { + if (File.Exists(Path.Combine(candidate, "dotnet" + (OperatingSystem.IsWindows() ? ".exe" : "")))) + { + return candidate; + } + candidate = Path.GetDirectoryName(candidate); + } + } + } + + // Last resort: assume relative to AppContext.BaseDirectory + return Path.GetDirectoryName(AppContext.BaseDirectory.TrimEnd(Path.DirectorySeparatorChar)) ?? AppContext.BaseDirectory; + } + + /// + /// Finds the hostfxr library path under the given .NET root. + /// + private static string ResolveHostfxrPath(string dotnetRoot) + { + string fxrDir = Path.Combine(dotnetRoot, "host", "fxr"); + if (!Directory.Exists(fxrDir)) + { + return string.Empty; + } + + // Pick the highest version directory by parsing version numbers + string? latestFxr = Directory.GetDirectories(fxrDir) + .Select(path => new + { + Path = path, + Version = Version.TryParse(Path.GetFileName(path), out Version? version) ? 
version : null + }) + .Where(candidate => candidate.Version is not null) + .OrderByDescending(candidate => candidate.Version) + .Select(candidate => candidate.Path) + .FirstOrDefault(); + + if (latestFxr is null) + { + return string.Empty; + } + + string hostfxrName = OperatingSystem.IsWindows() + ? "hostfxr.dll" + : OperatingSystem.IsMacOS() + ? "libhostfxr.dylib" + : "libhostfxr.so"; + + string hostfxrPath = Path.Combine(latestFxr, hostfxrName); + return File.Exists(hostfxrPath) ? hostfxrPath : string.Empty; + } + + /// + /// Marshals a string to a native platform string (UTF-16 on Windows, UTF-8 on Unix) + /// to match hostfxr's char_t definition. + /// + private static nint MarshalStringToNative(string value) + { + return OperatingSystem.IsWindows() + ? Marshal.StringToCoTaskMemUni(value) + : Marshal.StringToCoTaskMemUTF8(value); + } +} diff --git a/src/Cli/dn/dn-native-debug.vcxproj b/src/Cli/dn/dn-native-debug.vcxproj new file mode 100644 index 000000000000..ea0f3bc53b95 --- /dev/null +++ b/src/Cli/dn/dn-native-debug.vcxproj @@ -0,0 +1,165 @@ + + + + + + + <_HasVCTargets Condition="'$(VCTargetsPath)' != '' and Exists('$(VCTargetsPath)\Microsoft.Cpp.Default.props')">true + + + + + + + + + + <_ValidProjectsForRestore Include="$(MSBuildProjectFullPath)" /> + + + + + + + + + + + Debug + x64 + + + Release + x64 + + + + + {E9A0B5D7-2F4A-4C8E-9D3B-1A6F5E8C7D2A} + dn-native-debug + dn-native-debug + + 17.0 + + + + + + + Makefile + v143 + + + + + + + <_RepoRoot>$([MSBuild]::NormalizePath('$(MSBuildThisFileDirectory)', '..', '..', '..')) + <_DotNetTool>$([MSBuild]::NormalizePath('$(_RepoRoot)', '.dotnet', 'dotnet.exe')) + <_DnPublishDir>$([MSBuild]::NormalizePath('$(_RepoRoot)', 'artifacts', 'bin', 'dn', '$(Configuration)', 'net11.0', 'win-x64', 'publish')) + <_DnExePath>$([MSBuild]::NormalizePath('$(_DnPublishDir)', 'dn.exe')) + <_DotnetRootPath>$([MSBuild]::NormalizePath('$(_RepoRoot)', '.dotnet')) + + + + + "$(_DotNetTool)" publish 
"$(MSBuildThisFileDirectory)..\dotnet-aot\dotnet-aot.csproj" -r win-x64 -c $(Configuration) && "$(_DotNetTool)" publish "$(MSBuildThisFileDirectory)dn.csproj" -r win-x64 -c $(Configuration) && "$(_DotNetTool)" build "$(MSBuildThisFileDirectory)..\dotnet\dotnet.csproj" -c $(Configuration) && copy /Y "$(_RepoRoot)\artifacts\bin\dotnet-aot\$(Configuration)\net11.0\win-x64\publish\dotnet-aot.dll" "$(_DnPublishDir)\" && xcopy "$(_RepoRoot)\artifacts\bin\dotnet\$(Configuration)\net11.0\*" "$(_DnPublishDir)\" /S /Y /Q + if exist "$(_DnPublishDir)" rd /s /q "$(_DnPublishDir)" + $(NMakeCleanCommandLine) && $(NMakeBuildCommandLine) + $(_DnExePath) + + + + + $(_DnExePath) + --info + $(_DnPublishDir) + DOTNET_ROOT=$(_DotnetRootPath) +DOTNET_CLI_ENABLEAOT=true + WindowsLocalDebugger + + + + + + + + + + + + + + + + + + + + + diff --git a/src/Cli/dn/dn.csproj b/src/Cli/dn/dn.csproj new file mode 100644 index 000000000000..c969c489389e --- /dev/null +++ b/src/Cli/dn/dn.csproj @@ -0,0 +1,138 @@ + + + + $(SdkTargetFramework) + dn + Exe + Microsoft.DotNet.Cli + true + true + MicrosoftAspNetCore + true + true + true + true + false + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + <_DnPublishDir>$(ArtifactsBinDir)dn\$(Configuration)\$(SdkTargetFramework)\$(TargetRid)\publish\ + <_DotnetAotPublishDir>$(ArtifactsBinDir)dotnet-aot\$(Configuration)\$(SdkTargetFramework)\$(TargetRid)\publish\ + <_DotnetManagedOutDir>$(ArtifactsBinDir)dotnet\$(Configuration)\$(SdkTargetFramework)\ + + + + + + + + + + + + + + + + + + + <_DotnetManagedFiles Include="$(_DotnetManagedOutDir)**\*" /> + + + + + + <_DnExePath>$([MSBuild]::NormalizePath('$(_DnPublishDir)', 'dn.exe')) + <_DnCwdPath>$([MSBuild]::NormalizePath('$(_DnPublishDir)')) + <_DotnetRootPath>$([MSBuild]::NormalizePath('$(RepoRoot)', '.dotnet')) + + + <_LaunchSettingsLines Include="{" /> + <_LaunchSettingsLines Include=" %22profiles%22: {" /> + <_LaunchSettingsLines Include=" %22Debug dn (managed 
path)%22: {" /> + <_LaunchSettingsLines Include=" %22commandName%22: %22Executable%22," /> + <_LaunchSettingsLines Include=" %22executablePath%22: %22$(_DnExePath.Replace('\','\\'))%22," /> + <_LaunchSettingsLines Include=" %22commandLineArgs%22: %22--info%22," /> + <_LaunchSettingsLines Include=" %22workingDirectory%22: %22$(_DnCwdPath.Replace('\','\\'))%22," /> + <_LaunchSettingsLines Include=" %22environmentVariables%22: {" /> + <_LaunchSettingsLines Include=" %22DOTNET_ROOT%22: %22$(_DotnetRootPath.Replace('\','\\'))%22," /> + <_LaunchSettingsLines Include=" %22DOTNET_CLI_ENABLEAOT%22: %22true%22" /> + <_LaunchSettingsLines Include=" }," /> + <_LaunchSettingsLines Include=" %22nativeDebugging%22: true" /> + <_LaunchSettingsLines Include=" }" /> + <_LaunchSettingsLines Include=" }" /> + <_LaunchSettingsLines Include="}" /> + + + + + + <_VcxUserLines Include="<?xml version=%221.0%22 encoding=%22utf-8%22?>" /> + <_VcxUserLines Include="<Project ToolsVersion=%2217.0%22 xmlns=%22http://schemas.microsoft.com/developer/msbuild/2003%22>" /> + <_VcxUserLines Include=" <PropertyGroup Condition=%22'$(Configuration)|$(Platform)'=='$(Configuration)|x64'%22>" /> + <_VcxUserLines Include=" <LocalDebuggerCommand>$(_DnExePath)</LocalDebuggerCommand>" /> + <_VcxUserLines Include=" <LocalDebuggerCommandArguments>--info</LocalDebuggerCommandArguments>" /> + <_VcxUserLines Include=" <LocalDebuggerWorkingDirectory>$(_DnCwdPath)</LocalDebuggerWorkingDirectory>" /> + <_VcxUserLines Include=" <LocalDebuggerEnvironment>DOTNET_ROOT=$(_DotnetRootPath) DOTNET_CLI_ENABLEAOT=true</LocalDebuggerEnvironment>" /> + <_VcxUserLines Include=" <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>" /> + <_VcxUserLines Include=" </PropertyGroup>" /> + <_VcxUserLines Include="</Project>" /> + + + + + + <_DebugScriptLines Include="@echo off" /> + <_DebugScriptLines Include="REM Launch the published NativeAOT dn.exe under Visual Studio's native debugger." 
/> + <_DebugScriptLines Include="REM This opens a new VS instance with the native debugger attached." /> + <_DebugScriptLines Include="REM Set breakpoints in the source file view and press F5 to start." /> + <_DebugScriptLines Include="REM" /> + <_DebugScriptLines Include="REM Tip: to break on managed exceptions, add a Function Breakpoint on RhThrowEx" /> + <_DebugScriptLines Include="REM (Debug > Windows > Breakpoints > New > Function, Language = All Languages)." /> + <_DebugScriptLines Include="REM" /> + <_DebugScriptLines Include="set DOTNET_ROOT=$(_DotnetRootPath)" /> + <_DebugScriptLines Include="set DOTNET_CLI_ENABLEAOT=true" /> + <_DebugScriptLines Include="devenv /debugexe %22$(_DnExePath)%22 --info" /> + + + + + diff --git a/src/Cli/dotnet-aot/DESIGN.md b/src/Cli/dotnet-aot/DESIGN.md new file mode 100644 index 000000000000..9e75132c2944 --- /dev/null +++ b/src/Cli/dotnet-aot/DESIGN.md @@ -0,0 +1,465 @@ +# NativeAOT Design for the .NET SDK CLI + +This document describes the design for adding a NativeAOT-compiled entry point +to the .NET SDK CLI. The goal is to achieve near-instant startup for common +commands while preserving full functionality through the managed CLI. + +The current implementation uses a standalone `dn.exe` host that lives alongside +the existing `dotnet` CLI. `dn.exe` emulates the muxer's `try_invoke_aot_sdk` +function — see +[dotnet/runtime#126171](https://github.com/dotnet/runtime/issues/126171). The +muxer looks for `dotnet-aot` in the resolved SDK directory and, when found, +calls `dotnet_execute` directly. `dn.exe` follows the same contract and serves +as a local development and testing entry point. The AOT fast path is gated +behind `DOTNET_CLI_ENABLEAOT=true`; when the variable is unset or false, the +bridge falls through to the managed CLI immediately. + +## Motivation + +The `dotnet` CLI today runs as a managed application hosted by CoreCLR. 
Every +invocation pays the cost of JIT compilation, type loading, and runtime +initialization — even for trivial operations like `dotnet --version`. A NativeAOT +entry point eliminates that overhead for supported commands while keeping the +full managed CLI as an automatic fallback. + +## Architecture + +The design uses three components arranged in layers. Each layer is compiled and +debugged differently. + +| Layer | Project | Output | Compilation | +|-------|---------|--------|-------------| +| 1 — Native Host | `src/Cli/dn/` | `dn.exe` | NativeAOT (`PublishAot`, `OutputType=Exe`) | +| 2 — AOT Bridge | `src/Cli/dotnet-aot/` | `dotnet-aot.dll` / `.so` / `.dylib` | NativeAOT (`PublishAot`, `NativeLib=Shared`) | +| 3 — Managed CLI | `src/Cli/dotnet/` | `dotnet.dll` | Standard managed build | + +```mermaid +graph TD + User["User runs dn.exe"] --> L1 + + subgraph L1["Layer 1 · dn.exe (Native AOT Executable)"] + Resolve["Resolve DOTNET_ROOT, hostfxr path"] + Marshal["Marshal args to native strings"] + PInvoke["P/Invoke: dotnet_execute()"] + Resolve --> Marshal --> PInvoke + end + + PInvoke -->|"DLL import"| L2 + + subgraph L2["Layer 2 · dotnet-aot.dll (Native AOT Shared Library)"] + Entry["NativeEntryPoint.Execute()"] + AotCheck{"DOTNET_CLI_ENABLEAOT
enabled?"} + Parse["Parser.Parse(args)"] + Fast{"Command handled
by AOT path?"} + Invoke["Parser.Invoke()"] + HostInit["ManagedHost.RunApp()"] + Entry --> AotCheck + AotCheck -- "Yes" --> Parse --> Fast + AotCheck -- "No" --> HostInit + Fast -- "Yes" --> Invoke + Fast -- "No" --> HostInit + end + + Invoke --> Done["Return exit code"] + HostInit -->|"hostfxr loads CoreCLR"| L3 + + subgraph L3["Layer 3 · dotnet.dll (Managed CLI on CoreCLR)"] + FullCLI["Full dotnet CLI
All commands, telemetry, workloads"] + end + + FullCLI --> Done + + style L1 fill:#2d4a7a,stroke:#5b9bd5,color:#fff + style L2 fill:#4a2d6b,stroke:#9b59b6,color:#fff + style L3 fill:#2d6b3a,stroke:#27ae60,color:#fff +``` + +### Layer 1 — `dn.exe` (Native Host) + +A minimal NativeAOT executable whose only job is to locate the .NET installation, +resolve `hostfxr`, marshal command-line arguments into platform-native strings, +and call into Layer 2 via P/Invoke. + +Key responsibilities: + +- Resolve `DOTNET_ROOT` from environment variables or by walking up from the + process path. +- Locate the highest-versioned `hostfxr` under `/host/fxr/`. +- Marshal `string[] args` to `nint*` (UTF-16 on Windows, UTF-8 on Unix). +- Call `dotnet_execute` exported from `dotnet-aot.dll`. + +### Layer 2 — `dotnet-aot.dll` (AOT Bridge) + +A NativeAOT shared library (`NativeLib=Shared`) that exports a single +`[UnmanagedCallersOnly]` entry point: `dotnet_execute`. This layer contains +the dual-path dispatch logic. + +**Fast path** — When `DOTNET_CLI_ENABLEAOT=true`, the AOT bridge compiles a +minimal `Parser` (guarded by `#if CLI_AOT`) that handles simple commands +(`--version`, `--info`) entirely in native code. If the parser recognizes the +command, it executes immediately and returns. + +**Slow path** — When `DOTNET_CLI_ENABLEAOT` is not set or the AOT parser does +not handle the command, the bridge calls `ManagedHost.RunApp()`, which uses the +hostfxr native hosting APIs (`hostfxr_initialize_for_dotnet_command_line` / +`hostfxr_set_runtime_property_value` / `hostfxr_run_app`) to bootstrap CoreCLR +and run `dotnet.dll`. The bridge passes through the `host_path`, `dotnet_root`, +and `hostfxr_path` received from the caller so that the runtime is configured +exactly as the muxer would configure it for an SDK command. 
+ +```mermaid +sequenceDiagram + participant dn as dn.exe (Layer 1) + participant aot as dotnet-aot.dll (Layer 2) + participant hfxr as hostfxr + participant clr as CoreCLR + participant cli as dotnet.dll (Layer 3) + + dn->>aot: dotnet_execute(hostPath, dotnetRoot, sdkDir, hostfxrPath, argc, argv) + aot->>aot: Parser.Parse(args) + + alt DOTNET_CLI_ENABLEAOT=true and command handled by AOT + aot->>aot: Parser.Invoke(parseResult) + aot-->>dn: exit code + else Command not handled or AOT disabled + aot->>hfxr: hostfxr_initialize_for_dotnet_command_line(args, host_path, dotnet_root) + aot->>hfxr: hostfxr_set_runtime_property_value(handle, "HOSTFXR_PATH", hostfxrPath) + aot->>hfxr: hostfxr_run_app(handle) + hfxr->>clr: Load CoreCLR runtime + hfxr->>cli: Program.Main(args) + cli-->>hfxr: exit code + hfxr-->>aot: exit code + aot-->>dn: exit code + end +``` + +### Layer 3 — `dotnet.dll` (Managed CLI) + +The existing managed CLI, unchanged. It contains all commands, telemetry, +workload management, NuGet integration, and everything else the SDK supports. +It runs on CoreCLR with full runtime capabilities (reflection, JIT, dynamic +assembly loading, hot reload). + +## Source Sharing and Conditional Compilation + +The `dotnet-aot` project does not duplicate source files. Instead, it links +files from `dotnet` and uses the `CLI_AOT` preprocessor constant to select +the appropriate implementation: + +```xml + +$(DefineConstants);CLI_AOT + + + + +``` + +In the shared files: + +- **`Parser.cs`** — Under `#if CLI_AOT`, defines a minimal parser with only + `--version` and `--info`. Under `#else`, defines the full command tree. +- **`Program.cs`** — Under `#if CLI_AOT`, provides a simple `Main` that + delegates to the AOT parser. Under `#else`, provides the full CLI entry point + with telemetry, signal handlers, and workload checks. 
+- **`CommandLineInfo.cs`** — Uses `#if CLI_AOT` to substitute lightweight + implementations for workload info, localized strings, and OS detection that + would otherwise pull in dependencies incompatible with AOT. + +```mermaid +graph LR + subgraph "dotnet-aot.csproj (CLI_AOT defined)" + PA["Parser.cs — minimal"] + PR["Program.cs — simple Main"] + end + + subgraph "dotnet.csproj (CLI_AOT not defined)" + PB["Parser.cs — full commands"] + PS["Program.cs — full CLI"] + end + + SRC["Source files in src/Cli/dotnet/"] -->|"Compile link"| PA + SRC -->|"Compile link"| PR + SRC -->|"Direct compile"| PB + SRC -->|"Direct compile"| PS + + style SRC fill:#555,stroke:#999,color:#fff +``` + +## Build Process + +Building for debug involves publishing two NativeAOT projects and overlaying the +managed output. The `dn.csproj` contains a `PublishAotForDebug` MSBuild target +that automates this when building inside Visual Studio: + +```mermaid +flowchart LR + A["1. Publish dotnet-aot.csproj"] --> B["2. Publish dn.csproj"] + B --> C["3. Build dotnet.csproj"] + C --> D["4. Copy dotnet-aot.dll → dn publish dir"] + D --> E["5. Overlay managed files → dn publish dir"] + E --> F["6. Generate launchSettings.json
& debug scripts"] +``` + +The final publish directory contains: + +```text +publish/ +├── dn.exe ← Layer 1 (native) +├── dn.pdb ← Native debug symbols for Layer 1 +├── dotnet-aot.dll ← Layer 2 (native shared lib) +├── dotnet-aot.pdb ← Native debug symbols for Layer 2 +├── dotnet.dll ← Layer 3 (managed) +├── dotnet.pdb ← Managed debug symbols for Layer 3 +├── dotnet.runtimeconfig.json ← Runtime config for hosting Layer 3 +└── ... ← Other managed assemblies +``` + +For command-line builds, use the VS Code tasks or run the publish steps +manually: + +```bash +# Publish the AOT shared library +dotnet publish src/Cli/dotnet-aot/dotnet-aot.csproj -r win-x64 -c Debug + +# Publish the AOT host executable +dotnet publish src/Cli/dn/dn.csproj -r win-x64 -c Debug + +# Build the managed CLI +dotnet build src/Cli/dotnet/dotnet.csproj -c Debug + +# Copy artifacts into the dn publish directory +cp artifacts/bin/dotnet-aot/Debug//win-x64/publish/dotnet-aot.dll \ + artifacts/bin/dn/Debug//win-x64/publish/ +cp -r artifacts/bin/dotnet/Debug//* \ + artifacts/bin/dn/Debug//win-x64/publish/ +``` + +## Debugging + +Debugging this architecture requires understanding which debugger engine works +with which layer. The key constraint: **NativeAOT output is pure native code +with no IL. 
Only a native debugger can bind breakpoints in Layers 1 and 2.** + +### Debugger Compatibility Matrix + +| What you want to debug | Debugger engine | VS project | VS Code config | +|------------------------|-----------------|------------|----------------| +| Layer 1 (`dn.exe`) | Native | `dn-native-debug.vcxproj` | `cppvsdbg` launch config | +| Layer 2 (`dotnet-aot.dll`) | Native | `dn-native-debug.vcxproj` | `cppvsdbg` launch config | +| Layer 3 (`dotnet.dll`) | Managed or mixed-mode | `dn.csproj` launch profile | `coreclr` launch config | +| Layers 1+2+3 together | Two debugger sessions | See [Mixed-mode](#mixed-mode-debugging-visual-studio) | See [VS Code mixed](#mixed-mode-vs-code) | + +### Debugging in Visual Studio + +#### Native debugging (Layers 1 & 2) + +The `dn-native-debug.vcxproj` is a stub C++ Makefile project that exists solely +to provide an F5 launch target using the native debugger engine +(`WindowsLocalDebugger`). It performs no C++ compilation. + +1. Open the solution (`cli.slnf` or `sdk.slnx`) in Visual Studio. +2. Set **dn-native-debug** as the startup project. +3. Set breakpoints in AOT source files (`NativeEntryPoint.cs`, `ManagedHost.cs`, + `Program.cs` under `#if CLI_AOT`, etc.). +4. Press **F5**. + +The native debugger reads the PDB generated by ILC and maps C# source lines to +native addresses. Breakpoints bind correctly in all AOT-compiled code. + +> **Why not use `launchSettings.json` with `nativeDebugging: true`?** +> That flag enables *mixed-mode* debugging where the managed debugger is primary +> and a native debugger is attached as an add-on. But there is no CLR loaded yet +> in Layers 1 and 2, so the managed engine finds nothing to attach to and C# +> breakpoints in AOT code won't bind. 
+ +#### Alternative: `devenv /debugexe` + +The build generates a `debug-dn.cmd` script that launches the published `dn.exe` +directly under Visual Studio's native debugger: + +```cmd +set DOTNET_ROOT=\.dotnet +devenv /debugexe "\dn.exe" --info +``` + +This opens a new VS instance with the native debugger attached. Set breakpoints +in the Source Files view and press F5. + +#### Managed debugging (Layer 3) + +Use the `dn.csproj` project with its generated `launchSettings.json` profile +("Debug dn (managed path)"). This profile has `nativeDebugging: true` which +enables mixed-mode, allowing the managed debugger to attach once CoreCLR loads. + +1. Set **dn** as the startup project. +2. Set breakpoints in managed source files (`Program.cs` under the non-AOT path, + command implementations, etc.). +3. Press **F5**. + +Breakpoints in managed code bind after `hostfxr_run_app` loads CoreCLR and +begins executing `dotnet.dll`. + +#### Mixed-mode debugging (Visual Studio) + +To debug across all three layers in a single session: + +```mermaid +flowchart LR + subgraph VS["Visual Studio"] + Native["Native debugger
(via dn-native-debug)"] + Managed["Managed debugger
(attaches when CLR loads)"] + end + + Native --> L1L2["Layers 1 & 2
AOT code"] + Managed --> L3["Layer 3
Managed code"] + + L1L2 -->|"hostfxr loads CLR"| L3 +``` + +1. Set **dn-native-debug** as startup project and press F5 (native debugger). +2. When execution reaches `ManagedHost.RunApp()` and the CLR is loaded, use + **Debug → Attach to Process** to attach the managed debugger to the same + process. + +Alternatively, the AOT bridge automatically detects a native debugger and sets +`DOTNET_LAUNCH_MANAGED_DEBUGGER=1`, which signals the managed code to call +`Debugger.Launch()` — prompting you to attach a managed debugger at CLR startup. + +### Debugging in VS Code + +#### Native debugging (Layers 1 & 2) + +Use the C/C++ extension (`ms-vscode.cpptools`) with a `cppvsdbg` (Windows) or +`cppdbg` (Linux/macOS) launch configuration: + +```jsonc +{ + "name": "Debug dn (native)", + "type": "cppvsdbg", // Windows; use "cppdbg" on Linux/macOS + "request": "launch", + "program": "${workspaceFolder}/artifacts/bin/dn/Debug//win-x64/publish/dn.exe", + "args": ["--info"], + "cwd": "${workspaceFolder}/artifacts/bin/dn/Debug//win-x64/publish", + "environment": [ + { "name": "DOTNET_ROOT", "value": "${workspaceFolder}/.dotnet" } + ], + "symbolSearchPath": "${workspaceFolder}/artifacts/bin/dn/Debug//win-x64/publish" +} +``` + +Set breakpoints in any AOT-compiled source file. The native debugger reads the +ILC-generated PDB/DWARF symbols and binds them. + +#### Managed debugging (Layer 3) + +Use the C# extension (`ms-dotnettools.csharp`) with a `coreclr` launch +configuration. Point it at the published `dn.exe` so it can attach once +CoreCLR loads: + +```jsonc +{ + "name": "Debug dn (managed)", + "type": "coreclr", + "request": "launch", + "program": "${workspaceFolder}/artifacts/bin/dn/Debug//win-x64/publish/dn.exe", + "args": ["build"], + "cwd": "${workspaceFolder}", + "env": { + "DOTNET_ROOT": "${workspaceFolder}/.dotnet" + } +} +``` + +> **Caveat**: The managed debugger will not break on anything until CoreCLR is +> loaded by `hostfxr`. 
Breakpoints in Layers 1 and 2 will be skipped silently. + +#### Mixed-mode (VS Code) + +VS Code does not support true mixed-mode debugging in a single session. The +workaround is to run two separate debug sessions: + +1. Launch with `cppvsdbg` for native breakpoints in Layers 1 & 2. +2. Separately, attach with `coreclr` after the CLR loads for Layer 3 breakpoints. + +Use the `DOTNET_LAUNCH_MANAGED_DEBUGGER` mechanism: the AOT bridge detects the +native debugger and sets the environment variable, causing the managed path to +call `Debugger.Launch()`. This gives you a window to attach the managed debugger. + +### Debugger Detection + +The AOT bridge (`NativeEntryPoint.cs`) detects whether a native debugger is +attached before falling through to the managed path: + +```mermaid +flowchart TD + Check{"Native debugger
attached?"} + Win["Windows: IsDebuggerPresent()"] + Linux["Linux: /proc/self/status
TracerPid != 0"] + SetEnv["Set DOTNET_LAUNCH_MANAGED_DEBUGGER=1"] + Continue["Continue to managed fallback"] + + Check --> Win + Check --> Linux + Win -- "true" --> SetEnv + Linux -- "true" --> SetEnv + Win -- "false" --> Continue + Linux -- "false" --> Continue + SetEnv --> Continue +``` + +When the managed CLI starts and sees `DOTNET_LAUNCH_MANAGED_DEBUGGER=1`, it +calls `System.Diagnostics.Debugger.Launch()`, which triggers the JIT debugger +dialog (or auto-attaches in configured environments). + +## Limitations and Caveats + +### AOT Layer Limitations + +- **No reflection** — AOT code cannot use unbounded reflection. The AOT parser + must be manually maintained. +- **No dynamic loading** — Assemblies cannot be loaded at runtime in AOT layers. +- **Limited exception inspection** — In the native debugger, managed exception + types appear with mangled names (e.g., `S_P_CoreLib_System_Exception`). + Inspecting exception messages requires casting pointers manually. +- **No Edit and Continue** — Not available for AOT-compiled code. +- **No Hot Reload** — Not available for AOT-compiled code. + +### Debugging Limitations + +- **No single-session mixed-mode in VS Code** — Must use two debugger sessions. +- **Managed breakpoints don't bind in AOT code** — The managed debugger engine + (`coreclr`) cannot see code that has no IL. Breakpoints set via the managed + debugger in files compiled by Layer 2 will not hit. +- **Breakpoint binding delay for Layer 3** — Managed breakpoints only bind after + `hostfxr_run_app` loads CoreCLR. Before that, they appear as hollow circles. +- **Generic type inspection** — Generic types in AOT code have mangled names + that include instantiation info, making Watch/Locals windows harder to read. +- **Stepping across the hosting boundary** — You cannot seamlessly step from + AOT code into managed code. The hostfxr call is opaque; you need to set a + breakpoint on the managed side and continue. 
+ +### Platform-Specific Notes + +| Platform | Native Debugger | Symbol Format | Notes | +|----------|-----------------|---------------|-------| +| Windows | `cppvsdbg` / WinDbg | `.pdb` (native) | Full VS integration via vcxproj | +| Linux | `cppdbg` (gdb/lldb) | DWARF (`.dbg`) | Ensure `.dbg` is alongside binary | +| macOS | `cppdbg` (lldb) | `.dSYM` directory | `dsymutil` runs automatically | + +## Future Work + +- **Muxer integration** — The muxer's `try_invoke_aot_sdk` function + ([dotnet/runtime#126171](https://github.com/dotnet/runtime/issues/126171)) + already calls `dotnet_execute` from the resolved SDK directory, passing + `host_path`, `dotnet_root`, `sdk_dir`, and `hostfxr_path`. `dn.exe` + emulates this same contract for local development and testing. +- **Remove AOT commands from managed package** — After the AOT path is + validated and shipping, the `#if CLI_AOT` implementations in `Parser.cs` + and `Program.cs` can be removed from the managed `dotnet.dll` build. +- **Expand AOT-handled commands** — Move more commands into the AOT parser to + reduce fallback frequency. +- **Async managed host initialization** — Start loading CoreCLR while parsing + to hide runtime startup latency on fallback paths. +- **Single-binary distribution** — Explore embedding `dotnet-aot.dll` as a + static library linked directly into `dn.exe` (or the muxer). diff --git a/src/Cli/dotnet-aot/ManagedHost.cs b/src/Cli/dotnet-aot/ManagedHost.cs new file mode 100644 index 000000000000..ed6905aa7373 --- /dev/null +++ b/src/Cli/dotnet-aot/ManagedHost.cs @@ -0,0 +1,222 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using Microsoft.DotNet.NativeWrapper; + +namespace Microsoft.DotNet.Cli; + +/// +/// Hosts the .NET runtime and provides the ability to load and invoke managed assemblies. +/// Designed to be initialized asynchronously so the runtime is ready when needed. 
+/// +internal sealed unsafe class ManagedHost : IDisposable +{ + private nint _hostContextHandle; + private delegate* unmanaged[Cdecl] _loadAssemblyAndGetFunctionPointer; + private Task? _initTask; + private readonly string _runtimeConfigPath; + private readonly string _dotnetRoot; + private bool _disposed; + + /// + /// Creates a new for the given runtime config. + /// + /// Path to the .runtimeconfig.json file. + /// Path to the .NET installation root. + public ManagedHost(string runtimeConfigPath, string dotnetRoot) + { + _runtimeConfigPath = runtimeConfigPath; + _dotnetRoot = dotnetRoot; + } + + /// + /// Starts initializing the .NET runtime asynchronously. + /// + public void StartInitialization() + { + _initTask = Task.Run(Initialize); + } + + /// + /// Ensures the runtime is initialized, blocking if necessary. + /// + public void EnsureInitialized() + { + if (_initTask is null) + { + Initialize(); + } + else + { + _initTask.GetAwaiter().GetResult(); + } + } + + /// + /// Loads the specified assembly and invokes an [UnmanagedCallersOnly] method + /// with the default component entry point signature: int fn(nint args, int sizeBytes). + /// + /// Full path to the managed assembly. + /// Fully qualified type name (e.g., "Microsoft.DotNet.Cli.Program, dotnet"). + /// Method name to invoke. + /// The exit code returned by the managed method. 
+ public int InvokeMethod(string assemblyPath, string typeName, string methodName) + { + EnsureInitialized(); + + if (_loadAssemblyAndGetFunctionPointer is null) + { + throw new InvalidOperationException("Runtime initialization failed - load_assembly_and_get_function_pointer is not available."); + } + + nint functionPointer; + + nint assemblyPathNative = PlatformStringMarshaller.ConvertToUnmanaged(assemblyPath); + nint typeNameNative = PlatformStringMarshaller.ConvertToUnmanaged(typeName); + nint methodNameNative = PlatformStringMarshaller.ConvertToUnmanaged(methodName); + + try + { + // delegate_type_name = -1 means UNMANAGEDCALLERSONLY_METHOD, reserved = 0 + int result = _loadAssemblyAndGetFunctionPointer( + assemblyPathNative, + typeNameNative, + methodNameNative, + -1, + 0, + (nint)(&functionPointer)); + + if (result != 0) + { + throw new InvalidOperationException($"Failed to load assembly and get function pointer. HRESULT: 0x{result:X8}"); + } + } + finally + { + PlatformStringMarshaller.Free(assemblyPathNative); + PlatformStringMarshaller.Free(typeNameNative); + PlatformStringMarshaller.Free(methodNameNative); + } + + var entryPoint = (delegate* unmanaged[Cdecl])functionPointer; + return entryPoint(0, 0); + } + + /// + /// Runs the managed application using the hostfxr command-line hosting path. + /// This is the simplest way to invoke dotnet.dll Program.Main(args). + /// + /// Path to the host executable (e.g., dotnet.exe). + /// Path to the .NET installation root. + /// Path to the hostfxr library. + /// Command-line arguments (first element should be the app path). + /// The application exit code. 
+ public static int RunApp(string hostPath, string dotnetRoot, string hostfxrPath, string[] args) + { + var parameters = new Interop.hostfxr_initialize_parameters + { + size = sizeof(Interop.hostfxr_initialize_parameters), + host_path = PlatformStringMarshaller.ConvertToUnmanaged(hostPath), + dotnet_root = PlatformStringMarshaller.ConvertToUnmanaged(dotnetRoot), + }; + + try + { + StatusCode result = Interop.hostfxr_initialize_for_dotnet_command_line( + args.Length, + args, + in parameters, + out nint handle); + + if (result != StatusCode.Success && handle == 0) + { + throw new InvalidOperationException($"hostfxr_initialize_for_dotnet_command_line failed. Status: {result} (0x{(uint)result:X8})"); + } + + try + { + // Set HOSTFXR_PATH property to match the muxer's behavior for SDK commands. + // The muxer sets this when is_sdk_command=true so the SDK can load hostfxr + // without relying on dlopen/LoadLibrary to find it. + if (!string.IsNullOrEmpty(hostfxrPath)) + { + StatusCode propertyResult = Interop.hostfxr_set_runtime_property_value( + handle, Constants.RuntimeProperty.HostFxrPath, hostfxrPath); + if (propertyResult != StatusCode.Success) + { + throw new InvalidOperationException( + $"hostfxr_set_runtime_property_value failed for {Constants.RuntimeProperty.HostFxrPath}. 
Status: {propertyResult} (0x{(uint)propertyResult:X8})"); + } + } + + StatusCode appResult = Interop.hostfxr_run_app(handle); + return (int)appResult; + } + finally + { + Interop.hostfxr_close(handle); + } + } + finally + { + PlatformStringMarshaller.Free(parameters.host_path); + PlatformStringMarshaller.Free(parameters.dotnet_root); + } + } + + private void Initialize() + { + nint dotnetRootNative = PlatformStringMarshaller.ConvertToUnmanaged(_dotnetRoot); + nint runtimeConfigPathNative = PlatformStringMarshaller.ConvertToUnmanaged(_runtimeConfigPath); + + try + { + var parameters = new Interop.hostfxr_initialize_parameters + { + size = sizeof(Interop.hostfxr_initialize_parameters), + dotnet_root = dotnetRootNative, + }; + + StatusCode result = Interop.hostfxr_initialize_for_runtime_config( + runtimeConfigPathNative, + in parameters, + out _hostContextHandle); + + if (result != StatusCode.Success && _hostContextHandle == 0) + { + throw new InvalidOperationException($"hostfxr_initialize_for_runtime_config failed. Status: {result} (0x{(uint)result:X8})"); + } + + nint loadAssemblyDelegate; + result = Interop.hostfxr_get_runtime_delegate( + _hostContextHandle, + Interop.hostfxr_delegate_type.hdt_load_assembly_and_get_function_pointer, + out loadAssemblyDelegate); + + if (result != StatusCode.Success) + { + Interop.hostfxr_close(_hostContextHandle); + _hostContextHandle = 0; + throw new InvalidOperationException($"hostfxr_get_runtime_delegate failed. 
Status: {result} (0x{(uint)result:X8})"); + } + + _loadAssemblyAndGetFunctionPointer = + (delegate* unmanaged[Cdecl])loadAssemblyDelegate; + } + finally + { + PlatformStringMarshaller.Free(dotnetRootNative); + PlatformStringMarshaller.Free(runtimeConfigPathNative); + } + } + + public void Dispose() + { + if (!_disposed && _hostContextHandle != 0) + { + Interop.hostfxr_close(_hostContextHandle); + _hostContextHandle = 0; + _disposed = true; + } + } +} diff --git a/src/Cli/dotnet-aot/NativeEntryPoint.cs b/src/Cli/dotnet-aot/NativeEntryPoint.cs new file mode 100644 index 000000000000..dc71475d4dbf --- /dev/null +++ b/src/Cli/dotnet-aot/NativeEntryPoint.cs @@ -0,0 +1,64 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using Microsoft.DotNet.NativeWrapper; + +namespace Microsoft.DotNet.Cli; + +static unsafe partial class NativeEntryPoint +{ + [UnmanagedCallersOnly(EntryPoint = "dotnet_execute")] + static int Execute( + nint hostPathPtr, // const char_t* host_path + nint dotnetRootPtr, // const char_t* dotnet_root + nint sdkDirPtr, // const char_t* sdk_dir + nint hostfxrPathPtr, // const char_t* hostfxr_path + int argc, // int argc (user args, no dotnet exe) + nint argvPtr) // const char_t** argv + { + string hostPath = PlatformStringMarshaller.ConvertToManaged(hostPathPtr) ?? string.Empty; + string dotnetRoot = PlatformStringMarshaller.ConvertToManaged(dotnetRootPtr) ?? string.Empty; + string sdkDir = PlatformStringMarshaller.ConvertToManaged(sdkDirPtr) ?? string.Empty; + string hostfxrPath = PlatformStringMarshaller.ConvertToManaged(hostfxrPathPtr) ?? 
string.Empty; + + // Make hostfxr discoverable for NativeWrapper P/Invokes (required on non-Windows) + if (!string.IsNullOrEmpty(hostfxrPath)) + { + AppContext.SetData("HOSTFXR_PATH", hostfxrPath); + } + + string[] args = new string[argc]; + nint* argv = (nint*)argvPtr; + for (int i = 0; i < argc; i++) + { + args[i] = PlatformStringMarshaller.ConvertToManaged(argv[i]) ?? string.Empty; + } + + // Try the AOT-compiled path for supported commands (if enabled) + if (EnvironmentVariableParser.ParseBool(Environment.GetEnvironmentVariable(EnvironmentVariableNames.DOTNET_CLI_ENABLEAOT), defaultValue: false)) + { + var parseResult = Parser.Parse(args); + if (parseResult.Errors.Count == 0) + { + return Parser.Invoke(parseResult); + } + } + + // Fall back to the fully managed dotnet CLI by hosting .NET + string dotnetDll = Path.Join(sdkDir, "dotnet.dll"); + string runtimeConfig = Path.Join(sdkDir, "dotnet.runtimeconfig.json"); + + if (File.Exists(dotnetDll) && File.Exists(runtimeConfig)) + { + // Use the command-line hosting path to run dotnet.dll + string[] appArgs = new string[args.Length + 1]; + appArgs[0] = dotnetDll; + Array.Copy(args, 0, appArgs, 1, args.Length); + return ManagedHost.RunApp(hostPath, dotnetRoot, hostfxrPath, appArgs); + } + + // No managed fallback available + Console.Error.WriteLine($"The managed fallback could not be located. 
Expected '{dotnetDll}' and '{runtimeConfig}'."); + return 1; + } +} diff --git a/src/Cli/dotnet-aot/dotnet-aot.csproj b/src/Cli/dotnet-aot/dotnet-aot.csproj new file mode 100644 index 000000000000..87ccce2bf072 --- /dev/null +++ b/src/Cli/dotnet-aot/dotnet-aot.csproj @@ -0,0 +1,46 @@ + + + + $(SdkTargetFramework) + dotnet-aot + Library + Shared + true + true + false + false + MicrosoftAspNetCore + true + true + Microsoft.DotNet.Cli + $(DefineConstants);EXCLUDE_ASPNETCORE + $(DefineConstants);DotnetCsproj;CLI_AOT + false + true + + + $(NoWarn);CS8002 + + + $(NoWarn);CA1416 + + + + + + + + + + + + + + + + + + + + + diff --git a/src/Cli/dotnet/CommandFactory/CommandResolution/MuxerCommandSpecMaker.cs b/src/Cli/dotnet/CommandFactory/CommandResolution/MuxerCommandSpecMaker.cs index 9a958f15d5e7..0473f8c55a9a 100644 --- a/src/Cli/dotnet/CommandFactory/CommandResolution/MuxerCommandSpecMaker.cs +++ b/src/Cli/dotnet/CommandFactory/CommandResolution/MuxerCommandSpecMaker.cs @@ -19,7 +19,7 @@ internal static CommandSpec CreatePackageCommandSpecUsingMuxer(string commandPat arguments.Add(commandPath); var filteredCommandArgs = rollForwardArgument.Any() - ? commandArguments.Except(rollForwardArgument) + ? 
commandArguments.Where(arg => !arg.Equals("--allow-roll-forward", StringComparison.OrdinalIgnoreCase)) : commandArguments; arguments.AddRange(filteredCommandArgs); diff --git a/src/Cli/dotnet/CommandLineInfo.cs b/src/Cli/dotnet/CommandLineInfo.cs index b1a4cd319ae5..b4b99b807711 100644 --- a/src/Cli/dotnet/CommandLineInfo.cs +++ b/src/Cli/dotnet/CommandLineInfo.cs @@ -3,10 +3,14 @@ #nullable disable +#if !CLI_AOT using Microsoft.DotNet.Cli.Commands.Workload; +#endif using Microsoft.DotNet.Cli.Utils; +#if !CLI_AOT using LocalizableStrings = Microsoft.DotNet.Cli.Utils.LocalizableStrings; using RuntimeEnvironment = Microsoft.DotNet.Cli.Utils.RuntimeEnvironment; +#endif namespace Microsoft.DotNet.Cli; @@ -21,21 +25,36 @@ public static void PrintInfo() { DotnetVersionFile versionFile = DotnetFiles.VersionFileObject; var commitSha = versionFile.CommitSha ?? "N/A"; +#if CLI_AOT + Reporter.Output.WriteLine(".NET SDK:"); +#else Reporter.Output.WriteLine($"{LocalizableStrings.DotNetSdkInfoLabel}"); +#endif Reporter.Output.WriteLine($" Version: {Product.Version}"); Reporter.Output.WriteLine($" Commit: {commitSha}"); +#if !CLI_AOT Reporter.Output.WriteLine($" Workload version: {WorkloadInfoHelper.GetWorkloadsVersion()}"); Reporter.Output.WriteLine($" MSBuild version: {MSBuildForwardingAppWithoutLogging.MSBuildVersion}"); +#endif Reporter.Output.WriteLine(); +#if CLI_AOT + Reporter.Output.WriteLine("Runtime Environment:"); + Reporter.Output.WriteLine($" OS Name: {System.Runtime.InteropServices.RuntimeInformation.OSDescription}"); + Reporter.Output.WriteLine($" OS Platform: {(OperatingSystem.IsWindows() ? "Windows" : OperatingSystem.IsMacOS() ? 
"Darwin" : "Linux")}"); +#else Reporter.Output.WriteLine($"{LocalizableStrings.DotNetRuntimeInfoLabel}"); Reporter.Output.WriteLine($" OS Name: {RuntimeEnvironment.OperatingSystem}"); Reporter.Output.WriteLine($" OS Version: {RuntimeEnvironment.OperatingSystemVersion}"); Reporter.Output.WriteLine($" OS Platform: {RuntimeEnvironment.OperatingSystemPlatform}"); - Reporter.Output.WriteLine($" RID: {GetDisplayRid(versionFile)}"); +#endif + Reporter.Output.WriteLine($" RID: {RuntimeInformation.RuntimeIdentifier}"); Reporter.Output.WriteLine($" Base Path: {AppContext.BaseDirectory}"); +#if !CLI_AOT PrintWorkloadsInfo(); +#endif } +#if !CLI_AOT private static void PrintWorkloadsInfo() { Reporter.Output.WriteLine(); @@ -55,4 +74,5 @@ private static string GetDisplayRid(DotnetVersionFile versionFile) currentRid : versionFile.BuildRid; } +#endif } diff --git a/src/Cli/dotnet/Commands/CliCommandStrings.resx b/src/Cli/dotnet/Commands/CliCommandStrings.resx index b0aa3faeb66b..f0b4b0883ff6 100644 --- a/src/Cli/dotnet/Commands/CliCommandStrings.resx +++ b/src/Cli/dotnet/Commands/CliCommandStrings.resx @@ -1564,6 +1564,10 @@ Tool '{1}' (version '{2}') was successfully installed. Entry is added to the man No - keep all source files + + Multiple referenced files would be converted into the same directory: '{0}' + {0} is the target directory path. + PROJECT_MANIFEST @@ -1729,6 +1733,22 @@ The default is to publish a framework-dependent application. Warning: Settings from '{0}' are not used because '{1}' has precedence. {0} is an app.run.json file path. {1} is a launchSettings.json file path.
+ + Warning: '{0}' appears to be a file-based app but was passed as an argument to the project '{1}'. To run it as a file-based app, use 'dotnet run --file {0}'. To pass it as an application argument, use 'dotnet run -- {0}' to suppress this warning. + {0} is the file path argument. {1} is the project file path.{Locked="dotnet run --file"}{Locked="dotnet run --"} + + + Warning: '{0}' looks like a file-based app but the file was not found, and it was passed as an argument to the project '{1}'. To pass it as an application argument, use 'dotnet run -- {0}' to suppress this warning. + {0} is the .cs file path argument. {1} is the project file path.{Locked="dotnet run --"} + + + Warning: '{0}' appears to be a file-based app but was treated as an MSBuild argument. To treat it as a file-based app, use 'dotnet {1} {0}'. + {0} is the file path argument. {1} is the command name (e.g. build, clean, publish).{Locked="dotnet"} + + + Warning: '{0}' looks like a file-based app but the file was not found, and it was treated as an MSBuild argument. + {0} is the .cs file path argument. + Specify which project file to use because {0} contains more than one project file. diff --git a/src/Cli/dotnet/Commands/DotNetCommandFactory.cs b/src/Cli/dotnet/Commands/DotNetCommandFactory.cs index 56d8f1dc4f59..c122888235f5 100644 --- a/src/Cli/dotnet/Commands/DotNetCommandFactory.cs +++ b/src/Cli/dotnet/Commands/DotNetCommandFactory.cs @@ -6,8 +6,10 @@ using System.Diagnostics; using Microsoft.DotNet.Cli.CommandFactory; using Microsoft.DotNet.Cli.CommandLine; +using Microsoft.DotNet.Cli.Commands; using Microsoft.DotNet.Cli.Commands.Run; using Microsoft.DotNet.Cli.Utils; +using Microsoft.DotNet.Cli.Utils.Extensions; using Microsoft.DotNet.ProjectTools; using NuGet.Frameworks; @@ -72,6 +74,26 @@ internal static CommandBase CreateVirtualOrPhysicalCommand( } else { + // Warn if any argument looks like a file-based program entry point but we're falling back to MSBuild. 
+ // This can happen when extra positional arguments prevent the single-arg file-based path from being taken, + // or when a .cs file doesn't exist (so IsValidEntryPointPath returns false). + foreach (var candidate in nonBinLogArgs) + { + if (VirtualProjectBuilder.IsValidEntryPointPath(candidate)) + { + Reporter.Error.WriteLine( + string.Format(CliCommandStrings.WarningFileArgumentPassedToMSBuild, candidate, commandDefinition.Name).Yellow()); + break; + } + + if (candidate.EndsWith(".cs", StringComparison.OrdinalIgnoreCase)) + { + Reporter.Error.WriteLine( + string.Format(CliCommandStrings.WarningCsFileArgumentPassedToMSBuild, candidate).Yellow()); + break; + } + } + var msbuildArgs = MSBuildArgs.AnalyzeMSBuildArguments([.. forwardedArgs, .. args], [.. optionsToUseWhenParsingMSBuildFlags]); msbuildArgs = transformer?.Invoke(msbuildArgs) ?? msbuildArgs; return createPhysicalCommand(msbuildArgs, msbuildPath); diff --git a/src/Cli/dotnet/Commands/NuGet/NuGetCommand.cs b/src/Cli/dotnet/Commands/NuGet/NuGetCommand.cs index 9a68c31b22e7..296e57ede7db 100644 --- a/src/Cli/dotnet/Commands/NuGet/NuGetCommand.cs +++ b/src/Cli/dotnet/Commands/NuGet/NuGetCommand.cs @@ -6,19 +6,36 @@ using System.CommandLine; using Microsoft.DotNet.Cli.Extensions; using Microsoft.DotNet.Cli.Utils; +using Microsoft.DotNet.ProjectTools; namespace Microsoft.DotNet.Cli.Commands.NuGet; -public class NuGetCommand +internal class NuGetCommand { - public static int Run(string[] args) + public static int Run(string[] args, bool isFileBasedApp = false) { - return Run(args, new NuGetCommandRunner()); + return Run(args, isFileBasedApp + ? 
new InProcessNuGetCommandRunner(NuGetVirtualProjectBuilder.Instance) + : new NuGetCommandRunner()); } public static int Run(ParseResult parseResult) { - return Run(parseResult.GetArguments(), new NuGetCommandRunner()); + ICommandRunner runner; + + if (parseResult.CommandResult.Command.Name == "why" + && parseResult.CommandResult.Command.Arguments.FirstOrDefault() is Argument pathArg + && parseResult.GetValue(pathArg) is { } path + && VirtualProjectBuilder.IsValidEntryPointPath(path)) + { + runner = new InProcessNuGetCommandRunner(NuGetVirtualProjectBuilder.Instance); + } + else + { + runner = new NuGetCommandRunner(); + } + + return Run(parseResult.GetArguments(), runner); } public static int Run(string[] args, ICommandRunner nugetCommandRunner) @@ -43,11 +60,28 @@ private class NuGetCommandRunner : ICommandRunner public int Run(string[] args) { var nugetApp = new NuGetForwardingApp(args); - nugetApp.WithEnvironmentVariable("DOTNET_HOST_PATH", GetDotnetPath()); + nugetApp.WithEnvironmentVariable(EnvironmentVariableNames.DOTNET_HOST_PATH, GetDotnetPath()); return nugetApp.Execute(); } } + private class InProcessNuGetCommandRunner(NuGetVirtualProjectBuilder virtualProjectBuilder) : ICommandRunner + { + public int Run(string[] args) + { + var originalDotNetHostPath = Environment.GetEnvironmentVariable(EnvironmentVariableNames.DOTNET_HOST_PATH); + Environment.SetEnvironmentVariable(EnvironmentVariableNames.DOTNET_HOST_PATH, GetDotnetPath()); + try + { + return global::NuGet.CommandLine.XPlat.Program.Run(args, virtualProjectBuilder); + } + finally + { + Environment.SetEnvironmentVariable(EnvironmentVariableNames.DOTNET_HOST_PATH, originalDotNetHostPath); + } + } + } + private static string GetDotnetPath() { return new Muxer().MuxerPath; diff --git a/src/Cli/dotnet/Commands/NuGet/NuGetVirtualProjectBuilder.cs b/src/Cli/dotnet/Commands/NuGet/NuGetVirtualProjectBuilder.cs new file mode 100644 index 000000000000..a37f3e2c7aa1 --- /dev/null +++ 
b/src/Cli/dotnet/Commands/NuGet/NuGetVirtualProjectBuilder.cs @@ -0,0 +1,47 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using Microsoft.Build.Construction; +using Microsoft.Build.Evaluation; +using Microsoft.DotNet.Cli.Commands.Package; +using Microsoft.DotNet.Cli.Commands.Run; +using Microsoft.DotNet.FileBasedPrograms; +using Microsoft.DotNet.ProjectTools; +using NuGet.CommandLine.XPlat; + +namespace Microsoft.DotNet.Cli.Commands.NuGet; + +internal sealed class NuGetVirtualProjectBuilder : IVirtualProjectBuilder +{ + public static NuGetVirtualProjectBuilder Instance => field ??= new(); + + private NuGetVirtualProjectBuilder() { } + + public bool IsValidEntryPointPath(string entryPointFilePath) => VirtualProjectBuilder.IsValidEntryPointPath(entryPointFilePath); + + public string GetVirtualProjectPath(string entryPointFilePath) => VirtualProjectBuilder.GetVirtualProjectPath(entryPointFilePath); + + public ProjectRootElement CreateProjectRootElement(string entryPointFilePath, ProjectCollection projectCollection) + { + if (!Path.IsPathFullyQualified(entryPointFilePath)) + { + throw new ArgumentException($"'{entryPointFilePath}' is not a fully qualified path.", paramName: nameof(entryPointFilePath)); + } + + var builder = new VirtualProjectBuilder(entryPointFilePath, VirtualProjectBuildingCommand.TargetFramework); + + builder.CreateProjectInstance( + projectCollection, + ErrorReporters.IgnoringReporter, + project: out _, + out var projectRootElement, + evaluatedDirectives: out _); + + return projectRootElement; + } + + public void SaveProject(string entryPointFilePath, ProjectRootElement projectRootElement) + { + VirtualProjectPackageReflector.ReflectChangesToDirectives(projectRootElement, entryPointFilePath); + } +} diff --git a/src/Cli/dotnet/Commands/Package/Add/PackageAddCommand.cs b/src/Cli/dotnet/Commands/Package/Add/PackageAddCommand.cs index 
7c2bbed46ac4..ceba7066a361 100644 --- a/src/Cli/dotnet/Commands/Package/Add/PackageAddCommand.cs +++ b/src/Cli/dotnet/Commands/Package/Add/PackageAddCommand.cs @@ -3,17 +3,12 @@ using System.CommandLine; using System.Diagnostics; -using Microsoft.Build.Construction; -using Microsoft.Build.Evaluation; -using Microsoft.CodeAnalysis; using Microsoft.DotNet.Cli.CommandLine; using Microsoft.DotNet.Cli.Commands.MSBuild; using Microsoft.DotNet.Cli.Commands.NuGet; using Microsoft.DotNet.Cli.Commands.Run; using Microsoft.DotNet.Cli.Utils; -using Microsoft.DotNet.FileBasedPrograms; using Microsoft.DotNet.ProjectTools; -using NuGet.ProjectModel; namespace Microsoft.DotNet.Cli.Commands.Package.Add; @@ -31,16 +26,14 @@ public override int Execute() { var (fileOrDirectory, allowedAppKinds) = PackageCommandParser.ProcessPathOptions(Definition.FileOption, Definition.ProjectOption, Definition.GetProjectOrFileArgument(), _parseResult); - if (allowedAppKinds.HasFlag(AppKinds.FileBased) && VirtualProjectBuilder.IsValidEntryPointPath(fileOrDirectory)) - { - return ExecuteForFileBasedApp(fileOrDirectory); - } + bool isFileBasedApp = allowedAppKinds.HasFlag(AppKinds.FileBased) && VirtualProjectBuilder.IsValidEntryPointPath(fileOrDirectory); - Debug.Assert(allowedAppKinds.HasFlag(AppKinds.ProjectBased)); + Debug.Assert(isFileBasedApp || allowedAppKinds.HasFlag(AppKinds.ProjectBased)); string projectFilePath; if (!File.Exists(fileOrDirectory)) { + Debug.Assert(!isFileBasedApp); projectFilePath = MsbuildProject.GetProjectFileFromDirectory(fileOrDirectory); } else @@ -48,11 +41,15 @@ public override int Execute() projectFilePath = fileOrDirectory; } + if (isFileBasedApp) + { + projectFilePath = Path.GetFullPath(projectFilePath); + } + var tempDgFilePath = string.Empty; if (!_parseResult.GetValue(Definition.NoRestoreOption)) { - try { // Create a Dependency Graph file for the project @@ -64,46 +61,69 @@ public override int Execute() throw new 
GracefulException(string.Format(CliCommandStrings.CmdDGFileIOException, projectFilePath), ioex); } - GetProjectDependencyGraph(projectFilePath, tempDgFilePath); + GetProjectDependencyGraph(projectFilePath, tempDgFilePath, isFileBasedApp); } - var result = NuGetCommand.Run( - TransformArgs( - _packageId, - tempDgFilePath, - projectFilePath)); + var args = TransformArgs( + _packageId, + tempDgFilePath, + projectFilePath); + + var result = NuGetCommand.Run(args, isFileBasedApp); + DisposeTemporaryFile(tempDgFilePath); return result; } - private static void GetProjectDependencyGraph(string projectFilePath, string dgFilePath) + private static void GetProjectDependencyGraph(string projectFilePath, string dgFilePath, bool isFileBasedApp) { - List args = - [ - // Pass the project file path - projectFilePath, - - // Pass the task as generate restore Dependency Graph file - "-target:GenerateRestoreGraphFile", + int result; + if (isFileBasedApp) + { + result = new VirtualProjectBuildingCommand( + projectFilePath, + MSBuildArgs + .FromProperties(new Dictionary + { + { "RestoreGraphOutputPath", dgFilePath }, + { "RestoreRecursive", "false" }, + { "RestoreDotnetCliToolReferences", "false" }, + }.AsReadOnly()) + .CloneWithVerbosity(VerbosityOptions.quiet) + .CloneWithAdditionalTargets("GenerateRestoreGraphFile")) + { + NoRestore = true, + NoCache = true, + NoWriteBuildMarkers = true, + }.Execute(); + } + else + { + result = new MSBuildForwardingApp( + [ + // Pass the project file path + projectFilePath, - // Pass Dependency Graph file output path - $"-property:RestoreGraphOutputPath=\"{dgFilePath}\"", + // Pass the task as generate restore Dependency Graph file + "-target:GenerateRestoreGraphFile", - // Turn off recursive restore - $"-property:RestoreRecursive=false", + // Pass Dependency Graph file output path + $"-property:RestoreGraphOutputPath=\"{dgFilePath}\"", - // Turn off restore for Dotnet cli tool references so that we do not generate extra dg specs - 
$"-property:RestoreDotnetCliToolReferences=false", + // Turn off recursive restore + "-property:RestoreRecursive=false", - // Output should not include MSBuild version header - "--nologo", + // Turn off restore for Dotnet cli tool references so that we do not generate extra dg specs + "-property:RestoreDotnetCliToolReferences=false", - // Set verbosity to quiet to avoid cluttering the output for this 'inner' build - "-v:quiet" - ]; + // Output should not include MSBuild version header + "--nologo", - var result = new MSBuildForwardingApp(args).Execute(); + // Set verbosity to quiet to avoid cluttering the output for this 'inner' build + "-v:quiet" + ]).Execute(); + } if (result != 0) { @@ -152,210 +172,4 @@ private string[] TransformArgs(PackageIdentityWithRange packageId, string tempDg return [.. args]; } - - // More logic should live in NuGet: https://github.com/NuGet/Home/issues/14390 - private int ExecuteForFileBasedApp(string path) - { - // Check disallowed options. - ReadOnlySpan