diff --git a/.github/workflows/daily-token-consumption-report.lock.yml b/.github/workflows/daily-token-consumption-report.lock.yml
index 5c4a7a8ea13..02882267501 100644
--- a/.github/workflows/daily-token-consumption-report.lock.yml
+++ b/.github/workflows/daily-token-consumption-report.lock.yml
@@ -1,5 +1,5 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"1653e70cf82b1d082bc423c602fdedb85a48617878823d18be1da051d86a1345","compiler_version":"v0.68.3","strict":true,"agent_id":"claude"}
-# gh-aw-manifest: {"version":1,"secrets":["ANTHROPIC_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN","SENTRY_ACCESS_TOKEN","SENTRY_OPENAI_API_KEY"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-node","sha":"53b83947a5a98c8d113130e565377fae1a50d02f","version":"v6.3.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"},{"repo":"github/gh-aw-actions/setup","sha":"v0.68.3","version":"v0.68.3"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.20","digest":"sha256:9161f2415a3306a344aca34dd671ee69f122317e0a512e66dc64c94b9c508682","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.20@sha256:9161f2415a3306a344aca34dd671ee69f122317e0a512e66dc64c94b9c508682"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.20","digest":"sha256:6971639e381e82e45134bcd333181f456df3a52cd6f818a3e3d6de068ff91519","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.20@sha256:6971639e381e82e45134bcd333181f456df3a52cd6f818a3e3d6de068ff91519"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.20","digest":"sha256:5411d903f73ee597e6a084971c2adef3eb0bd405910df3ed7bf5e3d6bd58a236","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.20@sha256:5411d903f73ee597e6a084971c2adef3eb0bd405910df3ed7bf5e3d6bd58a236"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.19","digest":"sha256:44d4d8de7e6c37aaea484eba489940c52df6a0b54078ddcbc9327592d5b3c3dd","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.2.19@sha256:44d4d8de7e6c37aaea484eba489940c52df6a0b54078ddcbc9327592d5b3c3dd"},{"image":"ghcr.io/github/github-mcp-server:v0.32.0","digest":"sha256:2763823c63bcca718ce53850a1d7fcf2f50
1ec84028394f1b63ce7e9f4f9be28","pinned_image":"ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","pinned_image":"node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"1653e70cf82b1d082bc423c602fdedb85a48617878823d18be1da051d86a1345","strict":true,"agent_id":"claude"}
+# gh-aw-manifest: {"version":1,"secrets":["ANTHROPIC_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN","SENTRY_ACCESS_TOKEN","SENTRY_OPENAI_API_KEY"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.28","digest":"sha256:a8834e285807654bf680154faa710d43fe4365a0868142f5c20e48c85e137a7a","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.28@sha256:a8834e285807654bf680154faa710d43fe4365a0868142f5c20e48c85e137a7a"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.28","digest":"sha256:93290f2393752252911bd7c39a047f776c0b53063575e7bde4e304962a9a61cb","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.28@sha256:93290f2393752252911bd7c39a047f776c0b53063575e7bde4e304962a9a61cb"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.28","digest":"sha256:844c18280f82cd1b06345eb2f4e91966b34185bfc51c9f237c3e022e848fb474","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.28@sha256:844c18280f82cd1b06345eb2f4e91966b34185bfc51c9f237c3e022e848fb474"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.30","digest":"sha256:e950e6d39f003862d33bfb8d4eb93e242d919cf6ca874b90728e5e0ea7434c6f","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.2.30@sha256:e950e6d39f003862d33bfb8d4eb93e242d919cf6ca874b90728e5e0ea7434c6f"},{"image":"ghcr.io/github/github-mcp-server:v1.0.0","digest":"sha256:d2550953f8050bc5a1c8f80d1678766f66f60bbfbcd953fdeaf661fe4269bd95","pinned_image":"ghcr.io/github/github-mcp-se
rver:v1.0.0@sha256:d2550953f8050bc5a1c8f80d1678766f66f60bbfbcd953fdeaf661fe4269bd95"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","pinned_image":"node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]}
# ___ _ _
# / _ \ | | (_)
# | |_| | __ _ ___ _ __ | |_ _ ___
@@ -14,7 +14,7 @@
# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \
# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/
#
-# This file was automatically generated by gh-aw (v0.68.3). DO NOT EDIT.
+# This file was automatically generated by gh-aw. DO NOT EDIT.
#
# To update this file, edit the corresponding .md file and run:
# gh aw compile
@@ -47,22 +47,21 @@
# - actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
# - actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
# - actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
-# - actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
+# - actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
# - actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
-# - github/gh-aw-actions/setup@v0.68.3
#
# Container images used:
-# - ghcr.io/github/gh-aw-firewall/agent:0.25.20@sha256:9161f2415a3306a344aca34dd671ee69f122317e0a512e66dc64c94b9c508682
-# - ghcr.io/github/gh-aw-firewall/api-proxy:0.25.20@sha256:6971639e381e82e45134bcd333181f456df3a52cd6f818a3e3d6de068ff91519
-# - ghcr.io/github/gh-aw-firewall/squid:0.25.20@sha256:5411d903f73ee597e6a084971c2adef3eb0bd405910df3ed7bf5e3d6bd58a236
-# - ghcr.io/github/gh-aw-mcpg:v0.2.19@sha256:44d4d8de7e6c37aaea484eba489940c52df6a0b54078ddcbc9327592d5b3c3dd
-# - ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28
+# - ghcr.io/github/gh-aw-firewall/agent:0.25.28@sha256:a8834e285807654bf680154faa710d43fe4365a0868142f5c20e48c85e137a7a
+# - ghcr.io/github/gh-aw-firewall/api-proxy:0.25.28@sha256:93290f2393752252911bd7c39a047f776c0b53063575e7bde4e304962a9a61cb
+# - ghcr.io/github/gh-aw-firewall/squid:0.25.28@sha256:844c18280f82cd1b06345eb2f4e91966b34185bfc51c9f237c3e022e848fb474
+# - ghcr.io/github/gh-aw-mcpg:v0.2.30@sha256:e950e6d39f003862d33bfb8d4eb93e242d919cf6ca874b90728e5e0ea7434c6f
+# - ghcr.io/github/github-mcp-server:v1.0.0@sha256:d2550953f8050bc5a1c8f80d1678766f66f60bbfbcd953fdeaf661fe4269bd95
# - node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f
name: "Daily Token Consumption Report (Sentry OTel)"
"on":
schedule:
- - cron: "15 20 * * 1-5"
+ - cron: "34 10 * * 1-5"
# Friendly format: daily on weekdays (scattered)
workflow_dispatch:
inputs:
@@ -99,9 +98,16 @@ jobs:
setup-trace-id: ${{ steps.setup.outputs.trace-id }}
stale_lock_file_failed: ${{ steps.check-lock-file.outputs.stale_lock_file_failed == 'true' }}
steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
- name: Setup Scripts
id: setup
- uses: github/gh-aw-actions/setup@v0.68.3
+ uses: ./actions/setup
with:
destination: ${{ runner.temp }}/gh-aw/actions
job-name: ${{ github.job }}
@@ -113,16 +119,15 @@ jobs:
GH_AW_INFO_ENGINE_ID: "claude"
GH_AW_INFO_ENGINE_NAME: "Claude Code"
GH_AW_INFO_MODEL: ${{ vars.GH_AW_MODEL_AGENT_CLAUDE || 'auto' }}
- GH_AW_INFO_VERSION: "2.1.98"
- GH_AW_INFO_AGENT_VERSION: "2.1.98"
- GH_AW_INFO_CLI_VERSION: "v0.68.3"
+ GH_AW_INFO_VERSION: "2.1.112"
+ GH_AW_INFO_AGENT_VERSION: "2.1.112"
GH_AW_INFO_WORKFLOW_NAME: "Daily Token Consumption Report (Sentry OTel)"
GH_AW_INFO_EXPERIMENTAL: "false"
GH_AW_INFO_SUPPORTS_TOOLS_ALLOWLIST: "true"
GH_AW_INFO_STAGED: "false"
GH_AW_INFO_ALLOWED_DOMAINS: '["defaults"]'
GH_AW_INFO_FIREWALL_ENABLED: "true"
- GH_AW_INFO_AWF_VERSION: "v0.25.20"
+ GH_AW_INFO_AWF_VERSION: "v0.25.28"
GH_AW_INFO_AWMG_VERSION: ""
GH_AW_INFO_FIREWALL_TYPE: "squid"
GH_AW_COMPILED_STRICT: "true"
@@ -145,8 +150,20 @@ jobs:
sparse-checkout: |
.github
.agents
+ actions/setup
+ .claude
+ .codex
+ .crush
+ .gemini
+ .opencode
sparse-checkout-cone-mode: true
fetch-depth: 1
+ - name: Save agent config folders for base branch restoration
+ env:
+ GH_AW_AGENT_FOLDERS: ".agents .claude .codex .crush .gemini .github .opencode"
+ GH_AW_AGENT_FILES: ".crush.json AGENTS.md CLAUDE.md GEMINI.md opencode.jsonc"
+ # poutine:ignore untrusted_checkout_exec
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/save_base_github_folders.sh"
- name: Check workflow lock file
id: check-lock-file
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -159,16 +176,6 @@ jobs:
setupGlobals(core, github, context, exec, io, getOctokit);
const { main } = require('${{ runner.temp }}/gh-aw/actions/check_workflow_timestamp_api.cjs');
await main();
- - name: Check compile-agentic version
- uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
- env:
- GH_AW_COMPILED_VERSION: "v0.68.3"
- with:
- script: |
- const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
- setupGlobals(core, github, context, exec, io, getOctokit);
- const { main } = require('${{ runner.temp }}/gh-aw/actions/check_version_updates.cjs');
- await main();
- name: Create prompt with built-in context
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
@@ -196,6 +203,9 @@ jobs:
Tools: create_issue, missing_tool, missing_data, noop
+ GH_AW_PROMPT_3edd0ce42e733bd2_EOF
+ cat "${RUNNER_TEMP}/gh-aw/prompts/mcp_cli_tools_prompt.md"
+ cat << 'GH_AW_PROMPT_3edd0ce42e733bd2_EOF'
The following GitHub context information is available for this workflow:
{{#if __GH_AW_GITHUB_ACTOR__ }}
@@ -259,6 +269,7 @@ jobs:
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
+ GH_AW_MCP_CLI_SERVERS_LIST: '- `safeoutputs` — run `safeoutputs --help` to see available tools'
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -277,7 +288,8 @@ jobs:
GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER,
GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY,
GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID,
- GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE
+ GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE,
+ GH_AW_MCP_CLI_SERVERS_LIST: process.env.GH_AW_MCP_CLI_SERVERS_LIST
}
});
- name: Validate prompt placeholders
@@ -299,6 +311,7 @@ jobs:
/tmp/gh-aw/aw_info.json
/tmp/gh-aw/aw-prompts/prompt.txt
/tmp/gh-aw/github_rate_limits.jsonl
+ /tmp/gh-aw/base
if-no-files-found: ignore
retention-days: 1
@@ -327,9 +340,16 @@ jobs:
output_types: ${{ steps.collect_output.outputs.output_types }}
setup-trace-id: ${{ steps.setup.outputs.trace-id }}
steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
- name: Setup Scripts
id: setup
- uses: github/gh-aw-actions/setup@v0.68.3
+ uses: ./actions/setup
with:
destination: ${{ runner.temp }}/gh-aw/actions
job-name: ${{ github.job }}
@@ -349,7 +369,7 @@ jobs:
with:
persist-credentials: false
- name: Setup Node.js
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
+ uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: '24'
package-manager-cache: false
@@ -387,14 +407,14 @@ jobs:
const { main } = require('${{ runner.temp }}/gh-aw/actions/checkout_pr_branch.cjs');
await main();
- name: Setup Node.js
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
+ uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: '24'
package-manager-cache: false
- name: Install AWF binary
- run: bash "${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh" v0.25.20
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh" v0.25.28
- name: Install Claude Code CLI
- run: npm install --ignore-scripts -g @anthropic-ai/claude-code@2.1.98
+ run: npm install --ignore-scripts -g @anthropic-ai/claude-code@2.1.112
- name: Determine automatic lockdown mode for GitHub MCP Server
id: determine-automatic-lockdown
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -406,7 +426,7 @@ jobs:
const determineAutomaticLockdown = require('${{ runner.temp }}/gh-aw/actions/determine_automatic_lockdown.cjs');
await determineAutomaticLockdown(github, context, core);
- name: Download container images
- run: bash "${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh" ghcr.io/github/gh-aw-firewall/agent:0.25.20@sha256:9161f2415a3306a344aca34dd671ee69f122317e0a512e66dc64c94b9c508682 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.20@sha256:6971639e381e82e45134bcd333181f456df3a52cd6f818a3e3d6de068ff91519 ghcr.io/github/gh-aw-firewall/squid:0.25.20@sha256:5411d903f73ee597e6a084971c2adef3eb0bd405910df3ed7bf5e3d6bd58a236 ghcr.io/github/gh-aw-mcpg:v0.2.19@sha256:44d4d8de7e6c37aaea484eba489940c52df6a0b54078ddcbc9327592d5b3c3dd ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28 node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh" ghcr.io/github/gh-aw-firewall/agent:0.25.28@sha256:a8834e285807654bf680154faa710d43fe4365a0868142f5c20e48c85e137a7a ghcr.io/github/gh-aw-firewall/api-proxy:0.25.28@sha256:93290f2393752252911bd7c39a047f776c0b53063575e7bde4e304962a9a61cb ghcr.io/github/gh-aw-firewall/squid:0.25.28@sha256:844c18280f82cd1b06345eb2f4e91966b34185bfc51c9f237c3e022e848fb474 ghcr.io/github/gh-aw-mcpg:v0.2.30@sha256:e950e6d39f003862d33bfb8d4eb93e242d919cf6ca874b90728e5e0ea7434c6f ghcr.io/github/github-mcp-server:v1.0.0@sha256:d2550953f8050bc5a1c8f80d1678766f66f60bbfbcd953fdeaf661fe4269bd95 node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f
- name: Write Safe Outputs Config
run: |
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
@@ -595,10 +615,10 @@ jobs:
SENTRY_OPENAI_API_KEY: ${{ secrets.SENTRY_OPENAI_API_KEY }}
run: |
set -eo pipefail
- mkdir -p /tmp/gh-aw/mcp-config
+ mkdir -p "${RUNNER_TEMP}/gh-aw/mcp-config"
# Export gateway environment variables for MCP config and gateway script
- export MCP_GATEWAY_PORT="80"
+ export MCP_GATEWAY_PORT="8080"
export MCP_GATEWAY_DOMAIN="host.docker.internal"
MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=')
echo "::add-mask::${MCP_GATEWAY_API_KEY}"
@@ -609,13 +629,17 @@ jobs:
export DEBUG="*"
export GH_AW_ENGINE="claude"
- export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GITHUB_AW_OTEL_TRACE_ID -e GITHUB_AW_OTEL_PARENT_SPAN_ID -e SENTRY_ACCESS_TOKEN -e SENTRY_HOST -e SENTRY_OPENAI_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.19'
+ MCP_GATEWAY_UID=$(id -u 2>/dev/null || echo '0')
+ MCP_GATEWAY_GID=$(id -g 2>/dev/null || echo '0')
+ DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
+ export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GITHUB_AW_OTEL_TRACE_ID -e GITHUB_AW_OTEL_PARENT_SPAN_ID -e SENTRY_ACCESS_TOKEN -e SENTRY_HOST -e SENTRY_OPENAI_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.30'
- cat << GH_AW_MCP_CONFIG_d3a7510b89d26f99_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
+ GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
+ cat << GH_AW_MCP_CONFIG_d3a7510b89d26f99_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
"github": {
- "container": "ghcr.io/github/github-mcp-server:v0.32.0",
+ "container": "ghcr.io/github/github-mcp-server:v1.0.0",
"env": {
"GITHUB_HOST": "$GITHUB_SERVER_URL",
"GITHUB_PERSONAL_ACCESS_TOKEN": "$GITHUB_MCP_SERVER_TOKEN",
@@ -695,11 +719,31 @@ jobs:
}
}
GH_AW_MCP_CONFIG_d3a7510b89d26f99_EOF
+ - name: Mount MCP servers as CLIs
+ id: mount-mcp-clis
+ continue-on-error: true
+ env:
+ MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }}
+ MCP_GATEWAY_DOMAIN: ${{ steps.start-mcp-gateway.outputs.gateway-domain }}
+ MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }}
+ uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/mount_mcp_as_cli.cjs');
+ await main();
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
name: activation
path: /tmp/gh-aw
+ - name: Restore agent config folders from base branch
+ if: steps.checkout-pr.outcome == 'success'
+ env:
+ GH_AW_AGENT_FOLDERS: ".agents .claude .codex .crush .gemini .github .opencode"
+ GH_AW_AGENT_FILES: ".crush.json AGENTS.md CLAUDE.md GEMINI.md opencode.jsonc"
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/restore_base_github_folders.sh"
- name: Clean git credentials
continue-on-error: true
run: bash "${RUNNER_TEMP}/gh-aw/actions/clean_git_credentials.sh"
@@ -793,8 +837,8 @@ jobs:
touch /tmp/gh-aw/agent-step-summary.md
(umask 177 && touch /tmp/gh-aw/agent-stdio.log)
# shellcheck disable=SC1003
- sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --exclude-env ANTHROPIC_API_KEY --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.20 --skip-pull --enable-api-proxy \
- -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash,BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_
pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__sentry__analyze_issue_with_seer,mcp__sentry__find_dsns,mcp__sentry__find_organizations,mcp__sentry__find_projects,mcp__sentry__find_releases,mcp__sentry__find_teams,mcp__sentry__get_doc,mcp__sentry__get_event_attachment,mcp__sentry__get_issue_details,mcp__sentry__get_trace_details,mcp__sentry__search_docs requires SENTRY_OPENAI_API_KEY,mcp__sentry__search_events,mcp__sentry__search_issues,mcp__sentry__whoami'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --exclude-env ANTHROPIC_API_KEY --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --image-tag 0.25.28,squid=sha256:844c18280f82cd1b06345eb2f4e91966b34185bfc51c9f237c3e022e848fb474,agent=sha256:a8834e285807654bf680154faa710d43fe4365a0868142f5c20e48c85e137a7a,api-proxy=sha256:93290f2393752252911bd7c39a047f776c0b53063575e7bde4e304962a9a61cb,cli-proxy=sha256:fdf310e4678ce58d248c466b89399e9680a3003038fd19322c388559016aaac7 --skip-pull --enable-api-proxy \
+ -- /bin/bash -c 'export PATH="${RUNNER_TEMP}/gh-aw/mcp-cli/bin:$PATH" && export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --no-chrome --mcp-config "${{ runner.temp }}/gh-aw/mcp-config/mcp-servers.json" --allowed-tools '\''Bash,BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,
mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__sentry__analyze_issue_with_seer,mcp__sentry__find_dsns,mcp__sentry__find_organizations,mcp__sentry__find_projects,mcp__sentry__find_releases,mcp__sentry__find_teams,mcp__sentry__get_doc,mcp__sentry__get_event_attachment,mcp__sentry__get_issue_details,mcp__sentry__get_trace_details,mcp__sentry__search_docs requires SENTRY_OPENAI_API_KEY,mcp__sentry__search_events,mcp__sentry__search_issues,mcp__sentry__whoami'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
BASH_DEFAULT_TIMEOUT_MS: 60000
@@ -802,12 +846,12 @@ jobs:
DISABLE_BUG_COMMAND: 1
DISABLE_ERROR_REPORTING: 1
DISABLE_TELEMETRY: 1
- GH_AW_MCP_CONFIG: /tmp/gh-aw/mcp-config/mcp-servers.json
+ GH_AW_MCP_CONFIG: ${{ runner.temp }}/gh-aw/mcp-config/mcp-servers.json
GH_AW_MODEL_AGENT_CLAUDE: ${{ vars.GH_AW_MODEL_AGENT_CLAUDE || '' }}
GH_AW_PHASE: agent
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }}
- GH_AW_VERSION: v0.68.3
+ GH_AW_VERSION: dev
GITHUB_AW: true
GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md
GITHUB_WORKSPACE: ${{ github.workspace }}
@@ -910,9 +954,9 @@ jobs:
env:
AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs
run: |
- # Fix permissions on firewall logs so they can be uploaded as artifacts
+ # Fix permissions on firewall logs/audit dirs so they can be uploaded as artifacts
# AWF runs with sudo, creating files owned by root
- sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true
+ sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall 2>/dev/null || true
# Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step)
if command -v awf &> /dev/null; then
awf logs summary | tee -a "$GITHUB_STEP_SUMMARY"
@@ -988,9 +1032,16 @@ jobs:
tools_reported: ${{ steps.missing_tool.outputs.tools_reported }}
total_count: ${{ steps.missing_tool.outputs.total_count }}
steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
- name: Setup Scripts
id: setup
- uses: github/gh-aw-actions/setup@v0.68.3
+ uses: ./actions/setup
with:
destination: ${{ runner.temp }}/gh-aw/actions
job-name: ${{ github.job }}
@@ -1085,6 +1136,7 @@ jobs:
GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
GH_AW_WORKFLOW_ID: "daily-token-consumption-report"
+ GH_AW_ACTION_FAILURE_ISSUE_EXPIRES_HOURS: "12"
GH_AW_ENGINE_ID: "claude"
GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.activation.outputs.secret_verification_result }}
GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }}
@@ -1115,9 +1167,16 @@ jobs:
detection_reason: ${{ steps.detection_conclusion.outputs.reason }}
detection_success: ${{ steps.detection_conclusion.outputs.success }}
steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
- name: Setup Scripts
id: setup
- uses: github/gh-aw-actions/setup@v0.68.3
+ uses: ./actions/setup
with:
destination: ${{ runner.temp }}/gh-aw/actions
job-name: ${{ github.job }}
@@ -1147,7 +1206,7 @@ jobs:
rm -rf /tmp/gh-aw/sandbox/firewall/logs
rm -rf /tmp/gh-aw/sandbox/firewall/audit
- name: Download container images
- run: bash "${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh" ghcr.io/github/gh-aw-firewall/agent:0.25.20@sha256:9161f2415a3306a344aca34dd671ee69f122317e0a512e66dc64c94b9c508682 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.20@sha256:6971639e381e82e45134bcd333181f456df3a52cd6f818a3e3d6de068ff91519 ghcr.io/github/gh-aw-firewall/squid:0.25.20@sha256:5411d903f73ee597e6a084971c2adef3eb0bd405910df3ed7bf5e3d6bd58a236
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh" ghcr.io/github/gh-aw-firewall/agent:0.25.28@sha256:a8834e285807654bf680154faa710d43fe4365a0868142f5c20e48c85e137a7a ghcr.io/github/gh-aw-firewall/api-proxy:0.25.28@sha256:93290f2393752252911bd7c39a047f776c0b53063575e7bde4e304962a9a61cb ghcr.io/github/gh-aw-firewall/squid:0.25.28@sha256:844c18280f82cd1b06345eb2f4e91966b34185bfc51c9f237c3e022e848fb474
- name: Check if detection needed
id: detection_guard
if: always()
@@ -1165,7 +1224,7 @@ jobs:
- name: Clear MCP configuration for detection
if: always() && steps.detection_guard.outputs.run_detection == 'true'
run: |
- rm -f /tmp/gh-aw/mcp-config/mcp-servers.json
+ rm -f "${RUNNER_TEMP}/gh-aw/mcp-config/mcp-servers.json"
rm -f /home/runner/.copilot/mcp-config.json
rm -f "$GITHUB_WORKSPACE/.gemini/settings.json"
- name: Prepare threat detection files
@@ -1201,14 +1260,14 @@ jobs:
mkdir -p /tmp/gh-aw/threat-detection
touch /tmp/gh-aw/threat-detection/detection.log
- name: Setup Node.js
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
+ uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: '24'
package-manager-cache: false
- name: Install AWF binary
- run: bash "${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh" v0.25.20
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh" v0.25.28
- name: Install Claude Code CLI
- run: npm install --ignore-scripts -g @anthropic-ai/claude-code@2.1.98
+ run: npm install --ignore-scripts -g @anthropic-ai/claude-code@2.1.112
- name: Execute Claude Code CLI
if: always() && steps.detection_guard.outputs.run_detection == 'true'
id: detection_agentic_execution
@@ -1230,7 +1289,7 @@ jobs:
touch /tmp/gh-aw/agent-step-summary.md
(umask 177 && touch /tmp/gh-aw/threat-detection/detection.log)
# shellcheck disable=SC1003
- sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --exclude-env ANTHROPIC_API_KEY --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.20 --skip-pull --enable-api-proxy \
+ sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --tty --env-all --exclude-env ANTHROPIC_API_KEY --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --image-tag 0.25.28,squid=sha256:844c18280f82cd1b06345eb2f4e91966b34185bfc51c9f237c3e022e848fb474,agent=sha256:a8834e285807654bf680154faa710d43fe4365a0868142f5c20e48c85e137a7a,api-proxy=sha256:93290f2393752252911bd7c39a047f776c0b53063575e7bde4e304962a9a61cb,cli-proxy=sha256:fdf310e4678ce58d248c466b89399e9680a3003038fd19322c388559016aaac7 --skip-pull --enable-api-proxy \
-- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --no-chrome --allowed-tools Bash,BashOutput,ExitPlanMode,Glob,Grep,KillBash,LS,NotebookRead,Read,Task,TodoWrite --debug-file /tmp/gh-aw/threat-detection/detection.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_DETECTION_CLAUDE:+ --model "$GH_AW_MODEL_DETECTION_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
@@ -1242,7 +1301,7 @@ jobs:
GH_AW_MODEL_DETECTION_CLAUDE: ${{ vars.GH_AW_MODEL_DETECTION_CLAUDE || '' }}
GH_AW_PHASE: detection
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_VERSION: v0.68.3
+ GH_AW_VERSION: dev
GITHUB_AW: true
GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md
GITHUB_WORKSPACE: ${{ github.workspace }}
@@ -1304,9 +1363,16 @@ jobs:
process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }}
process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }}
steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
- name: Setup Scripts
id: setup
- uses: github/gh-aw-actions/setup@v0.68.3
+ uses: ./actions/setup
with:
destination: ${{ runner.temp }}/gh-aw/actions
job-name: ${{ github.job }}
diff --git a/.github/workflows/smoke-project.lock.yml b/.github/workflows/smoke-project.lock.yml
index 228c765f09b..3bd1d6c005e 100644
--- a/.github/workflows/smoke-project.lock.yml
+++ b/.github/workflows/smoke-project.lock.yml
@@ -1130,7 +1130,7 @@ jobs:
permissions:
contents: write
discussions: write
- issues: write
+ issues: read
pull-requests: write
concurrency:
group: "gh-aw-conclusion-smoke-project"
@@ -1499,7 +1499,7 @@ jobs:
permissions:
contents: write
discussions: write
- issues: write
+ issues: read
pull-requests: write
timeout-minutes: 15
env:
diff --git a/.github/workflows/test-project-url-default.lock.yml b/.github/workflows/test-project-url-default.lock.yml
index 98b8937c414..c4b7f8d3f78 100644
--- a/.github/workflows/test-project-url-default.lock.yml
+++ b/.github/workflows/test-project-url-default.lock.yml
@@ -892,6 +892,7 @@ jobs:
runs-on: ubuntu-slim
permissions:
contents: read
+ issues: read
concurrency:
group: "gh-aw-conclusion-test-project-url-default"
cancel-in-progress: false
@@ -1197,6 +1198,7 @@ jobs:
runs-on: ubuntu-slim
permissions:
contents: read
+ issues: read
timeout-minutes: 15
env:
GH_AW_CALLER_WORKFLOW_ID: "${{ github.repository }}/test-project-url-default"
diff --git a/docs/adr/27895-on-needs-explicit-pre-activation-job-dependencies.md b/docs/adr/27895-on-needs-explicit-pre-activation-job-dependencies.md
new file mode 100644
index 00000000000..6a47f39bf82
--- /dev/null
+++ b/docs/adr/27895-on-needs-explicit-pre-activation-job-dependencies.md
@@ -0,0 +1,78 @@
+# ADR-27895: Introduce `on.needs` for Explicit Pre-Activation Job Dependencies
+
+**Date**: 2026-04-22
+**Status**: Draft
+**Deciders**: pelikhan
+
+---
+
+## Part 1 — Narrative (Human-Friendly)
+
+### Context
+
+Workflow frontmatter supports `on.github-app` credentials that allow the activation job to mint a short-lived GitHub App token. Some security postures require fetching the App ID and private key from an external secret manager at runtime (e.g., HashiCorp Vault, AWS Secrets Manager) via a dedicated job that runs before activation. Prior to this change there was no way to declare such a dependency explicitly: the `pre_activation` and `activation` jobs always ran as the earliest jobs in the graph, making `${{ needs.<job_id>.outputs.* }}` expressions in `on.github-app` always resolve to empty values at runtime.
+
+### Decision
+
+We will add an `on.needs` array field to the workflow frontmatter `on:` section. Jobs listed in `on.needs` are wired as explicit dependencies of both `pre_activation` and `activation` so that their outputs are available before credential resolution occurs. Jobs in `on.needs` are excluded from the automatic `needs: activation` guard that would normally force custom jobs to run after activation. Validation ensures that only declared custom jobs (not built-in control jobs) can appear in `on.needs`, and that `on.github-app` expression references resolve exclusively to jobs available before activation.
+
+### Alternatives Considered
+
+#### Alternative 1: Auto-detect credential-supply jobs from `on.github-app` expressions
+
+The compiler could parse `${{ needs.<job_id>.outputs.* }}` expressions in `on.github-app` fields and automatically promote those jobs to pre-activation dependencies without any user declaration. This approach was not chosen because it would make dependency wiring implicit and hard to reason about: a typo or expression change could silently break the dependency graph in non-obvious ways. Explicit declaration (`on.needs`) keeps the dependency contract visible in the frontmatter.
+
+#### Alternative 2: Require credential-supply logic to be inlined as `on.steps`
+
+The existing `on.steps` mechanism allows injecting arbitrary steps into the pre-activation job. Users could fetch credentials there instead of in a separate job. This was not chosen because it conflates credential supply with pre-activation gate logic, prevents parallel execution of credential-fetch and other pre-activation checks, and does not work for teams that already have standalone credential-supply jobs they want to reuse across multiple workflows.
+
+### Consequences
+
+#### Positive
+- `on.github-app` expressions can now reference `needs.<job_id>.outputs.*` values from jobs that run before activation, enabling dynamic credential supply from external secret managers.
+- Validation at compile time rejects invalid `on.needs` entries (built-in job names, cycle-prone jobs, undeclared jobs), turning silent runtime failures into clear compiler errors.
+
+#### Negative
+- Jobs listed in `on.needs` are exempt from the automatic `needs: activation` safeguard, meaning they run before the activation gate. This widens the surface of pre-activation execution, which must be considered when auditing workflow security.
+- Introduces a new top-level frontmatter concept (`on.needs`) that users must learn; documentation and validation errors must be clear enough to avoid confusion with GitHub Actions' job-level `needs:` field.
+
+#### Neutral
+- When `on.needs` is non-empty and no other pre-activation checks exist, `pre_activation` is forced to be created (even if it would otherwise be omitted), so that `on.needs` jobs are properly sequenced before `activation`.
+- The `activated` output of `pre_activation` is set unconditionally to `"true"` when the job is created solely due to `on.needs` (no permission checks, stop-time, skip-if, etc.), consistent with existing `on.steps`-only behaviour.
+
+---
+
+## Part 2 — Normative Specification (RFC 2119)
+
+> The key words **MUST**, **MUST NOT**, **REQUIRED**, **SHALL**, **SHALL NOT**, **SHOULD**, **SHOULD NOT**, **RECOMMENDED**, **MAY**, and **OPTIONAL** in this section are to be interpreted as described in [RFC 2119](https://www.rfc-editor.org/rfc/rfc2119).
+
+### Schema and Parsing
+
+1. Implementations **MUST** accept `on.needs` as an optional array of strings in the workflow frontmatter `on:` section.
+2. Each entry in `on.needs` **MUST** match the pattern `^[a-zA-Z_][a-zA-Z0-9_-]*$`.
+3. `on.needs` **MUST** contain unique entries (no duplicates).
+4. If `on.needs` is absent or `null`, implementations **MUST** treat it as an empty list and **MUST NOT** alter the dependency graph.
+
+### Compiler Dependency Wiring
+
+1. Implementations **MUST** add every job named in `on.needs` to the `needs` list of the `pre_activation` job.
+2. Implementations **MUST** add every job named in `on.needs` to the `needs` list of the `activation` job (merged with any existing before-activation dependencies).
+3. Implementations **MUST NOT** add an implicit `needs: activation` dependency to any job that is listed in `on.needs`.
+4. If `on.needs` is non-empty, implementations **MUST** create a `pre_activation` job even if no other pre-activation checks (permission, stop-time, skip-if, on.steps) are present.
+5. When `pre_activation` is created solely because `on.needs` is non-empty (no other checks), the `activated` output **MUST** be set unconditionally to `"true"`.
+
+### Validation
+
+1. Implementations **MUST** reject any `on.needs` entry that references a built-in or compiler-generated job ID (e.g., `pre_activation`, `activation`).
+2. Implementations **MUST** reject any `on.needs` entry that references a job that already depends on `pre_activation` or `activation`, to prevent dependency cycles.
+3. Implementations **MUST** reject any `on.needs` entry that does not correspond to a job declared in the top-level `jobs:` section.
+4. When `on.github-app` fields contain `${{ needs.<job_id>.outputs.* }}` expressions, implementations **MUST** verify that the referenced job is available before activation (i.e., listed in `on.needs` or otherwise before-activation). Implementations **MUST** emit a compiler error if the referenced job would run after activation.
+5. Implementations **SHOULD** emit descriptive error messages that distinguish `on.needs` validation failures from job-level `needs:` validation failures.
+
+### Conformance
+
+An implementation is considered conformant with this ADR if it satisfies all **MUST** and **MUST NOT** requirements above. Failure to meet any **MUST** or **MUST NOT** requirement constitutes non-conformance.
+
+---
+
+*This is a DRAFT ADR generated by the [Design Decision Gate](https://github.com/github/gh-aw/actions/runs/24806829131) workflow. The PR author must review, complete, and finalize this document before the PR can merge.*
diff --git a/docs/src/content/docs/reference/frontmatter-full.md b/docs/src/content/docs/reference/frontmatter-full.md
index 0c5b380c1d8..d8de552dcd8 100644
--- a/docs/src/content/docs/reference/frontmatter-full.md
+++ b/docs/src/content/docs/reference/frontmatter-full.md
@@ -983,6 +983,11 @@ on:
# (optional)
statuses: "read"
+ # Explicit additional custom workflow jobs that pre_activation and activation
+ # should depend on.
+ # (optional)
+ needs: ["secrets_fetcher"]
+
# When set to false, disables the frontmatter hash check step in the activation
# job. Default is true (check is enabled). Useful when the workflow source files
# are managed outside the default GitHub repo context (e.g. cross-repo org
diff --git a/docs/src/content/docs/reference/frontmatter.md b/docs/src/content/docs/reference/frontmatter.md
index bbe1514af53..a125e779ee9 100644
--- a/docs/src/content/docs/reference/frontmatter.md
+++ b/docs/src/content/docs/reference/frontmatter.md
@@ -40,6 +40,7 @@ The `on:` section uses standard GitHub Actions syntax to define workflow trigger
- `skip-if-no-match:` - Skip execution when a search query has no matches (supports `scope: none`; use top-level `on.github-token` / `on.github-app` for custom auth)
- `steps:` - Inject custom deterministic steps into the pre-activation job (saves one workflow job vs. multi-job pattern)
- `permissions:` - Grant additional GitHub token scopes to the pre-activation job (for use with `on.steps:` API calls)
+- `needs:` - Add custom job dependencies that both `pre_activation` and `activation` must wait for
- `github-token:` - Custom token for activation job reactions, status comments, and skip-if search queries
- `github-app:` - GitHub App for minting a short-lived token used by the activation job and all skip-if search steps
diff --git a/docs/src/content/docs/reference/triggers.md b/docs/src/content/docs/reference/triggers.md
index b83d4ee3239..9a1e79dc499 100644
--- a/docs/src/content/docs/reference/triggers.md
+++ b/docs/src/content/docs/reference/triggers.md
@@ -608,6 +608,33 @@ if: needs.pre_activation.outputs.has_bug_label == 'true'
Explicit outputs defined in `jobs.pre-activation.outputs` take precedence over auto-wired `_result` outputs on key collision.
+### Pre-Activation and Activation Dependencies (`on.needs:`)
+
+Add custom jobs that both `pre_activation` and `activation` should depend on. Use this when `on.github-app` credentials come from a job output (for example, a secret-manager fetch job).
+
+```yaml wrap
+on:
+ workflow_dispatch:
+ needs: [secrets_fetcher]
+ github-app:
+ client-id: ${{ needs.secrets_fetcher.outputs.app_id }}
+ private-key: ${{ needs.secrets_fetcher.outputs.private_key }}
+
+jobs:
+ secrets_fetcher:
+ runs-on: ubuntu-latest
+ outputs:
+ app_id: ${{ steps.fetch.outputs.app_id }}
+ private_key: ${{ steps.fetch.outputs.private_key }}
+ steps:
+ - id: fetch
+ run: |
+ echo "app_id=123" >> "$GITHUB_OUTPUT"
+ echo "private_key=***" >> "$GITHUB_OUTPUT"
+```
+
+`on.needs` values must reference custom jobs from top-level `jobs:`. Built-in jobs are rejected.
+
### Pre-Activation Permissions (`on.permissions:`)
Grant additional GitHub token permission scopes to the pre-activation job. Use when `on.steps:` make GitHub API calls that require permissions beyond the defaults.
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index 8a6e2e4505a..de540a1e181 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -1903,6 +1903,18 @@
}
]
},
+ "needs": {
+ "type": "array",
+ "description": "Explicit additional custom workflow jobs that pre_activation and activation should depend on.",
+ "items": {
+ "type": "string",
+ "pattern": "^[a-zA-Z_][a-zA-Z0-9_-]*$"
+ },
+ "additionalItems": false,
+ "uniqueItems": true,
+ "default": [],
+ "examples": [["secrets_fetcher"]]
+ },
"steps": {
"type": "array",
"description": "Steps to inject into the pre-activation job. These steps run after all built-in checks (membership, stop-time, skip-if, etc.) and their results are exposed as pre-activation outputs. Use 'id' on steps to reference their results via needs.pre_activation.outputs._result.",
diff --git a/pkg/workflow/compiler.go b/pkg/workflow/compiler.go
index 4c8f5b4c9d7..7031e16d7dd 100644
--- a/pkg/workflow/compiler.go
+++ b/pkg/workflow/compiler.go
@@ -186,6 +186,12 @@ func (c *Compiler) validateWorkflowData(workflowData *WorkflowData, markdownPath
return formatCompilerError(markdownPath, "error", err.Error(), err)
}
+ // Validate on.needs declarations and on.github-app needs expressions
+ log.Printf("Validating on.needs declarations")
+ if err := c.validateOnNeeds(workflowData); err != nil {
+ return formatCompilerError(markdownPath, "error", err.Error(), err)
+ }
+
// Validate safe-job needs: declarations against known generated job IDs
log.Printf("Validating safe-job needs declarations")
if err := validateSafeJobNeeds(workflowData); err != nil {
diff --git a/pkg/workflow/compiler_activation_job_builder.go b/pkg/workflow/compiler_activation_job_builder.go
index f00e4a3015a..b946c13e9f4 100644
--- a/pkg/workflow/compiler_activation_job_builder.go
+++ b/pkg/workflow/compiler_activation_job_builder.go
@@ -384,6 +384,11 @@ func (c *Compiler) addActivationCommandAndLabelOutputs(ctx *activationJobBuildCo
func (c *Compiler) configureActivationNeedsAndCondition(ctx *activationJobBuildContext) {
data := ctx.data
customJobsBeforeActivation := c.getCustomJobsDependingOnPreActivation(data.Jobs)
+ for _, jobName := range data.OnNeeds {
+ if !slices.Contains(customJobsBeforeActivation, jobName) {
+ customJobsBeforeActivation = append(customJobsBeforeActivation, jobName)
+ }
+ }
promptReferencedJobs := c.getCustomJobsReferencedInPromptWithNoActivationDep(data)
for _, jobName := range promptReferencedJobs {
if !slices.Contains(customJobsBeforeActivation, jobName) {
diff --git a/pkg/workflow/compiler_jobs.go b/pkg/workflow/compiler_jobs.go
index e82f231c037..8c3e50d9982 100644
--- a/pkg/workflow/compiler_jobs.go
+++ b/pkg/workflow/compiler_jobs.go
@@ -267,10 +267,11 @@ func (c *Compiler) buildPreActivationAndActivationJobs(data *WorkflowData, front
hasCommandTrigger := len(data.Command) > 0
hasRateLimit := data.RateLimit != nil
hasOnSteps := len(data.OnSteps) > 0
- compilerJobsLog.Printf("Job configuration: needsPermissionCheck=%v, hasStopTime=%v, hasSkipIfMatch=%v, hasSkipIfNoMatch=%v, hasSkipRoles=%v, hasSkipBots=%v, hasCommand=%v, hasRateLimit=%v, hasOnSteps=%v", needsPermissionCheck, hasStopTime, hasSkipIfMatch, hasSkipIfNoMatch, hasSkipRoles, hasSkipBots, hasCommandTrigger, hasRateLimit, hasOnSteps)
+ hasOnNeeds := len(data.OnNeeds) > 0
+ compilerJobsLog.Printf("Job configuration: needsPermissionCheck=%v, hasStopTime=%v, hasSkipIfMatch=%v, hasSkipIfNoMatch=%v, hasSkipRoles=%v, hasSkipBots=%v, hasCommand=%v, hasRateLimit=%v, hasOnSteps=%v, hasOnNeeds=%v", needsPermissionCheck, hasStopTime, hasSkipIfMatch, hasSkipIfNoMatch, hasSkipRoles, hasSkipBots, hasCommandTrigger, hasRateLimit, hasOnSteps, hasOnNeeds)
// Build pre-activation job if needed (combines membership checks, stop-time validation, skip-if-match check, skip-if-no-match check, skip-roles check, skip-bots check, rate limit check, command position check, and on.steps injection)
- if needsPermissionCheck || hasStopTime || hasSkipIfMatch || hasSkipIfNoMatch || hasSkipRoles || hasSkipBots || hasCommandTrigger || hasRateLimit || hasOnSteps {
+ if needsPermissionCheck || hasStopTime || hasSkipIfMatch || hasSkipIfNoMatch || hasSkipRoles || hasSkipBots || hasCommandTrigger || hasRateLimit || hasOnSteps || hasOnNeeds {
compilerJobsLog.Print("Building pre-activation job")
preActivationJob, err := c.buildPreActivationJob(data, needsPermissionCheck)
if err != nil {
@@ -490,6 +491,10 @@ func (c *Compiler) buildCustomJobs(data *WorkflowData, activationJobCreated bool
for _, j := range promptReferencedJobsSlice {
promptReferencedJobs[j] = true
}
+ onNeedsJobs := make(map[string]bool, len(data.OnNeeds))
+ for _, j := range data.OnNeeds {
+ onNeedsJobs[j] = true
+ }
for jobName, jobConfig := range data.Jobs {
// Skip jobs.pre-activation (or pre_activation) as it's handled specially in buildPreActivationJob
@@ -531,11 +536,14 @@ func (c *Compiler) buildCustomJobs(data *WorkflowData, activationJobCreated bool
// Exception: jobs whose outputs are referenced in the markdown body run before activation
// (so the activation job can include their outputs in the prompt).
isReferencedInMarkdown := promptReferencedJobs[jobName]
- if !hasExplicitNeeds && activationJobCreated && !isReferencedInMarkdown {
+ isOnNeedsDependency := onNeedsJobs[jobName]
+ if !hasExplicitNeeds && activationJobCreated && !isReferencedInMarkdown && !isOnNeedsDependency {
job.Needs = append(job.Needs, string(constants.ActivationJobName))
compilerJobsLog.Printf("Added automatic dependency: custom job '%s' now depends on '%s'", jobName, string(constants.ActivationJobName))
} else if !hasExplicitNeeds && isReferencedInMarkdown {
compilerJobsLog.Printf("Custom job '%s' referenced in markdown body runs before activation (no auto-added dependency)", jobName)
+ } else if !hasExplicitNeeds && isOnNeedsDependency {
+ compilerJobsLog.Printf("Custom job '%s' listed in on.needs runs before activation (no auto-added dependency)", jobName)
}
// Extract other job properties
diff --git a/pkg/workflow/compiler_jobs_test.go b/pkg/workflow/compiler_jobs_test.go
index 3165c48cdad..0a8236300aa 100644
--- a/pkg/workflow/compiler_jobs_test.go
+++ b/pkg/workflow/compiler_jobs_test.go
@@ -12,6 +12,8 @@ import (
"github.com/github/gh-aw/pkg/constants"
"github.com/github/gh-aw/pkg/testutil"
"github.com/goccy/go-yaml"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
// ========================================
@@ -2753,6 +2755,37 @@ func TestBuildCustomJobsSkipsPreActivationJob(t *testing.T) {
}
}
+// TestBuildCustomJobsDoesNotAutoAddActivationWhenListedInOnNeeds verifies that
+// custom jobs listed in on.needs run before activation and therefore do not get
+// an implicit needs: activation dependency.
+func TestBuildCustomJobsDoesNotAutoAddActivationWhenListedInOnNeeds(t *testing.T) {
+ compiler := NewCompiler()
+ compiler.jobManager = NewJobManager()
+
+ activationJob := &Job{Name: string(constants.ActivationJobName)}
+ require.NoError(t, compiler.jobManager.AddJob(activationJob), "activation job should be added")
+
+ data := &WorkflowData{
+ Name: "Test Workflow",
+ AI: "copilot",
+ OnNeeds: []string{"secrets_fetcher"},
+ Jobs: map[string]any{
+ "secrets_fetcher": map[string]any{
+ "runs-on": "ubuntu-latest",
+ "steps": []any{
+ map[string]any{"run": "echo 'fetch'"},
+ },
+ },
+ },
+ }
+
+ require.NoError(t, compiler.buildCustomJobs(data, true), "custom jobs should build")
+
+ job, exists := compiler.jobManager.GetJob("secrets_fetcher")
+ require.True(t, exists, "secrets_fetcher should be added")
+ assert.NotContains(t, job.Needs, string(constants.ActivationJobName), "on.needs job should not auto-depend on activation")
+}
+
// TestBuildCustomJobsWithStrategy tests custom jobs with matrix strategy configuration
func TestBuildCustomJobsWithStrategy(t *testing.T) {
tmpDir := testutil.TempDir(t, "strategy-test")
diff --git a/pkg/workflow/compiler_orchestrator_workflow.go b/pkg/workflow/compiler_orchestrator_workflow.go
index 166339555d6..93a6c740c9d 100644
--- a/pkg/workflow/compiler_orchestrator_workflow.go
+++ b/pkg/workflow/compiler_orchestrator_workflow.go
@@ -454,5 +454,12 @@ func (c *Compiler) processOnSectionAndFilters(
// Extract on.permissions for pre-activation job permissions
workflowData.OnPermissions = extractOnPermissions(frontmatter)
+ // Extract on.needs for pre-activation/activation job dependencies
+ onNeeds, err := extractOnNeeds(frontmatter)
+ if err != nil {
+ return err
+ }
+ workflowData.OnNeeds = onNeeds
+
return nil
}
diff --git a/pkg/workflow/compiler_pre_activation_job.go b/pkg/workflow/compiler_pre_activation_job.go
index 405979e891c..dcde9ff6bfb 100644
--- a/pkg/workflow/compiler_pre_activation_job.go
+++ b/pkg/workflow/compiler_pre_activation_job.go
@@ -355,8 +355,8 @@ func (c *Compiler) buildPreActivationJob(data *WorkflowData, needsPermissionChec
// The activated output is unconditionally true; the user controls
// agent execution through their own if: condition referencing the
// on.steps outputs (e.g., needs.pre_activation.outputs.gate_result).
- if len(data.OnSteps) > 0 {
- compilerActivationJobsLog.Printf("Pre-activation created with on.steps only (%d steps); activated output is unconditionally true", len(data.OnSteps))
+ if len(data.OnSteps) > 0 || len(data.OnNeeds) > 0 {
+ compilerActivationJobsLog.Printf("Pre-activation created with no checks (on.steps=%d, on.needs=%d); activated output is unconditionally true", len(data.OnSteps), len(data.OnNeeds))
activatedNode = BuildStringLiteral("true")
} else {
// This should never happen - it means pre-activation job was created without any checks
@@ -434,6 +434,7 @@ func (c *Compiler) buildPreActivationJob(data *WorkflowData, needsPermissionChec
Permissions: permissions,
Steps: steps,
Outputs: outputs,
+ Needs: dedupeStringSlice(data.OnNeeds),
}
return job, nil
@@ -647,6 +648,68 @@ func extractOnPermissions(frontmatter map[string]any) *Permissions {
return parser.ToPermissions()
}
+// extractOnNeeds extracts the 'needs' field from the 'on:' section of frontmatter.
+// These dependencies are added to both pre_activation and activation jobs.
+//
+// Returns nil if on.needs is not configured.
+func extractOnNeeds(frontmatter map[string]any) ([]string, error) {
+ onValue, exists := frontmatter["on"]
+ if !exists || onValue == nil {
+ return nil, nil
+ }
+
+ onMap, ok := onValue.(map[string]any)
+ if !ok {
+ return nil, nil
+ }
+
+ return parseOnNeedsValues(onMap)
+}
+
+func parseOnNeedsValues(onMap map[string]any) ([]string, error) {
+ if onMap == nil {
+ return nil, nil
+ }
+
+ needsValue, exists := onMap["needs"]
+ if !exists || needsValue == nil {
+ return nil, nil
+ }
+
+ needsList, ok := needsValue.([]any)
+ if !ok {
+ return nil, fmt.Errorf("on.needs must be an array, got %T", needsValue)
+ }
+
+ result := make([]string, 0, len(needsList))
+ for i, need := range needsList {
+ needStr, ok := need.(string)
+ if !ok {
+ return nil, fmt.Errorf("on.needs[%d] must be a string, got %T", i, need)
+ }
+ result = append(result, needStr)
+ }
+
+ return dedupeStringSlice(result), nil
+}
+
+func dedupeStringSlice(values []string) []string {
+ if len(values) == 0 {
+ return nil
+ }
+
+ seen := make(map[string]bool, len(values))
+ result := make([]string, 0, len(values))
+ for _, v := range values {
+ if seen[v] {
+ continue
+ }
+ seen[v] = true
+ result = append(result, v)
+ }
+ return result
+}
+
// referencesPreActivationOutputs returns true if the condition references the pre_activation job's
// own outputs (e.g., "needs.pre_activation.outputs.foo"). Such conditions cannot be applied to the
// pre_activation job itself (a job cannot reference its own outputs), so they are deferred to
diff --git a/pkg/workflow/compiler_safe_outputs.go b/pkg/workflow/compiler_safe_outputs.go
index fb3b7e8eb0e..00937cc30f2 100644
--- a/pkg/workflow/compiler_safe_outputs.go
+++ b/pkg/workflow/compiler_safe_outputs.go
@@ -215,7 +215,7 @@ func (c *Compiler) parseOnSection(frontmatter map[string]any, workflowData *Work
}
// Extract other (non-conflicting) events excluding slash_command, command, label_command, reaction, status-comment, and stop-after
- otherEvents = excludeMapKeys(onMap, "slash_command", "command", "label_command", "reaction", "status-comment", "stop-after", "github-token", "github-app")
+ otherEvents = excludeMapKeys(onMap, "slash_command", "command", "label_command", "reaction", "status-comment", "stop-after", "github-token", "github-app", "needs")
}
}
diff --git a/pkg/workflow/compiler_types.go b/pkg/workflow/compiler_types.go
index 16a3c00a6c1..f550cfe590a 100644
--- a/pkg/workflow/compiler_types.go
+++ b/pkg/workflow/compiler_types.go
@@ -438,6 +438,7 @@ type WorkflowData struct {
SkipBots []string // users to skip workflow for (e.g., [user1, user2])
OnSteps []map[string]any // steps to inject into the pre-activation job from on.steps
OnPermissions *Permissions // additional permissions for the pre-activation job from on.permissions
+ OnNeeds []string // custom workflow jobs that pre_activation/activation should depend on from on.needs
ManualApproval string // environment name for manual approval from on: section
Command []string // for /command trigger support - multiple command names
CommandEvents []string // events where command should be active (nil = all events)
diff --git a/pkg/workflow/frontmatter_parsing.go b/pkg/workflow/frontmatter_parsing.go
index d9560792655..eabc2938b5b 100644
--- a/pkg/workflow/frontmatter_parsing.go
+++ b/pkg/workflow/frontmatter_parsing.go
@@ -60,10 +60,23 @@ func ParseFrontmatterConfig(frontmatter map[string]any) (*FrontmatterConfig, err
}
}
+ // Parse typed on.needs field if on exists
+ if len(config.On) > 0 {
+ onNeeds, err := parseOnNeedsConfig(config.On)
+ if err == nil {
+ config.OnNeeds = onNeeds
+ frontmatterTypesLog.Printf("Parsed typed on.needs config with %d entries", len(onNeeds))
+ }
+ }
+
frontmatterTypesLog.Printf("Successfully parsed frontmatter config: name=%s, engine=%v", config.Name, config.Engine)
return &config, nil
}
// parseOnNeedsConfig extracts the typed on.needs list from an already-parsed
// "on:" map. It is a thin named wrapper around parseOnNeedsValues so the
// frontmatter-parsing layer has its own entry point for this field.
func parseOnNeedsConfig(on map[string]any) ([]string, error) {
	return parseOnNeedsValues(on)
}
+
// parseRuntimesConfig converts a map[string]any to RuntimesConfig
func parseRuntimesConfig(runtimes map[string]any) (*RuntimesConfig, error) {
config := &RuntimesConfig{}
diff --git a/pkg/workflow/frontmatter_types.go b/pkg/workflow/frontmatter_types.go
index 5ca20333ac7..35e5d653a2f 100644
--- a/pkg/workflow/frontmatter_types.go
+++ b/pkg/workflow/frontmatter_types.go
@@ -167,6 +167,7 @@ type FrontmatterConfig struct {
// Event and trigger configuration
On map[string]any `json:"on,omitempty"` // Complex trigger config with many variants (too dynamic to type)
+ OnNeeds []string `json:"-"` // New typed field extracted from on.needs (not in JSON to avoid conflict)
Permissions map[string]any `json:"permissions,omitempty"` // Deprecated: use PermissionsTyped (can be string or map)
Concurrency map[string]any `json:"concurrency,omitempty"`
If string `json:"if,omitempty"`
diff --git a/pkg/workflow/frontmatter_types_test.go b/pkg/workflow/frontmatter_types_test.go
index 5602f417041..7b1aa1e3d02 100644
--- a/pkg/workflow/frontmatter_types_test.go
+++ b/pkg/workflow/frontmatter_types_test.go
@@ -120,6 +120,27 @@ func TestParseFrontmatterConfig(t *testing.T) {
}
})
+ t.Run("parses on.needs config", func(t *testing.T) {
+ frontmatter := map[string]any{
+ "on": map[string]any{
+ "workflow_dispatch": map[string]any{},
+ "needs": []any{"secrets_fetcher", "config_loader"},
+ },
+ }
+
+ config, err := ParseFrontmatterConfig(frontmatter)
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+
+ if len(config.OnNeeds) != 2 {
+ t.Fatalf("expected 2 on.needs entries, got %d", len(config.OnNeeds))
+ }
+ if config.OnNeeds[0] != "secrets_fetcher" || config.OnNeeds[1] != "config_loader" {
+ t.Fatalf("unexpected on.needs entries: %#v", config.OnNeeds)
+ }
+ })
+
t.Run("handles timeout-minutes as int", func(t *testing.T) {
frontmatter := map[string]any{
"timeout-minutes": 60,
diff --git a/pkg/workflow/on_needs_integration_test.go b/pkg/workflow/on_needs_integration_test.go
new file mode 100644
index 00000000000..f6f8ad0962e
--- /dev/null
+++ b/pkg/workflow/on_needs_integration_test.go
@@ -0,0 +1,65 @@
+//go:build integration
+
+package workflow
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/github/gh-aw/pkg/testutil"
+ "github.com/goccy/go-yaml"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
// TestOnNeedsCompilesAndWiresActivationDependencies compiles a workflow that
// uses on.needs together with on.github-app needs-output expressions, then
// asserts the generated lock file wires the custom job into the `needs` of
// both the pre_activation and activation jobs.
func TestOnNeedsCompilesAndWiresActivationDependencies(t *testing.T) {
	tmpDir := testutil.TempDir(t, "on-needs-integration")
	compiler := NewCompiler()

	// Workflow frontmatter: a custom secrets_fetcher job whose outputs feed
	// on.github-app, made available before activation via on.needs.
	workflowContent := `---
on:
  workflow_dispatch:
  needs: [secrets_fetcher]
  github-app:
    client-id: ${{ needs.secrets_fetcher.outputs.app_id }}
    private-key: ${{ needs.secrets_fetcher.outputs.private_key }}
engine: copilot
jobs:
  secrets_fetcher:
    runs-on: ubuntu-latest
    outputs:
      app_id: ${{ steps.fetch.outputs.app_id }}
      private_key: ${{ steps.fetch.outputs.private_key }}
    steps:
      - id: fetch
        run: |
          echo "app_id=123" >> "$GITHUB_OUTPUT"
          echo "private_key=key" >> "$GITHUB_OUTPUT"
---
Run with on.needs
`

	workflowFile := filepath.Join(tmpDir, "test-on-needs.md")
	require.NoError(t, os.WriteFile(workflowFile, []byte(workflowContent), 0644), "should write test workflow")

	require.NoError(t, compiler.CompileWorkflow(workflowFile), "workflow should compile with on.needs and on.github-app needs expression")

	// The compiler writes the lock file next to the source with a .lock.yml suffix.
	lockFile := filepath.Join(tmpDir, "test-on-needs.lock.yml")
	lockBytes, err := os.ReadFile(lockFile)
	require.NoError(t, err, "should read compiled lock file")

	var lock map[string]any
	require.NoError(t, yaml.Unmarshal(lockBytes, &lock), "compiled lock file should be valid YAML")

	jobs, ok := lock["jobs"].(map[string]any)
	require.True(t, ok, "compiled workflow should contain jobs map")

	// Both gate jobs must list the on.needs job as a dependency.
	preActivation, ok := jobs["pre_activation"].(map[string]any)
	require.True(t, ok, "compiled workflow should contain pre_activation job")
	assert.Contains(t, preActivation["needs"], "secrets_fetcher", "pre_activation should depend on on.needs job")

	activation, ok := jobs["activation"].(map[string]any)
	require.True(t, ok, "compiled workflow should contain activation job")
	assert.Contains(t, activation["needs"], "secrets_fetcher", "activation should depend on on.needs job")
}
diff --git a/pkg/workflow/on_needs_validation.go b/pkg/workflow/on_needs_validation.go
new file mode 100644
index 00000000000..3fc06f06194
--- /dev/null
+++ b/pkg/workflow/on_needs_validation.go
@@ -0,0 +1,244 @@
+package workflow
+
+import (
+ "fmt"
+ "regexp"
+ "strings"
+
+ "github.com/github/gh-aw/pkg/constants"
+ "github.com/github/gh-aw/pkg/logger"
+)
+
// onNeedsValidationLog scopes debug logging for on.needs validation.
var onNeedsValidationLog = logger.New("workflow:on_needs_validation")

// onNeedsOutputExpressionPattern matches a value that is exactly a GitHub
// Actions expression of the form "${{ needs.<job>.outputs.<output> }}" and
// captures the job name. Compiled once at package scope.
var onNeedsOutputExpressionPattern = regexp.MustCompile(`^\$\{\{\s*needs\.([A-Za-z_][A-Za-z0-9_-]*)\.outputs\.[A-Za-z_][A-Za-z0-9_-]*\s*\}\}$`)
+
+func (c *Compiler) validateOnNeeds(data *WorkflowData) error {
+ if data == nil {
+ return nil
+ }
+
+ if err := validateOnNeedsTargets(data); err != nil {
+ return err
+ }
+
+ if err := c.validateOnNeedsDependencyChains(data); err != nil {
+ return err
+ }
+
+ if err := c.validateOnGitHubAppNeedsExpressions(data); err != nil {
+ return err
+ }
+
+ return nil
+}
+
// validateOnNeedsTargets checks every job listed in on.needs:
//   - it must not be one of the compiler's built-in/reserved jobs,
//   - it must exist among the workflow's custom jobs,
//   - it must not itself depend on activation/pre_activation (pre_activation
//     and activation are wired to depend on on.needs jobs, so such a
//     dependency would form a cycle).
func validateOnNeedsTargets(data *WorkflowData) error {
	if len(data.OnNeeds) == 0 {
		return nil
	}

	// Build the set of eligible custom jobs (reserved names excluded).
	customJobs := make(map[string]bool, len(data.Jobs))
	for jobName := range data.Jobs {
		if isReservedOnNeedsTarget(jobName) {
			continue
		}
		customJobs[jobName] = true
	}

	for _, need := range data.OnNeeds {
		// Reserved names are reported as "built-in" rather than "unknown".
		if isReservedOnNeedsTarget(need) {
			return fmt.Errorf(
				"on.needs: built-in job %q is not allowed. Expected one of the workflow's custom jobs. Example: on.needs: [secrets_fetcher]",
				need,
			)
		}
		if !customJobs[need] {
			return fmt.Errorf(
				"on.needs: unknown job %q. Expected one of the workflow's custom jobs. Example: on.needs: [secrets_fetcher]",
				need,
			)
		}

		// Reject targets whose own needs would close a cycle through the
		// activation jobs.
		if jobConfig, ok := data.Jobs[need].(map[string]any); ok {
			if jobDependsOnActivation(jobConfig) || jobDependsOnPreActivation(jobConfig) {
				return fmt.Errorf(
					"on.needs: job %q cannot depend on activation/pre_activation because pre_activation and activation depend on on.needs jobs",
					need,
				)
			}
		}
	}

	onNeedsValidationLog.Printf("Validated %d on.needs dependency target(s)", len(data.OnNeeds))
	return nil
}
+
+func (c *Compiler) validateOnGitHubAppNeedsExpressions(data *WorkflowData) error {
+ if data == nil || data.ActivationGitHubApp == nil {
+ return nil
+ }
+
+ allowed := make(map[string]bool, len(data.OnNeeds))
+ for _, j := range data.OnNeeds {
+ allowed[j] = true
+ }
+ for _, j := range c.getCustomJobsDependingOnPreActivation(data.Jobs) {
+ allowed[j] = true
+ }
+ for _, j := range c.getCustomJobsReferencedInPromptWithNoActivationDep(data) {
+ allowed[j] = true
+ }
+
+ fields := map[string]string{
+ "client-id": data.ActivationGitHubApp.AppID,
+ "private-key": data.ActivationGitHubApp.PrivateKey,
+ }
+
+ for fieldName, value := range fields {
+ jobName, ok := extractNeedsJobFromOutputExpression(value)
+ if !ok {
+ continue
+ }
+
+ if isReservedOnNeedsTarget(jobName) {
+ return fmt.Errorf("on.github-app.%s: built-in job %q is not allowed in needs expressions", fieldName, jobName)
+ }
+ if _, exists := data.Jobs[jobName]; !exists {
+ return fmt.Errorf("on.github-app.%s: unknown job %q in needs expression", fieldName, jobName)
+ }
+ if !allowed[jobName] {
+ return fmt.Errorf(
+ "on.github-app.%s references needs.%s.outputs.* but job %q is not available before activation. Add it to on.needs (example: on.needs: [%s])",
+ fieldName,
+ jobName,
+ jobName,
+ jobName,
+ )
+ }
+ }
+
+ return nil
+}
+
// validateOnNeedsDependencyChains walks the transitive `needs` chains of
// every on.needs job and rejects configurations that could produce an
// implicit dependency on the activation job (and therefore a cycle). No-op
// when on.needs is empty.
func (c *Compiler) validateOnNeedsDependencyChains(data *WorkflowData) error {
	if data == nil || len(data.OnNeeds) == 0 {
		return nil
	}

	// Membership sets consulted by the recursive walk.
	onNeedsSet := make(map[string]bool, len(data.OnNeeds))
	for _, job := range data.OnNeeds {
		onNeedsSet[job] = true
	}

	// Jobs referenced from the prompt without an activation dependency are
	// also acceptable transitive dependencies (they run before activation).
	promptReferencedSet := make(map[string]bool)
	for _, job := range c.getCustomJobsReferencedInPromptWithNoActivationDep(data) {
		promptReferencedSet[job] = true
	}

	// visited/visiting are shared across roots so each job is checked once.
	visited := make(map[string]bool, len(data.Jobs))
	visiting := make(map[string]bool, len(data.Jobs))
	for _, root := range data.OnNeeds {
		if err := validateOnNeedsDependencyChain(root, root, data.Jobs, onNeedsSet, promptReferencedSet, visiting, visited); err != nil {
			return err
		}
	}

	return nil
}
+
// validateOnNeedsDependencyChain recursively validates the `needs` chain of a
// single on.needs root job. It rejects chains that reach a built-in job, or a
// job that has no explicit needs and is not otherwise guaranteed to run
// before activation (such a job may get an implicit needs: activation,
// closing a cycle since activation depends on the on.needs jobs).
//
// visiting tracks the current recursion path (cycle detection); visited
// caches jobs already proven safe so shared dependencies are checked once.
func validateOnNeedsDependencyChain(
	root string,
	current string,
	allJobs map[string]any,
	onNeedsSet map[string]bool,
	promptReferencedSet map[string]bool,
	visiting map[string]bool,
	visited map[string]bool,
) error {
	if visited[current] {
		return nil
	}
	if visiting[current] {
		return fmt.Errorf("on.needs: cycle detected while validating dependency chain for %q", root)
	}

	// Unknown or non-map job entries are left for other validators to report.
	jobConfigAny, exists := allJobs[current]
	if !exists {
		return nil
	}

	jobConfig, ok := jobConfigAny.(map[string]any)
	if !ok {
		return nil
	}

	// Mark the current path; undo on exit so sibling branches may revisit.
	visiting[current] = true
	defer delete(visiting, current)

	for _, dep := range parseNeedsField(jobConfig["needs"]) {
		if isReservedOnNeedsTarget(dep) {
			return fmt.Errorf(
				"on.needs: job %q depends on built-in job %q. Dependencies for on.needs jobs must be custom jobs that run before activation",
				current,
				dep,
			)
		}

		depAny, depExists := allJobs[dep]
		if !depExists {
			continue
		}

		depConfig, ok := depAny.(map[string]any)
		if !ok {
			continue
		}

		// A dependency with no explicit needs that is neither in on.needs nor
		// prompt-referenced may be given an implicit needs: activation.
		_, depHasExplicitNeeds := depConfig["needs"]
		if !depHasExplicitNeeds && !onNeedsSet[dep] && !promptReferencedSet[dep] {
			return fmt.Errorf(
				"on.needs: job %q depends on %q, but %q has no explicit needs and is not in on.needs. It may get an implicit needs: activation and create a cycle. Add %q to on.needs or give %q explicit needs that run before activation",
				current,
				dep,
				dep,
				dep,
				dep,
			)
		}

		if err := validateOnNeedsDependencyChain(root, dep, allJobs, onNeedsSet, promptReferencedSet, visiting, visited); err != nil {
			return err
		}
	}

	visited[current] = true
	return nil
}
+
+func extractNeedsJobFromOutputExpression(value string) (string, bool) {
+ match := onNeedsOutputExpressionPattern.FindStringSubmatch(strings.TrimSpace(value))
+ if len(match) != 2 {
+ return "", false
+ }
+ return match[1], true
+}
+
// isReservedOnNeedsTarget reports whether jobName is one of the compiler's
// built-in jobs, which may not be used as on.needs targets or as transitive
// dependencies of on.needs jobs. The switch covers the canonical constant
// names, alias spellings ("pre-activation", "safe-outputs"), and two
// memory-related jobs referenced here by literal name.
func isReservedOnNeedsTarget(jobName string) bool {
	switch jobName {
	case string(constants.AgentJobName),
		string(constants.ActivationJobName),
		string(constants.PreActivationJobName),
		"pre-activation",
		string(constants.ConclusionJobName),
		string(constants.SafeOutputsJobName),
		"safe-outputs",
		string(constants.DetectionJobName),
		string(constants.UnlockJobName),
		"push_repo_memory",
		"update_cache_memory":
		return true
	default:
		return false
	}
}
diff --git a/pkg/workflow/on_needs_validation_test.go b/pkg/workflow/on_needs_validation_test.go
new file mode 100644
index 00000000000..6657b8d7e2f
--- /dev/null
+++ b/pkg/workflow/on_needs_validation_test.go
@@ -0,0 +1,147 @@
+//go:build !integration
+
+package workflow
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
// TestValidateOnNeedsTargets covers direct validation of on.needs targets:
// a valid custom job, a rejected built-in job, and a rejected job that
// depends on activation.
func TestValidateOnNeedsTargets(t *testing.T) {
	t.Run("valid on.needs target", func(t *testing.T) {
		data := &WorkflowData{
			Jobs: map[string]any{
				"secrets_fetcher": map[string]any{
					"runs-on": "ubuntu-latest",
				},
			},
			OnNeeds: []string{"secrets_fetcher"},
		}

		require.NoError(t, validateOnNeedsTargets(data), "expected on.needs validation to pass")
	})

	t.Run("built-in target rejected", func(t *testing.T) {
		data := &WorkflowData{
			Jobs:    map[string]any{"secrets_fetcher": map[string]any{}},
			OnNeeds: []string{"activation"},
		}

		err := validateOnNeedsTargets(data)
		require.Error(t, err, "expected on.needs validation error")
		assert.Contains(t, err.Error(), `built-in job "activation"`, "error should explain invalid built-in target")
	})

	t.Run("target depending on activation rejected", func(t *testing.T) {
		data := &WorkflowData{
			Jobs: map[string]any{
				"secrets_fetcher": map[string]any{
					// "needs" may be a plain string in workflow YAML.
					"needs": "activation",
				},
			},
			OnNeeds: []string{"secrets_fetcher"},
		}

		err := validateOnNeedsTargets(data)
		require.Error(t, err, "expected on.needs validation error")
		assert.Contains(t, err.Error(), "cannot depend on activation/pre_activation", "error should explain cyclic dependency risk")
	})
}
+
// TestValidateOnNeedsDependencyChains covers transitive `needs` chain
// validation for on.needs jobs.
func TestValidateOnNeedsDependencyChains(t *testing.T) {
	c := NewCompiler()

	t.Run("rejects chain where transitive dependency may get implicit activation need", func(t *testing.T) {
		data := &WorkflowData{
			Jobs: map[string]any{
				"secrets_fetcher": map[string]any{
					"needs": []any{"bootstrap"},
				},
				// bootstrap has no explicit needs and is not in on.needs, so
				// it is flagged as cycle-prone.
				"bootstrap": map[string]any{
					"runs-on": "ubuntu-latest",
				},
			},
			OnNeeds: []string{"secrets_fetcher"},
		}

		err := c.validateOnNeeds(data)
		require.Error(t, err, "expected transitive chain validation error")
		assert.Contains(t, err.Error(), `depends on "bootstrap"`, "error should identify problematic transitive dependency")
		assert.Contains(t, err.Error(), "implicit needs: activation", "error should explain cycle-prone implicit activation dependency")
	})

	t.Run("allows chain when transitive dependency is explicitly in on.needs", func(t *testing.T) {
		data := &WorkflowData{
			Jobs: map[string]any{
				"secrets_fetcher": map[string]any{
					"needs": []any{"bootstrap"},
				},
				"bootstrap": map[string]any{
					"runs-on": "ubuntu-latest",
				},
			},
			OnNeeds: []string{"secrets_fetcher", "bootstrap"},
		}

		require.NoError(t, c.validateOnNeeds(data), "expected transitive chain to be valid when all dependencies are in on.needs")
	})
}
+
// TestValidateOnGitHubAppNeedsExpressions covers validation of
// needs.<job>.outputs.* expressions used in on.github-app fields.
func TestValidateOnGitHubAppNeedsExpressions(t *testing.T) {
	c := NewCompiler()

	t.Run("allows on.needs expression in on.github-app", func(t *testing.T) {
		data := &WorkflowData{
			Jobs: map[string]any{
				"secrets_fetcher": map[string]any{
					"runs-on": "ubuntu-latest",
				},
			},
			OnNeeds: []string{"secrets_fetcher"},
			ActivationGitHubApp: &GitHubAppConfig{
				AppID:      "${{ needs.secrets_fetcher.outputs.app_id }}",
				PrivateKey: "${{ needs.secrets_fetcher.outputs.private_key }}",
			},
		}

		require.NoError(t, c.validateOnNeeds(data), "expected on.github-app needs expression to validate")
	})

	t.Run("rejects unknown needs expression in on.github-app", func(t *testing.T) {
		data := &WorkflowData{
			Jobs: map[string]any{
				"secrets_fetcher": map[string]any{
					"runs-on": "ubuntu-latest",
				},
			},
			ActivationGitHubApp: &GitHubAppConfig{
				// client-id references a job that does not exist.
				AppID:      "${{ needs.missing_job.outputs.app_id }}",
				PrivateKey: "${{ secrets.APP_PRIVATE_KEY }}",
			},
		}

		err := c.validateOnNeeds(data)
		require.Error(t, err, "expected on.github-app validation error")
		assert.Contains(t, err.Error(), `unknown job "missing_job"`, "error should identify unknown needs job")
	})

	t.Run("error field label uses client-id", func(t *testing.T) {
		data := &WorkflowData{
			Jobs: map[string]any{
				"secrets_fetcher": map[string]any{
					"runs-on": "ubuntu-latest",
				},
			},
			ActivationGitHubApp: &GitHubAppConfig{
				// Only client-id uses a needs expression; the job is not in
				// on.needs, so the error must name the client-id field.
				AppID:      "${{ needs.secrets_fetcher.outputs.app_id }}",
				PrivateKey: "${{ secrets.APP_PRIVATE_KEY }}",
			},
		}

		err := c.validateOnNeeds(data)
		require.Error(t, err, "expected on.github-app validation error")
		assert.Contains(t, err.Error(), "on.github-app.client-id", "error field should use yaml key client-id")
	})
}
diff --git a/pkg/workflow/on_steps_test.go b/pkg/workflow/on_steps_test.go
index ea00e75d0c3..ab8eda8bb44 100644
--- a/pkg/workflow/on_steps_test.go
+++ b/pkg/workflow/on_steps_test.go
@@ -10,6 +10,8 @@ import (
"github.com/github/gh-aw/pkg/stringutil"
"github.com/github/gh-aw/pkg/testutil"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
// TestOnSteps tests that on.steps are injected into the pre-activation job and their
@@ -403,6 +405,73 @@ func TestExtractOnPermissions(t *testing.T) {
}
}
// TestExtractOnNeeds tests the extractOnNeeds function directly.
// Table-driven: covers missing/non-map on sections, an on section without
// needs, a valid needs list, and a wrong-typed needs value.
func TestExtractOnNeeds(t *testing.T) {
	tests := []struct {
		name          string
		frontmatter   map[string]any
		expected      []string
		expectError   bool
		errorContains string
	}{
		{
			// No "on" key at all: treated as not configured.
			name:        "no_on_section",
			frontmatter: map[string]any{},
			expected:    nil,
		},
		{
			// A bare event string cannot carry a needs entry.
			name: "on_section_string",
			frontmatter: map[string]any{
				"on": "push",
			},
			expected: nil,
		},
		{
			name: "on_section_without_needs",
			frontmatter: map[string]any{
				"on": map[string]any{
					"workflow_dispatch": nil,
				},
			},
			expected: nil,
		},
		{
			name: "on_needs_valid",
			frontmatter: map[string]any{
				"on": map[string]any{
					"workflow_dispatch": nil,
					"needs":             []any{"secrets_fetcher", "config_loader"},
				},
			},
			expected: []string{"secrets_fetcher", "config_loader"},
		},
		{
			// A scalar needs value is a type error, not silently ignored.
			name: "on_needs_wrong_type",
			frontmatter: map[string]any{
				"on": map[string]any{
					"needs": "secrets_fetcher",
				},
			},
			expectError:   true,
			errorContains: "on.needs must be an array",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			needs, err := extractOnNeeds(tt.frontmatter)
			if tt.expectError {
				require.Error(t, err, "expected extraction error")
				assert.Contains(t, err.Error(), tt.errorContains, "error should contain expected text")
				return
			}

			require.NoError(t, err, "expected extraction to succeed")
			assert.Equal(t, tt.expected, needs, "unexpected on.needs result")
		})
	}
}
+
// TestOnPermissionsAppliedToPreActivation tests that on.permissions are applied to the pre-activation job
func TestOnPermissionsAppliedToPreActivation(t *testing.T) {
tmpDir := testutil.TempDir(t, "on-permissions-test")