diff --git a/.github/workflows/smoke-crush.lock.yml b/.github/workflows/smoke-crush.lock.yml
index 4796970ffdb..f21451155d7 100644
--- a/.github/workflows/smoke-crush.lock.yml
+++ b/.github/workflows/smoke-crush.lock.yml
@@ -1,5 +1,5 @@
# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"829d6508945d2851e941dbf4412159d86a70723dc05086c57591b892611e5a4c","strict":true,"agent_id":"crush","agent_model":"anthropic/claude-sonnet-4-20250514"}
-# gh-aw-manifest: {"version":1,"secrets":["COPILOT_GITHUB_TOKEN","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.26"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.26"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.26"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.26"},{"image":"ghcr.io/github/github-mcp-server:v1.0.0"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
+# gh-aw-manifest: {"version":1,"secrets":["ANTHROPIC_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.26"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.26"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.26"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.26"},{"image":"ghcr.io/github/github-mcp-server:v1.0.0"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
# | |_| | __ _ ___ _ __ | |_ _ ___
@@ -32,7 +32,7 @@
# - shared/noop-reminder.md
#
# Secrets used:
-# - COPILOT_GITHUB_TOKEN
+# - ANTHROPIC_API_KEY
# - GH_AW_GITHUB_MCP_SERVER_TOKEN
# - GH_AW_GITHUB_TOKEN
# - GITHUB_TOKEN
@@ -152,11 +152,11 @@ jobs:
setupGlobals(core, github, context, exec, io, getOctokit);
const { main } = require('${{ runner.temp }}/gh-aw/actions/add_reaction.cjs');
await main();
- - name: Validate COPILOT_GITHUB_TOKEN secret
+ - name: Validate ANTHROPIC_API_KEY secret
id: validate-secret
- run: bash "${RUNNER_TEMP}/gh-aw/actions/validate_multi_secret.sh" COPILOT_GITHUB_TOKEN 'Crush CLI' https://github.github.com/gh-aw/reference/engines/#crush
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/validate_multi_secret.sh" ANTHROPIC_API_KEY 'Crush CLI' https://github.github.com/gh-aw/reference/engines/#crush
env:
- COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
+ ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
- name: Checkout .github and .agents folders
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -896,14 +896,14 @@ jobs:
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --allow-domains '*.githubusercontent.com,api.anthropic.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,charm.land,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,docs.github.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.blog,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --image-tag 0.25.26 --skip-pull --enable-api-proxy \
-- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && crush run --verbose "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
+ ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+ ANTHROPIC_BASE_URL: http://host.docker.internal:10000
CRUSH_MODEL: anthropic/claude-sonnet-4-20250514
GH_AW_MCP_CONFIG: ${{ github.workspace }}/.crush.json
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }}
GITHUB_WORKSPACE: ${{ github.workspace }}
NO_PROXY: localhost,127.0.0.1
- OPENAI_API_KEY: ${{ secrets.COPILOT_GITHUB_TOKEN }}
- OPENAI_BASE_URL: http://host.docker.internal:10005
- name: Configure Git credentials
env:
REPO_NAME: ${{ github.repository }}
@@ -936,8 +936,8 @@ jobs:
const { main } = require('${{ runner.temp }}/gh-aw/actions/redact_secrets.cjs');
await main();
env:
- GH_AW_SECRET_NAMES: 'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN'
- SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
+ GH_AW_SECRET_NAMES: 'ANTHROPIC_API_KEY,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN'
+ SECRET_ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }}
SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }}
SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -1338,12 +1338,12 @@ jobs:
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --allow-domains api.anthropic.com,charm.land,github.com,host.docker.internal,raw.githubusercontent.com,registry.npmjs.org --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --image-tag 0.25.26 --skip-pull --enable-api-proxy \
-- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && crush run --verbose "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log
env:
+ ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+ ANTHROPIC_BASE_URL: http://host.docker.internal:10000
CRUSH_MODEL: anthropic/claude-sonnet-4-20250514
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GITHUB_WORKSPACE: ${{ github.workspace }}
NO_PROXY: localhost,127.0.0.1
- OPENAI_API_KEY: ${{ secrets.COPILOT_GITHUB_TOKEN }}
- OPENAI_BASE_URL: http://host.docker.internal:10005
- name: Upload threat detection log
if: always() && steps.detection_guard.outputs.run_detection == 'true'
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
diff --git a/.github/workflows/smoke-opencode.lock.yml b/.github/workflows/smoke-opencode.lock.yml
index 68067a05b88..b6837b345da 100644
--- a/.github/workflows/smoke-opencode.lock.yml
+++ b/.github/workflows/smoke-opencode.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"7b4333746ba94c519add47ef8dc90ac817f6c5c2a92c9826ae763996dfb6b2e7","strict":true,"agent_id":"opencode"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"47285093abe949ccb3324d5b9320619f3426f6dfd84a71118837111d633a87b8","strict":true,"agent_id":"opencode","agent_model":"copilot/gpt-5"}
# gh-aw-manifest: {"version":1,"secrets":["COPILOT_GITHUB_TOKEN","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.26"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.26"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.26"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.26"},{"image":"ghcr.io/github/github-mcp-server:v1.0.0"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -130,7 +130,7 @@ jobs:
env:
GH_AW_INFO_ENGINE_ID: "opencode"
GH_AW_INFO_ENGINE_NAME: "OpenCode"
- GH_AW_INFO_MODEL: ${{ vars.GH_AW_MODEL_AGENT_OPENCODE || 'auto' }}
+ GH_AW_INFO_MODEL: "copilot/gpt-5"
GH_AW_INFO_VERSION: "1.2.14"
GH_AW_INFO_AGENT_VERSION: "1.2.14"
GH_AW_INFO_WORKFLOW_NAME: "Smoke OpenCode"
@@ -242,14 +242,14 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_5c781f6d33298184_EOF'
+ cat << 'GH_AW_PROMPT_ce2042b8038d4875_EOF'
- GH_AW_PROMPT_5c781f6d33298184_EOF
+ GH_AW_PROMPT_ce2042b8038d4875_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_5c781f6d33298184_EOF'
+ cat << 'GH_AW_PROMPT_ce2042b8038d4875_EOF'
Tools: add_comment(max:2), create_issue, add_labels, missing_tool, missing_data, noop
@@ -281,16 +281,16 @@ jobs:
{{/if}}
- GH_AW_PROMPT_5c781f6d33298184_EOF
+ GH_AW_PROMPT_ce2042b8038d4875_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_5c781f6d33298184_EOF'
+ cat << 'GH_AW_PROMPT_ce2042b8038d4875_EOF'
{{#runtime-import .github/workflows/shared/gh.md}}
{{#runtime-import .github/workflows/shared/reporting-otlp.md}}
{{#runtime-import .github/workflows/shared/reporting.md}}
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/smoke-opencode.md}}
- GH_AW_PROMPT_5c781f6d33298184_EOF
+ GH_AW_PROMPT_ce2042b8038d4875_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -475,9 +475,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_fd472a81ecb82b12_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_584e9fae80b27656_EOF'
{"add_comment":{"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-opencode"]},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-opencode","expires":2,"labels":["automation","testing"],"max":1},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{}}
- GH_AW_SAFE_OUTPUTS_CONFIG_fd472a81ecb82b12_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_584e9fae80b27656_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -690,7 +690,7 @@ jobs:
- name: Write MCP Scripts Config
run: |
mkdir -p "${RUNNER_TEMP}/gh-aw/mcp-scripts/logs"
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json" << 'GH_AW_MCP_SCRIPTS_TOOLS_e7e5788296ef6290_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json" << 'GH_AW_MCP_SCRIPTS_TOOLS_1a304e83de76d99a_EOF'
{
"serverName": "mcpscripts",
"version": "1.0.0",
@@ -720,8 +720,8 @@ jobs:
}
]
}
- GH_AW_MCP_SCRIPTS_TOOLS_e7e5788296ef6290_EOF
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs" << 'GH_AW_MCP_SCRIPTS_SERVER_90c0622bcb23fd04_EOF'
+ GH_AW_MCP_SCRIPTS_TOOLS_1a304e83de76d99a_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs" << 'GH_AW_MCP_SCRIPTS_SERVER_659a58a052c962f3_EOF'
const path = require("path");
const { startHttpServer } = require("./mcp_scripts_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
@@ -735,12 +735,12 @@ jobs:
console.error("Failed to start mcp-scripts HTTP server:", error);
process.exit(1);
});
- GH_AW_MCP_SCRIPTS_SERVER_90c0622bcb23fd04_EOF
+ GH_AW_MCP_SCRIPTS_SERVER_659a58a052c962f3_EOF
chmod +x "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs"
- name: Write MCP Scripts Tool Files
run: |
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh" << 'GH_AW_MCP_SCRIPTS_SH_GH_967d9015c9bba591_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh" << 'GH_AW_MCP_SCRIPTS_SH_GH_d6d879ba606e4319_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: gh
# Execute any gh CLI command. This tool is accessible as 'mcpscripts-gh'. Provide the full command after 'gh' (e.g., args: 'pr list --limit 5'). The tool will run: gh . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -752,7 +752,7 @@ jobs:
GH_TOKEN="$GH_AW_GH_TOKEN" gh $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_GH_967d9015c9bba591_EOF
+ GH_AW_MCP_SCRIPTS_SH_GH_d6d879ba606e4319_EOF
chmod +x "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh"
- name: Generate MCP Scripts Server Config
@@ -824,7 +824,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_MCP_SCRIPTS_PORT -e GH_AW_MCP_SCRIPTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GITHUB_AW_OTEL_TRACE_ID -e GITHUB_AW_OTEL_PARENT_SPAN_ID -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.26'
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_82ff52dd7c04f5a3_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_6341de61607a964d_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
"github": {
@@ -884,7 +884,7 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_82ff52dd7c04f5a3_EOF
+ GH_AW_MCP_CONFIG_6341de61607a964d_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -921,13 +921,15 @@ jobs:
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --allow-domains '*.githubusercontent.com,api.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,docs.github.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.blog,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --image-tag 0.25.26 --skip-pull --enable-api-proxy \
-- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && opencode run --print-logs --log-level DEBUG "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
+ COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
GH_AW_MCP_CONFIG: ${{ github.workspace }}/opencode.jsonc
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }}
+ GITHUB_COPILOT_BASE_URL: http://host.docker.internal:10002
GITHUB_WORKSPACE: ${{ github.workspace }}
NO_PROXY: localhost,127.0.0.1
OPENAI_API_KEY: ${{ secrets.COPILOT_GITHUB_TOKEN }}
- OPENAI_BASE_URL: http://host.docker.internal:10004
+ OPENCODE_MODEL: copilot/gpt-5
- name: Configure Git credentials
env:
REPO_NAME: ${{ github.repository }}
@@ -1373,11 +1375,13 @@ jobs:
sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --allow-domains api.githubcopilot.com,github.com,host.docker.internal,raw.githubusercontent.com,registry.npmjs.org --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --image-tag 0.25.26 --skip-pull --enable-api-proxy \
-- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && opencode run --print-logs --log-level DEBUG "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log
env:
+ COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GITHUB_COPILOT_BASE_URL: http://host.docker.internal:10002
GITHUB_WORKSPACE: ${{ github.workspace }}
NO_PROXY: localhost,127.0.0.1
OPENAI_API_KEY: ${{ secrets.COPILOT_GITHUB_TOKEN }}
- OPENAI_BASE_URL: http://host.docker.internal:10004
+ OPENCODE_MODEL: copilot/gpt-5
- name: Upload threat detection log
if: always() && steps.detection_guard.outputs.run_detection == 'true'
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
@@ -1456,7 +1460,7 @@ jobs:
GH_AW_DETECTION_REASON: ${{ needs.detection.outputs.detection_reason }}
GH_AW_EFFECTIVE_TOKENS: ${{ needs.agent.outputs.effective_tokens }}
GH_AW_ENGINE_ID: "opencode"
- GH_AW_ENGINE_MODEL: ${{ needs.agent.outputs.model }}
+ GH_AW_ENGINE_MODEL: "copilot/gpt-5"
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔥 *[{workflow_name}]({run_url}) — Powered by OpenCode*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"🔥 OpenCode initializing... [{workflow_name}]({run_url}) begins on this {event_type}...\",\"runSuccess\":\"🚀 [{workflow_name}]({run_url}) **MISSION COMPLETE!** OpenCode delivered. 🔥\",\"runFailure\":\"⚠️ [{workflow_name}]({run_url}) {status}. OpenCode encountered unexpected challenges...\"}"
GH_AW_WORKFLOW_ID: "smoke-opencode"
GH_AW_WORKFLOW_NAME: "Smoke OpenCode"
diff --git a/.github/workflows/smoke-opencode.md b/.github/workflows/smoke-opencode.md
index 2e3420070f3..af7333c2ed8 100644
--- a/.github/workflows/smoke-opencode.md
+++ b/.github/workflows/smoke-opencode.md
@@ -14,6 +14,7 @@ permissions:
name: Smoke OpenCode
engine:
id: opencode
+ model: copilot/gpt-5
strict: true
imports:
- shared/gh.md
diff --git a/docs/adr/27708-universal-llm-consumer-engine-for-multi-provider-routing.md b/docs/adr/27708-universal-llm-consumer-engine-for-multi-provider-routing.md
new file mode 100644
index 00000000000..99709615874
--- /dev/null
+++ b/docs/adr/27708-universal-llm-consumer-engine-for-multi-provider-routing.md
@@ -0,0 +1,81 @@
+# ADR-27708: Universal LLM Consumer Engine for Multi-Provider Backend Routing
+
+**Date**: 2026-04-21
+**Status**: Draft
+**Deciders**: pelikhan
+
+---
+
+## Part 1 — Narrative (Human-Friendly)
+
+### Context
+
+OpenCode and Crush are both "universal" LLM consumer agents: unlike the Copilot engine, they are not tied to a single provider and can route requests to Anthropic, OpenAI/Codex, or Copilot backends depending on the model specified. Prior to this change, each engine contained its own hard-coded secret selection and environment-variable injection logic that defaulted exclusively to Copilot/OpenAI-compatible routing. This prevented true BYOK (Bring Your Own Key) usage at the native provider API and caused duplicated, drift-prone logic across the two engines. The agentic workflow framework needed a way to route OpenCode and Crush directly to the Anthropic or OpenAI native APIs when users specify models from those providers via `engine.model`.
+
+### Decision
+
+We will introduce a `UniversalLLMConsumerEngine` struct that OpenCode and Crush both embed as their base type. This struct owns the shared logic for resolving the LLM backend (Copilot, Anthropic, or Codex/OpenAI) from the `engine.model` provider prefix (e.g., `anthropic/claude-sonnet-4`), and exposes unified methods for secret name derivation, secret validation step generation, and provider environment variable injection. We will also add a compiler validation step that requires `engine.model` to be set in `provider/model` format for all universal consumer engines.
+
+### Alternatives Considered
+
+#### Alternative 1: Keep Per-Engine Secret Logic, Add Provider Switch Inline
+
+Each engine continues to own its own `GetRequiredSecretNames`, `GetSecretValidationStep`, and environment-building methods, with a new `switch` on the provider prefix added to each. This was rejected because it duplicates the provider-resolution logic in both `opencode_engine.go` and `crush_engine.go`, making it easy for them to drift out of sync when a new provider is added.
+
+#### Alternative 2: A Standalone Provider Factory / Registry
+
+A dedicated `LLMProviderRegistry` that maps provider strings to backend profiles and is injected into each engine. This would be more decoupled and unit-testable in isolation, but it introduces indirection (a new abstraction layer, new interface, registration pattern) that is not yet justified by the number of providers or engines. Embedding a `UniversalLLMConsumerEngine` struct keeps the shared logic co-located without a separate registration mechanism.
+
+### Consequences
+
+#### Positive
+- Provider-to-backend routing logic is a single source of truth: adding a new supported provider (e.g., Gemini) requires a change in one place (`universal_llm_consumer_engine.go`) rather than two.
+- Compile-time validation ensures that workflows using OpenCode or Crush always declare a valid `engine.model` in `provider/model` format, preventing silent misconfiguration.
+- Native provider API routing (e.g., `ANTHROPIC_API_KEY` + `ANTHROPIC_BASE_URL`) is now correctly applied without requiring manual `engine.env` overrides.
+
+#### Negative
+- OpenCode and Crush are now structurally coupled: a bug or breaking change in `UniversalLLMConsumerEngine` will affect both engines simultaneously.
+- The `engine.model` field becomes required for both engines, which is a breaking change for any existing workflow frontmatter that omits it.
+- The `copilot-requests` feature flag path remains in the shared base, meaning the shared logic must be kept aware of Copilot-specific feature flags.
+
+#### Neutral
+- Compiled workflow lock files (`.lock.yml`) are regenerated to reflect the new secret names and environment variables, which will require safe-update approval gates for secrets like `ANTHROPIC_API_KEY`.
+- The `CrushLLMGatewayPort` constant is no longer referenced directly in the Crush engine; gateway port selection is now driven by the backend profile returned from `getUniversalLLMBackendProfile`.
+
+---
+
+## Part 2 — Normative Specification (RFC 2119)
+
+> The key words **MUST**, **MUST NOT**, **REQUIRED**, **SHALL**, **SHALL NOT**, **SHOULD**, **SHOULD NOT**, **RECOMMENDED**, **MAY**, and **OPTIONAL** in this section are to be interpreted as described in [RFC 2119](https://www.rfc-editor.org/rfc/rfc2119).
+
+### Universal Consumer Engine Identification
+
+1. An engine that supports multiple LLM providers via a user-supplied `engine.model` field **MUST** embed `UniversalLLMConsumerEngine` as its base struct instead of `BaseEngine` directly.
+2. Engines that route exclusively through a single provider (e.g., the Copilot engine) **MUST NOT** embed `UniversalLLMConsumerEngine`.
+
+### Model Field Requirements
+
+1. Compiler validation **MUST** reject workflow frontmatter for universal consumer engines (OpenCode, Crush) when `engine.model` is absent or blank.
+2. The `engine.model` value **MUST** use `provider/model` format (e.g., `anthropic/claude-sonnet-4`, `copilot/gpt-5`, `openai/gpt-4.1`).
+3. The provider prefix **MUST** be one of the supported values: `copilot`, `anthropic`, `openai`, or `codex`. Any other prefix **MUST** produce a compile-time error.
+
+### Backend Profile Resolution
+
+1. The backend profile (secret names, environment variables, base URL env name, gateway port) **MUST** be derived exclusively from the resolved `UniversalLLMBackend` value and the `copilot-requests` feature flag state.
+2. Implementations **MUST NOT** hard-code provider-specific secret names or environment variables in individual engine files (`opencode_engine.go`, `crush_engine.go`); all such logic **MUST** live in `universal_llm_consumer_engine.go`.
+3. When the resolved backend is `anthropic`, the execution environment **MUST** include `ANTHROPIC_API_KEY` and, when the firewall is enabled, **MUST** set `ANTHROPIC_BASE_URL` to the gateway's internal address.
+4. When the resolved backend is `codex`/`openai`, the execution environment **MUST** include both `CODEX_API_KEY` and `OPENAI_API_KEY` (falling back to the same secret value), and **MUST** set `OPENAI_BASE_URL` when the firewall is enabled.
+5. When the resolved backend is `copilot`, the engine **SHOULD** check the `copilot-requests` feature flag; if the flag is enabled, the engine **MUST** use `${{ github.token }}` and require no additional secret.
+
+### Adding New Providers
+
+1. New provider support **MUST** be added by extending the `switch` statement in `resolveUniversalLLMBackendFromModel` and adding a corresponding case in `getUniversalLLMBackendProfile`.
+2. New providers **MUST NOT** be handled by overriding methods in individual engine structs.
+
+### Conformance
+
+An implementation is considered conformant with this ADR if it satisfies all **MUST** and **MUST NOT** requirements above. Specifically: universal consumer engines embed `UniversalLLMConsumerEngine`; compiler validation rejects missing or malformed `engine.model`; all provider-to-backend mapping lives in `universal_llm_consumer_engine.go`. Failure to meet any **MUST** or **MUST NOT** requirement constitutes non-conformance.
+
+---
+
+*This is a DRAFT ADR generated by the [Design Decision Gate](https://github.com/github/gh-aw/actions/runs/24751429896) workflow. The PR author must review, complete, and finalize this document before the PR can merge.*
diff --git a/pkg/workflow/agent_validation.go b/pkg/workflow/agent_validation.go
index c1b879c51b5..fedab12239e 100644
--- a/pkg/workflow/agent_validation.go
+++ b/pkg/workflow/agent_validation.go
@@ -48,6 +48,7 @@ import (
"fmt"
"os"
"path/filepath"
+ "strings"
"github.com/github/gh-aw/pkg/console"
"github.com/goccy/go-yaml"
@@ -154,6 +155,25 @@ func (c *Compiler) validateMaxContinuationsSupport(frontmatter map[string]any, e
return nil
}
+// validateUniversalLLMConsumerModel validates that universal consumer engines
+// (OpenCode/Crush) declare a provider-qualified engine.model.
+func (c *Compiler) validateUniversalLLMConsumerModel(frontmatter map[string]any, engine CodingAgentEngine) error {
+ if engine.GetID() != "opencode" && engine.GetID() != "crush" {
+ return nil
+ }
+
+ _, engineConfig := c.ExtractEngineConfig(frontmatter)
+ if engineConfig == nil || strings.TrimSpace(engineConfig.Model) == "" {
+ return fmt.Errorf("engine.model is required for engine '%s' and must use provider/model format (for example: copilot/gpt-5, anthropic/claude-sonnet-4, openai/gpt-4.1)", engine.GetID())
+ }
+
+ if _, err := resolveUniversalLLMBackendFromModel(engineConfig.Model); err != nil {
+ return fmt.Errorf("invalid engine.model for engine '%s': %w", engine.GetID(), err)
+ }
+
+ return nil
+}
+
// validateWebSearchSupport validates that web-search tool is only used with engines that support this feature
func (c *Compiler) validateWebSearchSupport(tools map[string]any, engine CodingAgentEngine) {
// Check if web-search tool is requested
diff --git a/pkg/workflow/agent_validation_model_test.go b/pkg/workflow/agent_validation_model_test.go
new file mode 100644
index 00000000000..25e32c3847f
--- /dev/null
+++ b/pkg/workflow/agent_validation_model_test.go
@@ -0,0 +1,80 @@
+//go:build !integration
+
+package workflow
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestValidateUniversalLLMConsumerModel(t *testing.T) {
+ compiler := NewCompiler()
+
+ t.Run("non universal engine skips validation", func(t *testing.T) {
+ err := compiler.validateUniversalLLMConsumerModel(
+ map[string]any{
+ "engine": map[string]any{
+ "id": "copilot",
+ },
+ },
+ NewCopilotEngine(),
+ )
+ assert.NoError(t, err, "Non-universal engines should skip model validation")
+ })
+
+ t.Run("opencode requires model", func(t *testing.T) {
+ err := compiler.validateUniversalLLMConsumerModel(
+ map[string]any{
+ "engine": map[string]any{
+ "id": "opencode",
+ },
+ },
+ NewOpenCodeEngine(),
+ )
+ require.Error(t, err, "Missing model should fail for opencode")
+ assert.Contains(t, err.Error(), "engine.model is required for engine 'opencode'")
+ })
+
+ t.Run("crush requires provider/model format", func(t *testing.T) {
+ err := compiler.validateUniversalLLMConsumerModel(
+ map[string]any{
+ "engine": map[string]any{
+ "id": "crush",
+ "model": "gpt-4.1",
+ },
+ },
+ NewCrushEngine(),
+ )
+ require.Error(t, err, "Unqualified model should fail for crush")
+ assert.Contains(t, err.Error(), "provider/model format")
+ })
+
+ t.Run("unsupported provider fails", func(t *testing.T) {
+ err := compiler.validateUniversalLLMConsumerModel(
+ map[string]any{
+ "engine": map[string]any{
+ "id": "opencode",
+ "model": "groq/llama-4",
+ },
+ },
+ NewOpenCodeEngine(),
+ )
+ require.Error(t, err, "Unsupported provider should fail")
+ assert.Contains(t, err.Error(), "unsupported provider")
+ })
+
+ t.Run("supported provider passes", func(t *testing.T) {
+ err := compiler.validateUniversalLLMConsumerModel(
+ map[string]any{
+ "engine": map[string]any{
+ "id": "crush",
+ "model": "anthropic/claude-sonnet-4",
+ },
+ },
+ NewCrushEngine(),
+ )
+ assert.NoError(t, err, "Supported provider/model should pass")
+ })
+}
diff --git a/pkg/workflow/compiler_orchestrator_tools.go b/pkg/workflow/compiler_orchestrator_tools.go
index 023a81224f0..81bcd1f86da 100644
--- a/pkg/workflow/compiler_orchestrator_tools.go
+++ b/pkg/workflow/compiler_orchestrator_tools.go
@@ -212,6 +212,11 @@ func (c *Compiler) processToolsAndMarkdown(result *parser.FrontmatterResult, cle
return nil, err
}
+ // Validate universal consumer model requirements (OpenCode/Crush)
+ if err := c.validateUniversalLLMConsumerModel(result.Frontmatter, agenticEngine); err != nil {
+ return nil, err
+ }
+
// Validate web-search support for the current engine (warning only)
c.validateWebSearchSupport(tools, agenticEngine)
diff --git a/pkg/workflow/crush_engine.go b/pkg/workflow/crush_engine.go
index fdd630ca1c6..a1503d6e7f0 100644
--- a/pkg/workflow/crush_engine.go
+++ b/pkg/workflow/crush_engine.go
@@ -3,7 +3,6 @@ package workflow
import (
"fmt"
"maps"
- "strings"
"github.com/github/gh-aw/pkg/constants"
"github.com/github/gh-aw/pkg/logger"
@@ -12,23 +11,26 @@ import (
var crushLog = logger.New("workflow:crush_engine")
// CrushEngine represents the Crush CLI agentic engine.
-// Crush is a provider-agnostic, open-source AI coding agent that supports
-// 75+ models via BYOK (Bring Your Own Key).
+// Crush is a provider-agnostic, open-source AI coding agent with broader BYOK
+// (Bring Your Own Key) support, but gh-aw currently supports a subset of
+// providers for engine.model validation: copilot, anthropic, openai, and codex.
type CrushEngine struct {
- BaseEngine
+ UniversalLLMConsumerEngine
}
func NewCrushEngine() *CrushEngine {
return &CrushEngine{
- BaseEngine: BaseEngine{
- id: "crush",
- displayName: "Crush",
- description: "Crush CLI with headless mode and multi-provider LLM support",
- experimental: true, // Start as experimental until smoke tests pass consistently
- supportsToolsAllowlist: false, // Crush manages its own tool permissions via .crush.json
- supportsMaxTurns: false, // No --max-turns flag in crush run
- supportsWebSearch: false, // Has built-in websearch but not exposed via gh-aw neutral tools yet
- llmGatewayPort: constants.CrushLLMGatewayPort,
+ UniversalLLMConsumerEngine: UniversalLLMConsumerEngine{
+ BaseEngine: BaseEngine{
+ id: "crush",
+ displayName: "Crush",
+ description: "Crush CLI with headless mode and multi-provider LLM support",
+ experimental: true, // Start as experimental until smoke tests pass consistently
+ supportsToolsAllowlist: false, // Crush manages its own tool permissions via .crush.json
+ supportsMaxTurns: false, // No --max-turns flag in crush run
+ supportsWebSearch: false, // Has built-in websearch but not exposed via gh-aw neutral tools yet
+ llmGatewayPort: constants.CrushLLMGatewayPort,
+ },
},
}
}
@@ -50,42 +52,7 @@ func (e *CrushEngine) GetModelEnvVarName() string {
// Additional provider API keys can be added via engine.env overrides.
func (e *CrushEngine) GetRequiredSecretNames(workflowData *WorkflowData) []string {
crushLog.Print("Collecting required secrets for Crush engine")
- var secrets []string
-
- // Default: Copilot routing via COPILOT_GITHUB_TOKEN.
- // When copilot-requests feature is enabled, no secret is needed (uses github.token).
- if !isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData) {
- secrets = append(secrets, "COPILOT_GITHUB_TOKEN")
- }
-
- // Allow additional provider API keys from engine.env overrides
- if workflowData.EngineConfig != nil && len(workflowData.EngineConfig.Env) > 0 {
- for key := range workflowData.EngineConfig.Env {
- if strings.HasSuffix(key, "_API_KEY") || strings.HasSuffix(key, "_KEY") {
- secrets = append(secrets, key)
- }
- }
- }
-
- // Add common MCP secrets (MCP_GATEWAY_API_KEY if MCP servers present, mcp-scripts secrets)
- secrets = append(secrets, collectCommonMCPSecrets(workflowData)...)
-
- // Add GitHub token for GitHub MCP server if present
- if hasGitHubTool(workflowData.ParsedTools) {
- crushLog.Print("Adding GITHUB_MCP_SERVER_TOKEN secret")
- secrets = append(secrets, "GITHUB_MCP_SERVER_TOKEN")
- }
-
- // Add HTTP MCP header secret names
- headerSecrets := collectHTTPMCPHeaderSecrets(workflowData.Tools)
- for varName := range headerSecrets {
- secrets = append(secrets, varName)
- }
- if len(headerSecrets) > 0 {
- crushLog.Printf("Added %d HTTP MCP header secrets", len(headerSecrets))
- }
-
- return secrets
+ return e.GetUniversalRequiredSecretNames(workflowData)
}
// GetInstallationSteps returns the GitHub Actions steps needed to install Crush CLI
@@ -111,13 +78,8 @@ func (e *CrushEngine) GetInstallationSteps(workflowData *WorkflowData) []GitHubA
// GetSecretValidationStep returns the secret validation step for the Crush engine.
// Returns an empty step if copilot-requests feature is enabled (uses GitHub Actions token).
func (e *CrushEngine) GetSecretValidationStep(workflowData *WorkflowData) GitHubActionStep {
- if isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData) {
- crushLog.Print("Skipping secret validation step: copilot-requests feature enabled, using GitHub Actions token")
- return GitHubActionStep{}
- }
- return BuildDefaultSecretValidationStep(
+ return e.GetUniversalSecretValidationStep(
workflowData,
- []string{"COPILOT_GITHUB_TOKEN"},
"Crush CLI",
"https://github.github.com/gh-aw/reference/engines/#crush",
)
@@ -209,37 +171,18 @@ func (e *CrushEngine) GetExecutionSteps(workflowData *WorkflowData, logFile stri
command = fmt.Sprintf("set -o pipefail\n%s 2>&1 | tee -a %s", crushCommand, logFile)
}
- // Environment variables — default to Copilot routing (OpenAI-compatible API).
- // OPENAI_API_KEY is set from COPILOT_GITHUB_TOKEN (or github.token with copilot-requests).
- // #nosec G101 -- These are NOT hardcoded credentials. They are GitHub Actions expression templates
- // that the runtime replaces with actual values.
- var openaiAPIKey string
- useCopilotRequests := isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData)
- if useCopilotRequests {
- openaiAPIKey = "${{ github.token }}"
- crushLog.Print("Using GitHub Actions token as OPENAI_API_KEY (copilot-requests feature enabled)")
- } else {
- openaiAPIKey = "${{ secrets.COPILOT_GITHUB_TOKEN }}"
- }
-
env := map[string]string{
- "OPENAI_API_KEY": openaiAPIKey,
"GH_AW_PROMPT": "/tmp/gh-aw/aw-prompts/prompt.txt",
"GITHUB_WORKSPACE": "${{ github.workspace }}",
"NO_PROXY": "localhost,127.0.0.1",
}
+ e.ApplyUniversalProviderEnv(env, workflowData, firewallEnabled)
// MCP config path
if HasMCPServers(workflowData) {
env["GH_AW_MCP_CONFIG"] = "${{ github.workspace }}/.crush.json"
}
- // LLM gateway base URL override (default Copilot routing via OpenAI-compatible endpoint)
- if firewallEnabled {
- env["OPENAI_BASE_URL"] = fmt.Sprintf("http://host.docker.internal:%d",
- constants.CrushLLMGatewayPort)
- }
-
// Safe outputs env
applySafeOutputEnvToMap(env, workflowData)
diff --git a/pkg/workflow/crush_engine_test.go b/pkg/workflow/crush_engine_test.go
index 721cc1d9c41..284bb6e646f 100644
--- a/pkg/workflow/crush_engine_test.go
+++ b/pkg/workflow/crush_engine_test.go
@@ -42,6 +42,34 @@ func TestCrushEngine(t *testing.T) {
assert.Contains(t, secrets, "COPILOT_GITHUB_TOKEN", "Should require COPILOT_GITHUB_TOKEN for Copilot routing")
})
+ t.Run("required secrets with anthropic model", func(t *testing.T) {
+ workflowData := &WorkflowData{
+ Name: "test",
+ EngineConfig: &EngineConfig{
+ Model: "anthropic/claude-sonnet-4-20250514",
+ },
+ ParsedTools: &ToolsConfig{},
+ Tools: map[string]any{},
+ }
+ secrets := engine.GetRequiredSecretNames(workflowData)
+ assert.Contains(t, secrets, "ANTHROPIC_API_KEY", "Should require ANTHROPIC_API_KEY for anthropic/* models")
+ assert.NotContains(t, secrets, "COPILOT_GITHUB_TOKEN", "Should not require COPILOT_GITHUB_TOKEN for anthropic/* models")
+ })
+
+ t.Run("required secrets with openai model", func(t *testing.T) {
+ workflowData := &WorkflowData{
+ Name: "test",
+ EngineConfig: &EngineConfig{
+ Model: "openai/gpt-4.1",
+ },
+ ParsedTools: &ToolsConfig{},
+ Tools: map[string]any{},
+ }
+ secrets := engine.GetRequiredSecretNames(workflowData)
+ assert.Contains(t, secrets, "CODEX_API_KEY", "Should require CODEX_API_KEY for openai/* models")
+ assert.Contains(t, secrets, "OPENAI_API_KEY", "Should require OPENAI_API_KEY for openai/* models")
+ })
+
t.Run("required secrets with copilot-requests feature", func(t *testing.T) {
workflowData := &WorkflowData{
Name: "test",
@@ -362,7 +390,7 @@ func TestCrushEngineFirewallIntegration(t *testing.T) {
assert.Contains(t, stepContent, "awf", "Should use AWF when firewall is enabled")
assert.Contains(t, stepContent, "--allow-domains", "Should include allow-domains flag")
assert.Contains(t, stepContent, "--enable-api-proxy", "Should include --enable-api-proxy flag")
- assert.Contains(t, stepContent, "OPENAI_BASE_URL: http://host.docker.internal:10005", "Should set OPENAI_BASE_URL to LLM gateway URL")
+ assert.Contains(t, stepContent, "GITHUB_COPILOT_BASE_URL: http://host.docker.internal:10002", "Should route copilot/* fallback through Copilot LLM gateway URL")
})
t.Run("firewall enabled adds mounted MCP CLI path setup", func(t *testing.T) {
diff --git a/pkg/workflow/engine_definition_loader_test.go b/pkg/workflow/engine_definition_loader_test.go
index 50108d8ced0..151df5c30db 100644
--- a/pkg/workflow/engine_definition_loader_test.go
+++ b/pkg/workflow/engine_definition_loader_test.go
@@ -63,15 +63,25 @@ func TestBuiltinEngineMarkdownFiles(t *testing.T) {
// produces a valid lock file with the correct engine ID.
func TestBuiltinEngineStringFormInjection(t *testing.T) {
tests := []struct {
- engineID string
- engineStep string // distinctive step name in the lock file
+ engineID string
+ engineStep string // distinctive step name in the lock file
+ expectError bool
+ errorContains string
}{
- {"copilot", `GH_AW_INFO_ENGINE_ID: "copilot"`},
- {"codex", `GH_AW_INFO_ENGINE_ID: "codex"`},
- {"claude", `GH_AW_INFO_ENGINE_ID: "claude"`},
- {"gemini", `GH_AW_INFO_ENGINE_ID: "gemini"`},
- {"opencode", `GH_AW_INFO_ENGINE_ID: "opencode"`},
- {"crush", `GH_AW_INFO_ENGINE_ID: "crush"`},
+ {engineID: "copilot", engineStep: `GH_AW_INFO_ENGINE_ID: "copilot"`},
+ {engineID: "codex", engineStep: `GH_AW_INFO_ENGINE_ID: "codex"`},
+ {engineID: "claude", engineStep: `GH_AW_INFO_ENGINE_ID: "claude"`},
+ {engineID: "gemini", engineStep: `GH_AW_INFO_ENGINE_ID: "gemini"`},
+ {
+ engineID: "opencode",
+ expectError: true,
+ errorContains: "engine.model is required for engine 'opencode'",
+ },
+ {
+ engineID: "crush",
+ expectError: true,
+ errorContains: "engine.model is required for engine 'crush'",
+ },
}
for _, tt := range tests {
@@ -97,6 +107,11 @@ func TestBuiltinEngineStringFormInjection(t *testing.T) {
compiler := NewCompiler()
err := compiler.CompileWorkflow(mainFile)
+ if tt.expectError {
+ require.Error(t, err, "compilation should fail for engine %s (string form)", tt.engineID)
+ assert.Contains(t, err.Error(), tt.errorContains)
+ return
+ }
require.NoError(t, err, "compilation should succeed for engine %s (string form)", tt.engineID)
lockFile := filepath.Join(workflowsDir, "test-engine-injection.lock.yml")
diff --git a/pkg/workflow/opencode_engine.go b/pkg/workflow/opencode_engine.go
index 879b59cd892..afb3b21f2c0 100644
--- a/pkg/workflow/opencode_engine.go
+++ b/pkg/workflow/opencode_engine.go
@@ -3,7 +3,6 @@ package workflow
import (
"fmt"
"maps"
- "strings"
"github.com/github/gh-aw/pkg/constants"
"github.com/github/gh-aw/pkg/logger"
@@ -15,20 +14,22 @@ var openCodeLog = logger.New("workflow:opencode_engine")
// OpenCode is a provider-agnostic, open-source AI coding agent that supports
// multiple models via BYOK (Bring Your Own Key).
type OpenCodeEngine struct {
- BaseEngine
+ UniversalLLMConsumerEngine
}
func NewOpenCodeEngine() *OpenCodeEngine {
return &OpenCodeEngine{
- BaseEngine: BaseEngine{
- id: "opencode",
- displayName: "OpenCode",
- description: "OpenCode CLI with headless mode and multi-provider LLM support",
- experimental: true,
- supportsToolsAllowlist: false,
- supportsMaxTurns: false,
- supportsWebSearch: false,
- llmGatewayPort: constants.OpenCodeLLMGatewayPort,
+ UniversalLLMConsumerEngine: UniversalLLMConsumerEngine{
+ BaseEngine: BaseEngine{
+ id: "opencode",
+ displayName: "OpenCode",
+ description: "OpenCode CLI with headless mode and multi-provider LLM support",
+ experimental: true,
+ supportsToolsAllowlist: false,
+ supportsMaxTurns: false,
+ supportsWebSearch: false,
+ llmGatewayPort: constants.OpenCodeLLMGatewayPort,
+ },
},
}
}
@@ -50,36 +51,7 @@ func (e *OpenCodeEngine) GetModelEnvVarName() string {
// Additional provider API keys can be added via engine.env overrides.
func (e *OpenCodeEngine) GetRequiredSecretNames(workflowData *WorkflowData) []string {
openCodeLog.Print("Collecting required secrets for OpenCode engine")
- var secrets []string
-
- if !isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData) {
- secrets = append(secrets, "COPILOT_GITHUB_TOKEN")
- }
-
- if workflowData.EngineConfig != nil && len(workflowData.EngineConfig.Env) > 0 {
- for key := range workflowData.EngineConfig.Env {
- if strings.HasSuffix(key, "_API_KEY") || strings.HasSuffix(key, "_KEY") {
- secrets = append(secrets, key)
- }
- }
- }
-
- secrets = append(secrets, collectCommonMCPSecrets(workflowData)...)
-
- if hasGitHubTool(workflowData.ParsedTools) {
- openCodeLog.Print("Adding GITHUB_MCP_SERVER_TOKEN secret")
- secrets = append(secrets, "GITHUB_MCP_SERVER_TOKEN")
- }
-
- headerSecrets := collectHTTPMCPHeaderSecrets(workflowData.Tools)
- for varName := range headerSecrets {
- secrets = append(secrets, varName)
- }
- if len(headerSecrets) > 0 {
- openCodeLog.Printf("Added %d HTTP MCP header secrets", len(headerSecrets))
- }
-
- return secrets
+ return e.GetUniversalRequiredSecretNames(workflowData)
}
// GetInstallationSteps returns the GitHub Actions steps needed to install OpenCode CLI
@@ -104,13 +76,8 @@ func (e *OpenCodeEngine) GetInstallationSteps(workflowData *WorkflowData) []GitH
// GetSecretValidationStep returns the secret validation step for the OpenCode engine.
// Returns an empty step if copilot-requests feature is enabled (uses GitHub Actions token).
func (e *OpenCodeEngine) GetSecretValidationStep(workflowData *WorkflowData) GitHubActionStep {
- if isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData) {
- openCodeLog.Print("Skipping secret validation step: copilot-requests feature enabled, using GitHub Actions token")
- return GitHubActionStep{}
- }
- return BuildDefaultSecretValidationStep(
+ return e.GetUniversalSecretValidationStep(
workflowData,
- []string{"COPILOT_GITHUB_TOKEN"},
"OpenCode CLI",
"https://github.github.com/gh-aw/reference/engines/#opencode",
)
@@ -183,31 +150,17 @@ func (e *OpenCodeEngine) GetExecutionSteps(workflowData *WorkflowData, logFile s
command = fmt.Sprintf("set -o pipefail\n%s 2>&1 | tee -a %s", openCodeCommand, logFile)
}
- var openaiAPIKey string
- useCopilotRequests := isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData)
- if useCopilotRequests {
- openaiAPIKey = "${{ github.token }}"
- openCodeLog.Print("Using GitHub Actions token as OPENAI_API_KEY (copilot-requests feature enabled)")
- } else {
- openaiAPIKey = "${{ secrets.COPILOT_GITHUB_TOKEN }}"
- }
-
env := map[string]string{
- "OPENAI_API_KEY": openaiAPIKey,
"GH_AW_PROMPT": "/tmp/gh-aw/aw-prompts/prompt.txt",
"GITHUB_WORKSPACE": "${{ github.workspace }}",
"NO_PROXY": "localhost,127.0.0.1",
}
+ e.ApplyUniversalProviderEnv(env, workflowData, firewallEnabled)
if HasMCPServers(workflowData) {
env["GH_AW_MCP_CONFIG"] = "${{ github.workspace }}/opencode.jsonc"
}
- if firewallEnabled {
- env["OPENAI_BASE_URL"] = fmt.Sprintf("http://host.docker.internal:%d",
- constants.OpenCodeLLMGatewayPort)
- }
-
applySafeOutputEnvToMap(env, workflowData)
if modelConfigured {
diff --git a/pkg/workflow/opencode_engine_test.go b/pkg/workflow/opencode_engine_test.go
index 4938b21d074..3727757fecc 100644
--- a/pkg/workflow/opencode_engine_test.go
+++ b/pkg/workflow/opencode_engine_test.go
@@ -55,6 +55,9 @@ func TestOpenCodeEngineInstallationAndExecution(t *testing.T) {
t.Run("firewall sets OpenCode gateway base URL", func(t *testing.T) {
workflowData := &WorkflowData{
Name: "test-workflow",
+ EngineConfig: &EngineConfig{
+ Model: "copilot/gpt-5",
+ },
NetworkPermissions: &NetworkPermissions{
Allowed: []string{"defaults"},
Firewall: &FirewallConfig{
@@ -66,6 +69,32 @@ func TestOpenCodeEngineInstallationAndExecution(t *testing.T) {
steps := engine.GetExecutionSteps(workflowData, "/tmp/test.log")
require.Len(t, steps, 2, "Should generate config step and execution step")
execContent := strings.Join(steps[1], "\n")
- assert.Contains(t, execContent, "OPENAI_BASE_URL: http://host.docker.internal:10004", "Should route through OpenCode LLM gateway port")
+ assert.Contains(t, execContent, "GITHUB_COPILOT_BASE_URL: http://host.docker.internal:10002", "Should route through Copilot LLM gateway port for copilot/* models")
+ })
+}
+
+func TestOpenCodeEngineProviderProfiles(t *testing.T) {
+ engine := NewOpenCodeEngine()
+
+ t.Run("anthropic model uses anthropic secret", func(t *testing.T) {
+ workflowData := &WorkflowData{
+ EngineConfig: &EngineConfig{Model: "anthropic/claude-sonnet-4"},
+ ParsedTools: &ToolsConfig{},
+ Tools: map[string]any{},
+ }
+ secrets := engine.GetRequiredSecretNames(workflowData)
+ assert.Contains(t, secrets, "ANTHROPIC_API_KEY", "Should require ANTHROPIC_API_KEY for anthropic/* models")
+ assert.NotContains(t, secrets, "COPILOT_GITHUB_TOKEN", "Should not require COPILOT_GITHUB_TOKEN for anthropic/* models")
+ })
+
+ t.Run("openai model uses codex/openai secrets", func(t *testing.T) {
+ workflowData := &WorkflowData{
+ EngineConfig: &EngineConfig{Model: "openai/gpt-4.1"},
+ ParsedTools: &ToolsConfig{},
+ Tools: map[string]any{},
+ }
+ secrets := engine.GetRequiredSecretNames(workflowData)
+ assert.Contains(t, secrets, "CODEX_API_KEY", "Should require CODEX_API_KEY for openai/* models")
+ assert.Contains(t, secrets, "OPENAI_API_KEY", "Should require OPENAI_API_KEY for openai/* models")
})
}
diff --git a/pkg/workflow/universal_llm_consumer_engine.go b/pkg/workflow/universal_llm_consumer_engine.go
new file mode 100644
index 00000000000..e2ba4ff0322
--- /dev/null
+++ b/pkg/workflow/universal_llm_consumer_engine.go
@@ -0,0 +1,167 @@
+package workflow
+
+import (
+ "errors"
+ "fmt"
+ "maps"
+ "strings"
+
+ "github.com/github/gh-aw/pkg/constants"
+ "github.com/github/gh-aw/pkg/logger"
+)
+
+var universalLLMConsumerLog = logger.New("workflow:universal_llm_consumer_engine")
+
+type UniversalLLMBackend string
+
+const (
+ UniversalLLMBackendCopilot UniversalLLMBackend = "copilot"
+ UniversalLLMBackendAnthropic UniversalLLMBackend = "anthropic"
+ UniversalLLMBackendCodex UniversalLLMBackend = "codex"
+)
+
+type UniversalLLMConsumerEngine struct {
+ BaseEngine
+}
+
+type universalLLMBackendProfile struct {
+ coreSecretNames []string
+ env map[string]string
+ baseURLEnvName string
+ gatewayPort int
+}
+
+func resolveUniversalLLMBackendFromModel(model string) (UniversalLLMBackend, error) {
+ model = strings.TrimSpace(model)
+ if model == "" {
+ return "", errors.New("for universal consumer engines (OpenCode/Crush), engine.model is required and must use provider/model format (supported providers: copilot, anthropic, openai, codex)")
+ }
+
+ parts := strings.SplitN(model, "/", 2)
+ if len(parts) != 2 || strings.TrimSpace(parts[0]) == "" || strings.TrimSpace(parts[1]) == "" {
+ return "", errors.New("for universal consumer engines (OpenCode/Crush), engine.model must use provider/model format (for example: copilot/gpt-5, anthropic/claude-sonnet-4, openai/gpt-4.1)")
+ }
+
+ switch strings.ToLower(strings.TrimSpace(parts[0])) {
+ case "copilot":
+ return UniversalLLMBackendCopilot, nil
+ case "anthropic":
+ return UniversalLLMBackendAnthropic, nil
+ case "openai", "codex":
+ return UniversalLLMBackendCodex, nil
+ default:
+ return "", fmt.Errorf("unsupported provider %q in engine.model; supported providers: copilot, anthropic, openai, codex", parts[0])
+ }
+}
+
+func getUniversalLLMBackendProfile(backend UniversalLLMBackend, useCopilotRequests bool) universalLLMBackendProfile {
+ switch backend {
+ case UniversalLLMBackendAnthropic:
+ return universalLLMBackendProfile{
+ coreSecretNames: []string{"ANTHROPIC_API_KEY"},
+ env: map[string]string{
+ "ANTHROPIC_API_KEY": "${{ secrets.ANTHROPIC_API_KEY }}",
+ },
+ baseURLEnvName: "ANTHROPIC_BASE_URL",
+ gatewayPort: constants.ClaudeLLMGatewayPort,
+ }
+ case UniversalLLMBackendCodex:
+ return universalLLMBackendProfile{
+ coreSecretNames: []string{"CODEX_API_KEY", "OPENAI_API_KEY"},
+ env: map[string]string{
+ "CODEX_API_KEY": "${{ secrets.CODEX_API_KEY || secrets.OPENAI_API_KEY }}",
+ "OPENAI_API_KEY": "${{ secrets.CODEX_API_KEY || secrets.OPENAI_API_KEY }}",
+ },
+ baseURLEnvName: "OPENAI_BASE_URL",
+ gatewayPort: constants.CodexLLMGatewayPort,
+ }
+ default:
+ copilotToken := "${{ secrets.COPILOT_GITHUB_TOKEN }}"
+ coreSecrets := []string{"COPILOT_GITHUB_TOKEN"}
+ if useCopilotRequests {
+ copilotToken = "${{ github.token }}"
+ coreSecrets = []string{}
+ }
+ return universalLLMBackendProfile{
+ coreSecretNames: coreSecrets,
+ env: map[string]string{
+ "COPILOT_GITHUB_TOKEN": copilotToken,
+ "OPENAI_API_KEY": copilotToken,
+ },
+ baseURLEnvName: "GITHUB_COPILOT_BASE_URL",
+ gatewayPort: constants.CopilotLLMGatewayPort,
+ }
+ }
+}
+
+func (e *UniversalLLMConsumerEngine) resolveBackend(workflowData *WorkflowData) UniversalLLMBackend {
+ model := ""
+ if workflowData != nil && workflowData.EngineConfig != nil {
+ model = workflowData.EngineConfig.Model
+ }
+ backend, err := resolveUniversalLLMBackendFromModel(model)
+ if err != nil {
+ universalLLMConsumerLog.Printf("Falling back to copilot backend while resolving model %q: %v", model, err)
+ return UniversalLLMBackendCopilot
+ }
+ return backend
+}
+
+func (e *UniversalLLMConsumerEngine) GetUniversalRequiredSecretNames(workflowData *WorkflowData) []string {
+ backend := e.resolveBackend(workflowData)
+ profile := getUniversalLLMBackendProfile(backend, isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData))
+ secrets := append([]string{}, profile.coreSecretNames...)
+
+ if workflowData != nil && workflowData.EngineConfig != nil && len(workflowData.EngineConfig.Env) > 0 {
+ for key := range workflowData.EngineConfig.Env {
+ if strings.HasSuffix(key, "_API_KEY") || strings.HasSuffix(key, "_KEY") {
+ secrets = append(secrets, key)
+ }
+ }
+ }
+
+ if workflowData != nil {
+ secrets = append(secrets, collectCommonMCPSecrets(workflowData)...)
+ }
+
+ parsedTools, tools := extractToolsConfig(workflowData)
+
+ if hasGitHubTool(parsedTools) {
+ secrets = append(secrets, "GITHUB_MCP_SERVER_TOKEN")
+ }
+
+ headerSecrets := collectHTTPMCPHeaderSecrets(tools)
+ for varName := range headerSecrets {
+ secrets = append(secrets, varName)
+ }
+
+ return secrets
+}
+
+func extractToolsConfig(workflowData *WorkflowData) (*ToolsConfig, map[string]any) {
+ if workflowData == nil {
+ return nil, map[string]any{}
+ }
+ if workflowData.Tools == nil {
+ return workflowData.ParsedTools, map[string]any{}
+ }
+ return workflowData.ParsedTools, workflowData.Tools
+}
+
+func (e *UniversalLLMConsumerEngine) GetUniversalSecretValidationStep(workflowData *WorkflowData, engineName, docsURL string) GitHubActionStep {
+ backend := e.resolveBackend(workflowData)
+ profile := getUniversalLLMBackendProfile(backend, isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData))
+ if len(profile.coreSecretNames) == 0 {
+ return GitHubActionStep{}
+ }
+ return BuildDefaultSecretValidationStep(workflowData, profile.coreSecretNames, engineName, docsURL)
+}
+
+func (e *UniversalLLMConsumerEngine) ApplyUniversalProviderEnv(env map[string]string, workflowData *WorkflowData, firewallEnabled bool) {
+ backend := e.resolveBackend(workflowData)
+ profile := getUniversalLLMBackendProfile(backend, isFeatureEnabled(constants.CopilotRequestsFeatureFlag, workflowData))
+ maps.Copy(env, profile.env)
+ if firewallEnabled {
+ env[profile.baseURLEnvName] = fmt.Sprintf("http://host.docker.internal:%d", profile.gatewayPort)
+ }
+}
diff --git a/pkg/workflow/universal_llm_consumer_engine_test.go b/pkg/workflow/universal_llm_consumer_engine_test.go
new file mode 100644
index 00000000000..dc74811927e
--- /dev/null
+++ b/pkg/workflow/universal_llm_consumer_engine_test.go
@@ -0,0 +1,18 @@
+//go:build !integration
+
+package workflow
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestUniversalLLMConsumerEngine_GetUniversalRequiredSecretNames_NilWorkflowData(t *testing.T) {
+ engine := &UniversalLLMConsumerEngine{}
+
+ assert.NotPanics(t, func() {
+ secrets := engine.GetUniversalRequiredSecretNames(nil)
+ assert.ElementsMatch(t, []string{"COPILOT_GITHUB_TOKEN"}, secrets, "Nil workflow data should safely fall back to only the copilot backend secret profile")
+ }, "GetUniversalRequiredSecretNames should handle nil workflowData safely")
+}