From 7c4ec4bdc7c2369ffa4c1752c5ff89df8f0c8394 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 12 Apr 2026 00:56:40 +0000
Subject: [PATCH 1/2] Initial plan
From dd1f7928c62ce8a6790dbd8951c1fe7e20f54a1b Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 12 Apr 2026 01:28:24 +0000
Subject: [PATCH 2/2] fix: quote shell variables to resolve shellcheck
SC2086/SC2012/SC2129 warnings in 15 workflows
Fix 24 shellcheck findings across 15 workflows:
- SC2086: Quote ${RUNNER_TEMP}, $GITHUB_REPOSITORY, $GITHUB_WORKSPACE, $CACHE_AGE
- SC2012: Replace `ls *.tar.gz` with `find -name '*.tar.gz'` in shared/apm.md
- SC2129: Use grouped redirect `{ ... } >> file` in release.md
- Fix unquoted path in compiler_safe_outputs_job.go cat heredoc
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/988adcd5-a968-4d87-b1f9-0236fbfc4170
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.../workflows/copilot-agent-analysis.lock.yml | 2 +-
.../copilot-pr-merged-report.lock.yml | 2 +-
.../copilot-pr-nlp-analysis.lock.yml | 2 +-
.../copilot-pr-prompt-analysis.lock.yml | 2 +-
.../copilot-session-insights.lock.yml | 24 +++++-----
.../workflows/copilot-token-audit.lock.yml | 2 +-
.../copilot-token-optimizer.lock.yml | 2 +-
.../workflows/daily-issues-report.lock.yml | 26 +++++------
.github/workflows/daily-news.lock.yml | 26 +++++------
.github/workflows/daily-news.md | 4 +-
.github/workflows/deep-report.lock.yml | 28 ++++++------
.github/workflows/issue-arborist.lock.yml | 28 ++++++------
.github/workflows/issue-arborist.md | 2 +-
.../prompt-clustering-analysis.lock.yml | 24 +++++-----
.github/workflows/release.lock.yml | 44 ++++++++++---------
.github/workflows/release.md | 22 +++++-----
.github/workflows/shared/apm.md | 2 +-
.../workflows/shared/copilot-pr-data-fetch.md | 2 +-
.../shared/copilot-session-data-fetch.md | 2 +-
.../shared/discussions-data-fetch.md | 2 +-
.github/workflows/shared/issues-data-fetch.md | 4 +-
.github/workflows/shared/mcp/gh-aw.md | 6 +--
.../shared/weekly-issues-data-fetch.md | 4 +-
.github/workflows/smoke-claude.lock.yml | 4 +-
.../workflows/static-analysis-report.lock.yml | 26 +++++------
.github/workflows/static-analysis-report.md | 4 +-
pkg/workflow/compiler_safe_outputs_job.go | 2 +-
27 files changed, 151 insertions(+), 147 deletions(-)
diff --git a/.github/workflows/copilot-agent-analysis.lock.yml b/.github/workflows/copilot-agent-analysis.lock.yml
index a1bbfae6056..6933036f467 100644
--- a/.github/workflows/copilot-agent-analysis.lock.yml
+++ b/.github/workflows/copilot-agent-analysis.lock.yml
@@ -371,7 +371,7 @@ jobs:
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/copilot-pr-merged-report.lock.yml b/.github/workflows/copilot-pr-merged-report.lock.yml
index 95905681c34..d0e853f1647 100644
--- a/.github/workflows/copilot-pr-merged-report.lock.yml
+++ b/.github/workflows/copilot-pr-merged-report.lock.yml
@@ -354,7 +354,7 @@ jobs:
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
index 93a813c4810..eb66ec0d819 100644
--- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
@@ -403,7 +403,7 @@ jobs:
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/copilot-pr-prompt-analysis.lock.yml b/.github/workflows/copilot-pr-prompt-analysis.lock.yml
index 3debdc476f5..ec39800e002 100644
--- a/.github/workflows/copilot-pr-prompt-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-prompt-analysis.lock.yml
@@ -367,7 +367,7 @@ jobs:
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml
index 1ae93f628fe..4c8a860a45f 100644
--- a/.github/workflows/copilot-session-insights.lock.yml
+++ b/.github/workflows/copilot-session-insights.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"78c99cc5a9838452eb384bd44cebe724c27b00c1cf565425a5c134b18eeb39b0","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"86eea1fe5380cca5018d3c2f14bdaea63485d15241b2f1f37ad13abe0e881691","strict":true,"agent_id":"claude"}
# gh-aw-manifest: {"version":1,"secrets":["ANTHROPIC_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN"],"actions":[{"repo":"actions/cache/restore","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/cache/save","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-node","sha":"53b83947a5a98c8d113130e565377fae1a50d02f","version":"v6.3.0"},{"repo":"actions/setup-python","sha":"a309ff8b426b58ec0e2a45f0f869d46889d02405","version":"v6.2.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18","digest":"sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18@sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18","digest":"sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18@sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18","digest":"sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18@sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.17","digest":"sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.2.17@sha256:a6dec6ec535a11c565d982afa2f98589805ed059
8862b9ea9d3c751fc71afae8"},{"image":"ghcr.io/github/github-mcp-server:v0.32.0","digest":"sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28","pinned_image":"ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -178,16 +178,16 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_a429f767766590df_EOF'
+ cat << 'GH_AW_PROMPT_5c3e03cff84d9269_EOF'
- GH_AW_PROMPT_a429f767766590df_EOF
+ GH_AW_PROMPT_5c3e03cff84d9269_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/repo_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_a429f767766590df_EOF'
+ cat << 'GH_AW_PROMPT_5c3e03cff84d9269_EOF'
Tools: create_discussion, upload_asset, missing_tool, missing_data, noop
@@ -221,9 +221,9 @@ jobs:
{{/if}}
- GH_AW_PROMPT_a429f767766590df_EOF
+ GH_AW_PROMPT_5c3e03cff84d9269_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_a429f767766590df_EOF'
+ cat << 'GH_AW_PROMPT_5c3e03cff84d9269_EOF'
{{#runtime-import .github/workflows/shared/jqschema.md}}
{{#runtime-import .github/workflows/shared/copilot-session-data-fetch.md}}
@@ -232,7 +232,7 @@ jobs:
{{#runtime-import .github/workflows/shared/reporting.md}}
{{#runtime-import .github/workflows/shared/python-dataviz.md}}
{{#runtime-import .github/workflows/copilot-session-insights.md}}
- GH_AW_PROMPT_a429f767766590df_EOF
+ GH_AW_PROMPT_5c3e03cff84d9269_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -388,7 +388,7 @@ jobs:
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -495,9 +495,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_3419a1a124ced573_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_247be6e762e7c629_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":24,"fallback_to_issue":true,"max":1,"title_prefix":"[copilot-session-insights] "},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"push_repo_memory":{"memories":[{"dir":"/tmp/gh-aw/repo-memory/default","id":"default","max_file_count":100,"max_file_size":102400,"max_patch_size":10240}]},"report_incomplete":{},"upload_asset":{"allowed-exts":[".png",".jpg",".jpeg"],"branch":"assets/${{ github.workflow }}","max-size":10240}}
- GH_AW_SAFE_OUTPUTS_CONFIG_3419a1a124ced573_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_247be6e762e7c629_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -697,7 +697,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.17'
- cat << GH_AW_MCP_CONFIG_9a611fd27bdeb1fe_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
+ cat << GH_AW_MCP_CONFIG_64d3b1991a6888c7_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
{
"mcpServers": {
"github": {
@@ -737,7 +737,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_9a611fd27bdeb1fe_EOF
+ GH_AW_MCP_CONFIG_64d3b1991a6888c7_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/copilot-token-audit.lock.yml b/.github/workflows/copilot-token-audit.lock.yml
index 7268e4dbf53..695d9f32d6a 100644
--- a/.github/workflows/copilot-token-audit.lock.yml
+++ b/.github/workflows/copilot-token-audit.lock.yml
@@ -405,7 +405,7 @@ jobs:
- env:
GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
name: Install gh-aw extension
- run: "# Install gh-aw if not already available\nif ! gh aw --version >/dev/null 2>&1; then\n echo \"Installing gh-aw extension...\"\n curl -fsSL https://raw.githubusercontent.com/github/gh-aw/refs/heads/main/install-gh-aw.sh | bash\nfi\ngh aw --version\n# Copy the gh-aw binary to ${RUNNER_TEMP}/gh-aw for MCP server containerization\nmkdir -p ${RUNNER_TEMP}/gh-aw\nGH_AW_BIN=$(which gh-aw 2>/dev/null || find ~/.local/share/gh/extensions/gh-aw -name 'gh-aw' -type f 2>/dev/null | head -1)\nif [ -n \"$GH_AW_BIN\" ] && [ -f \"$GH_AW_BIN\" ]; then\n cp \"$GH_AW_BIN\" ${RUNNER_TEMP}/gh-aw/gh-aw\n chmod +x ${RUNNER_TEMP}/gh-aw/gh-aw\n echo \"Copied gh-aw binary to ${RUNNER_TEMP}/gh-aw/gh-aw\"\nelse\n echo \"::error::Failed to find gh-aw binary for MCP server\"\n exit 1\nfi"
+ run: "# Install gh-aw if not already available\nif ! gh aw --version >/dev/null 2>&1; then\n echo \"Installing gh-aw extension...\"\n curl -fsSL https://raw.githubusercontent.com/github/gh-aw/refs/heads/main/install-gh-aw.sh | bash\nfi\ngh aw --version\n# Copy the gh-aw binary to ${RUNNER_TEMP}/gh-aw for MCP server containerization\nmkdir -p \"${RUNNER_TEMP}/gh-aw\"\nGH_AW_BIN=$(which gh-aw 2>/dev/null || find ~/.local/share/gh/extensions/gh-aw -name 'gh-aw' -type f 2>/dev/null | head -1)\nif [ -n \"$GH_AW_BIN\" ] && [ -f \"$GH_AW_BIN\" ]; then\n cp \"$GH_AW_BIN\" \"${RUNNER_TEMP}/gh-aw/gh-aw\"\n chmod +x \"${RUNNER_TEMP}/gh-aw/gh-aw\"\n echo \"Copied gh-aw binary to ${RUNNER_TEMP}/gh-aw/gh-aw\"\nelse\n echo \"::error::Failed to find gh-aw binary for MCP server\"\n exit 1\nfi"
- name: Setup Python environment
run: "# Create working directory for Python scripts\nmkdir -p /tmp/gh-aw/python\nmkdir -p /tmp/gh-aw/python/data\nmkdir -p /tmp/gh-aw/python/charts\nmkdir -p /tmp/gh-aw/python/artifacts\n\necho \"Python environment setup complete\"\necho \"Working directory: /tmp/gh-aw/python\"\necho \"Data directory: /tmp/gh-aw/python/data\"\necho \"Charts directory: /tmp/gh-aw/python/charts\"\necho \"Artifacts directory: /tmp/gh-aw/python/artifacts\"\n"
- name: Install Python scientific libraries
diff --git a/.github/workflows/copilot-token-optimizer.lock.yml b/.github/workflows/copilot-token-optimizer.lock.yml
index d7429f693a7..87b822b4cc5 100644
--- a/.github/workflows/copilot-token-optimizer.lock.yml
+++ b/.github/workflows/copilot-token-optimizer.lock.yml
@@ -384,7 +384,7 @@ jobs:
- env:
GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
name: Install gh-aw extension
- run: "# Install gh-aw if not already available\nif ! gh aw --version >/dev/null 2>&1; then\n echo \"Installing gh-aw extension...\"\n curl -fsSL https://raw.githubusercontent.com/github/gh-aw/refs/heads/main/install-gh-aw.sh | bash\nfi\ngh aw --version\n# Copy the gh-aw binary to ${RUNNER_TEMP}/gh-aw for MCP server containerization\nmkdir -p ${RUNNER_TEMP}/gh-aw\nGH_AW_BIN=$(which gh-aw 2>/dev/null || find ~/.local/share/gh/extensions/gh-aw -name 'gh-aw' -type f 2>/dev/null | head -1)\nif [ -n \"$GH_AW_BIN\" ] && [ -f \"$GH_AW_BIN\" ]; then\n cp \"$GH_AW_BIN\" ${RUNNER_TEMP}/gh-aw/gh-aw\n chmod +x ${RUNNER_TEMP}/gh-aw/gh-aw\n echo \"Copied gh-aw binary to ${RUNNER_TEMP}/gh-aw/gh-aw\"\nelse\n echo \"::error::Failed to find gh-aw binary for MCP server\"\n exit 1\nfi"
+ run: "# Install gh-aw if not already available\nif ! gh aw --version >/dev/null 2>&1; then\n echo \"Installing gh-aw extension...\"\n curl -fsSL https://raw.githubusercontent.com/github/gh-aw/refs/heads/main/install-gh-aw.sh | bash\nfi\ngh aw --version\n# Copy the gh-aw binary to ${RUNNER_TEMP}/gh-aw for MCP server containerization\nmkdir -p \"${RUNNER_TEMP}/gh-aw\"\nGH_AW_BIN=$(which gh-aw 2>/dev/null || find ~/.local/share/gh/extensions/gh-aw -name 'gh-aw' -type f 2>/dev/null | head -1)\nif [ -n \"$GH_AW_BIN\" ] && [ -f \"$GH_AW_BIN\" ]; then\n cp \"$GH_AW_BIN\" \"${RUNNER_TEMP}/gh-aw/gh-aw\"\n chmod +x \"${RUNNER_TEMP}/gh-aw/gh-aw\"\n echo \"Copied gh-aw binary to ${RUNNER_TEMP}/gh-aw/gh-aw\"\nelse\n echo \"::error::Failed to find gh-aw binary for MCP server\"\n exit 1\nfi"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
name: Download recent Copilot workflow logs
diff --git a/.github/workflows/daily-issues-report.lock.yml b/.github/workflows/daily-issues-report.lock.yml
index bddd9c24b21..b88b0dcf821 100644
--- a/.github/workflows/daily-issues-report.lock.yml
+++ b/.github/workflows/daily-issues-report.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"1fea2e8e5bace143ac5c2fd53d06f49fff682127672b6a746add262a2ed8bb89","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"bd6301136dbc050894c42701718b951df7f5565b577a224e402a190ee5e14563","strict":true,"agent_id":"copilot"}
# gh-aw-manifest: {"version":1,"secrets":["COPILOT_GITHUB_TOKEN","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN"],"actions":[{"repo":"actions/cache/restore","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/cache/save","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-node","sha":"53b83947a5a98c8d113130e565377fae1a50d02f","version":"v6.3.0"},{"repo":"actions/setup-python","sha":"a309ff8b426b58ec0e2a45f0f869d46889d02405","version":"v6.2.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18","digest":"sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18@sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18","digest":"sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18@sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18","digest":"sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18@sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.17","digest":"sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.2.17@s
ha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8"},{"image":"ghcr.io/github/github-mcp-server:v0.32.0","digest":"sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28","pinned_image":"ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -189,15 +189,15 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_89d00be6395e1854_EOF'
+ cat << 'GH_AW_PROMPT_38b60c29efb56d75_EOF'
- GH_AW_PROMPT_89d00be6395e1854_EOF
+ GH_AW_PROMPT_38b60c29efb56d75_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_89d00be6395e1854_EOF'
+ cat << 'GH_AW_PROMPT_38b60c29efb56d75_EOF'
Tools: create_discussion, upload_asset, missing_tool, missing_data, noop
@@ -231,9 +231,9 @@ jobs:
{{/if}}
- GH_AW_PROMPT_89d00be6395e1854_EOF
+ GH_AW_PROMPT_38b60c29efb56d75_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_89d00be6395e1854_EOF'
+ cat << 'GH_AW_PROMPT_38b60c29efb56d75_EOF'
{{#runtime-import .github/workflows/shared/github-guard-policy.md}}
{{#runtime-import .github/workflows/shared/jqschema.md}}
@@ -244,7 +244,7 @@ jobs:
{{#runtime-import .github/workflows/shared/reporting.md}}
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/daily-issues-report.md}}
- GH_AW_PROMPT_89d00be6395e1854_EOF
+ GH_AW_PROMPT_38b60c29efb56d75_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -408,12 +408,12 @@ jobs:
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
name: Fetch issues
- run: "# Create output directories\nmkdir -p /tmp/gh-aw/issues-data\nmkdir -p /tmp/gh-aw/cache-memory\n\n# Get today's date for cache identification\nTODAY=$(date '+%Y-%m-%d')\nCACHE_DIR=\"/tmp/gh-aw/cache-memory\"\n\n# Check if cached data exists from today\nif [ -f \"$CACHE_DIR/issues-${TODAY}.json\" ] && [ -s \"$CACHE_DIR/issues-${TODAY}.json\" ]; then\n echo \"✓ Found cached issues data from ${TODAY}\"\n cp \"$CACHE_DIR/issues-${TODAY}.json\" /tmp/gh-aw/issues-data/issues.json\n \n # Regenerate schema if missing\n if [ ! -f \"$CACHE_DIR/issues-${TODAY}-schema.json\" ]; then\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/issues-data/issues.json > \"$CACHE_DIR/issues-${TODAY}-schema.json\"\n fi\n cp \"$CACHE_DIR/issues-${TODAY}-schema.json\" /tmp/gh-aw/issues-data/issues-schema.json\n \n echo \"Using cached data from ${TODAY}\"\n echo \"Total issues in cache: $(jq 'length' /tmp/gh-aw/issues-data/issues.json)\"\nelse\n echo \"⬇ Downloading fresh issues data...\"\n \n # Fetch all issues (open and closed) using gh CLI\n # Using --limit 1000 to get the last 1000 issues, unfiltered\n echo \"Fetching the last 1000 issues...\"\n if ! gh issue list --repo $GITHUB_REPOSITORY \\\n --state all \\\n --json number,title,author,createdAt,state,url,body,labels,updatedAt,closedAt,milestone,assignees,comments \\\n --limit 1000 \\\n > /tmp/gh-aw/issues-data/issues.json; then\n echo \"::warning::Failed to fetch issues data (issues may be disabled or temporarily unavailable). Using empty dataset. 
Downstream analysis will report zero issues — check repository Issues settings or retry the workflow if this is unexpected.\"\n echo \"[]\" > /tmp/gh-aw/issues-data/issues.json\n fi\n\n # Generate schema for reference\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/issues-data/issues.json > /tmp/gh-aw/issues-data/issues-schema.json\n\n # Store in cache with today's date\n cp /tmp/gh-aw/issues-data/issues.json \"$CACHE_DIR/issues-${TODAY}.json\"\n cp /tmp/gh-aw/issues-data/issues-schema.json \"$CACHE_DIR/issues-${TODAY}-schema.json\"\n\n echo \"✓ Issues data saved to cache: issues-${TODAY}.json\"\n echo \"Total issues found: $(jq 'length' /tmp/gh-aw/issues-data/issues.json)\"\nfi\n\n# Always ensure data is available at expected locations for backward compatibility\necho \"Issues data available at: /tmp/gh-aw/issues-data/issues.json\"\necho \"Schema available at: /tmp/gh-aw/issues-data/issues-schema.json\""
+ run: "# Create output directories\nmkdir -p /tmp/gh-aw/issues-data\nmkdir -p /tmp/gh-aw/cache-memory\n\n# Get today's date for cache identification\nTODAY=$(date '+%Y-%m-%d')\nCACHE_DIR=\"/tmp/gh-aw/cache-memory\"\n\n# Check if cached data exists from today\nif [ -f \"$CACHE_DIR/issues-${TODAY}.json\" ] && [ -s \"$CACHE_DIR/issues-${TODAY}.json\" ]; then\n echo \"✓ Found cached issues data from ${TODAY}\"\n cp \"$CACHE_DIR/issues-${TODAY}.json\" /tmp/gh-aw/issues-data/issues.json\n \n # Regenerate schema if missing\n if [ ! -f \"$CACHE_DIR/issues-${TODAY}-schema.json\" ]; then\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/issues-data/issues.json > \"$CACHE_DIR/issues-${TODAY}-schema.json\"\n fi\n cp \"$CACHE_DIR/issues-${TODAY}-schema.json\" /tmp/gh-aw/issues-data/issues-schema.json\n \n echo \"Using cached data from ${TODAY}\"\n echo \"Total issues in cache: $(jq 'length' /tmp/gh-aw/issues-data/issues.json)\"\nelse\n echo \"⬇ Downloading fresh issues data...\"\n \n # Fetch all issues (open and closed) using gh CLI\n # Using --limit 1000 to get the last 1000 issues, unfiltered\n echo \"Fetching the last 1000 issues...\"\n if ! gh issue list --repo \"$GITHUB_REPOSITORY\" \\\n --state all \\\n --json number,title,author,createdAt,state,url,body,labels,updatedAt,closedAt,milestone,assignees,comments \\\n --limit 1000 \\\n > /tmp/gh-aw/issues-data/issues.json; then\n echo \"::warning::Failed to fetch issues data (issues may be disabled or temporarily unavailable). Using empty dataset. 
Downstream analysis will report zero issues — check repository Issues settings or retry the workflow if this is unexpected.\"\n echo \"[]\" > /tmp/gh-aw/issues-data/issues.json\n fi\n\n # Generate schema for reference\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/issues-data/issues.json > /tmp/gh-aw/issues-data/issues-schema.json\n\n # Store in cache with today's date\n cp /tmp/gh-aw/issues-data/issues.json \"$CACHE_DIR/issues-${TODAY}.json\"\n cp /tmp/gh-aw/issues-data/issues-schema.json \"$CACHE_DIR/issues-${TODAY}-schema.json\"\n\n echo \"✓ Issues data saved to cache: issues-${TODAY}.json\"\n echo \"Total issues found: $(jq 'length' /tmp/gh-aw/issues-data/issues.json)\"\nfi\n\n# Always ensure data is available at expected locations for backward compatibility\necho \"Issues data available at: /tmp/gh-aw/issues-data/issues.json\"\necho \"Schema available at: /tmp/gh-aw/issues-data/issues-schema.json\""
- name: Setup Python environment
run: "# Create working directory for Python scripts\nmkdir -p /tmp/gh-aw/python\nmkdir -p /tmp/gh-aw/python/data\nmkdir -p /tmp/gh-aw/python/charts\nmkdir -p /tmp/gh-aw/python/artifacts\n\necho \"Python environment setup complete\"\necho \"Working directory: /tmp/gh-aw/python\"\necho \"Data directory: /tmp/gh-aw/python/data\"\necho \"Charts directory: /tmp/gh-aw/python/charts\"\necho \"Artifacts directory: /tmp/gh-aw/python/artifacts\"\n"
- name: Install Python scientific libraries
@@ -506,9 +506,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_67cf1eab053a326d_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_74d27a0a66cb0e13_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":72,"fallback_to_issue":true,"max":1,"title_prefix":"[daily issues] "},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{},"upload_asset":{"allowed-exts":[".png",".jpg",".jpeg"],"branch":"assets/${{ github.workflow }}","max-size":10240}}
- GH_AW_SAFE_OUTPUTS_CONFIG_67cf1eab053a326d_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_74d27a0a66cb0e13_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -707,7 +707,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GITHUB_AW_OTEL_TRACE_ID -e GITHUB_AW_OTEL_PARENT_SPAN_ID -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.17'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_91bde249b9c602b6_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
+ cat << GH_AW_MCP_CONFIG_f74d28d9b36ff374_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
{
"mcpServers": {
"github": {
@@ -757,7 +757,7 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_91bde249b9c602b6_EOF
+ GH_AW_MCP_CONFIG_f74d28d9b36ff374_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml
index 7e0b61bf0d0..11c369a89a4 100644
--- a/.github/workflows/daily-news.lock.yml
+++ b/.github/workflows/daily-news.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"e5a5b641150892d124f84174486a550043df6ceeae82c6f35889b284e7b1e716","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"09999b57ac965fdd752e679281889c97394172778c5b9d7a1b7b5c187f1948da","strict":true,"agent_id":"copilot"}
# gh-aw-manifest: {"version":1,"secrets":["GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN","TAVILY_API_KEY"],"actions":[{"repo":"actions/cache/restore","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/cache/save","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-node","sha":"53b83947a5a98c8d113130e565377fae1a50d02f","version":"v6.3.0"},{"repo":"actions/setup-python","sha":"a309ff8b426b58ec0e2a45f0f869d46889d02405","version":"v6.2.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18","digest":"sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18@sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18","digest":"sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18@sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18","digest":"sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18@sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.17","digest":"sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.2.17@sha256:
a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8"},{"image":"ghcr.io/github/github-mcp-server:v0.32.0","digest":"sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28","pinned_image":"ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -179,16 +179,16 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_11a67f259f9ceffc_EOF'
+ cat << 'GH_AW_PROMPT_8d0c783b9a5ddc6a_EOF'
- GH_AW_PROMPT_11a67f259f9ceffc_EOF
+ GH_AW_PROMPT_8d0c783b9a5ddc6a_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/repo_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_11a67f259f9ceffc_EOF'
+ cat << 'GH_AW_PROMPT_8d0c783b9a5ddc6a_EOF'
Tools: create_discussion, upload_asset, missing_tool, missing_data, noop
@@ -222,9 +222,9 @@ jobs:
{{/if}}
- GH_AW_PROMPT_11a67f259f9ceffc_EOF
+ GH_AW_PROMPT_8d0c783b9a5ddc6a_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_11a67f259f9ceffc_EOF'
+ cat << 'GH_AW_PROMPT_8d0c783b9a5ddc6a_EOF'
{{#runtime-import .github/workflows/shared/mcp/tavily.md}}
{{#runtime-import .github/workflows/shared/jqschema.md}}
@@ -233,7 +233,7 @@ jobs:
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/shared/python-dataviz.md}}
{{#runtime-import .github/workflows/daily-news.md}}
- GH_AW_PROMPT_11a67f259f9ceffc_EOF
+ GH_AW_PROMPT_8d0c783b9a5ddc6a_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -417,13 +417,13 @@ jobs:
retention-days: 30
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
id: check-cache
name: Setup working directories
- run: "set -e\n\n# Create directories\nmkdir -p /tmp/gh-aw/daily-news-data\nmkdir -p /tmp/gh-aw/repo-memory/default/daily-news-data\n\n# Check if cached data exists and is recent (< 24 hours old)\nCACHE_VALID=false\nCACHE_TIMESTAMP_FILE=\"/tmp/gh-aw/repo-memory/default/daily-news-data/.timestamp\"\n\nif [ -f \"$CACHE_TIMESTAMP_FILE\" ]; then\n CACHE_AGE=$(($(date +%s) - $(cat \"$CACHE_TIMESTAMP_FILE\")))\n # 24 hours = 86400 seconds\n if [ $CACHE_AGE -lt 86400 ]; then\n echo \"✅ Found valid cached data (age: ${CACHE_AGE}s, less than 24h)\"\n CACHE_VALID=true\n else\n echo \"⚠ Cached data is stale (age: ${CACHE_AGE}s, more than 24h)\"\n fi\nelse\n echo \"ℹ No cached data found, will fetch fresh data\"\nfi\n\n# Use cached data if valid\nif [ \"$CACHE_VALID\" = true ]; then\n echo \"📦 Using cached data from previous run\"\n cp -r /tmp/gh-aw/repo-memory/default/daily-news-data/* /tmp/gh-aw/daily-news-data/\n echo \"✅ Cached data restored to working directory\"\n echo \"cache_valid=true\" >> \"$GITHUB_OUTPUT\"\nelse\n echo \"🔄 Will fetch fresh data from GitHub API...\"\n echo \"cache_valid=false\" >> \"$GITHUB_OUTPUT\"\n \n # Calculate date range (last 30 days)\n END_DATE=$(date -u +%Y-%m-%d)\n START_DATE=$(date -u -d '30 days ago' +%Y-%m-%d 2>/dev/null || date -u -v-30d +%Y-%m-%d)\n echo \"Fetching data from $START_DATE to $END_DATE\"\nfi\n"
+ run: "set -e\n\n# Create directories\nmkdir -p /tmp/gh-aw/daily-news-data\nmkdir -p /tmp/gh-aw/repo-memory/default/daily-news-data\n\n# Check if cached data exists and is recent (< 24 hours old)\nCACHE_VALID=false\nCACHE_TIMESTAMP_FILE=\"/tmp/gh-aw/repo-memory/default/daily-news-data/.timestamp\"\n\nif [ -f \"$CACHE_TIMESTAMP_FILE\" ]; then\n CACHE_AGE=$(($(date +%s) - $(cat \"$CACHE_TIMESTAMP_FILE\")))\n # 24 hours = 86400 seconds\n if [ \"$CACHE_AGE\" -lt 86400 ]; then\n echo \"✅ Found valid cached data (age: ${CACHE_AGE}s, less than 24h)\"\n CACHE_VALID=true\n else\n echo \"⚠ Cached data is stale (age: ${CACHE_AGE}s, more than 24h)\"\n fi\nelse\n echo \"ℹ No cached data found, will fetch fresh data\"\nfi\n\n# Use cached data if valid\nif [ \"$CACHE_VALID\" = true ]; then\n echo \"📦 Using cached data from previous run\"\n cp -r /tmp/gh-aw/repo-memory/default/daily-news-data/* /tmp/gh-aw/daily-news-data/\n echo \"✅ Cached data restored to working directory\"\n echo \"cache_valid=true\" >> \"$GITHUB_OUTPUT\"\nelse\n echo \"🔄 Will fetch fresh data from GitHub API...\"\n echo \"cache_valid=false\" >> \"$GITHUB_OUTPUT\"\n \n # Calculate date range (last 30 days)\n END_DATE=$(date -u +%Y-%m-%d)\n START_DATE=$(date -u -d '30 days ago' +%Y-%m-%d 2>/dev/null || date -u -v-30d +%Y-%m-%d)\n echo \"Fetching data from $START_DATE to $END_DATE\"\nfi\n"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -560,9 +560,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_2c5b2a75c38cc1f4_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_34e2cc8328156bbf_EOF'
{"create_discussion":{"category":"daily-news","close_older_discussions":true,"expires":72,"fallback_to_issue":true,"max":1},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"push_repo_memory":{"memories":[{"dir":"/tmp/gh-aw/repo-memory/default","id":"default","max_file_count":100,"max_file_size":102400,"max_patch_size":10240}]},"report_incomplete":{},"upload_asset":{"allowed-exts":[".png",".jpg",".jpeg"],"branch":"assets/${{ github.workflow }}","max-size":10240}}
- GH_AW_SAFE_OUTPUTS_CONFIG_2c5b2a75c38cc1f4_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_34e2cc8328156bbf_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -764,7 +764,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GITHUB_AW_OTEL_TRACE_ID -e GITHUB_AW_OTEL_PARENT_SPAN_ID -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.17'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_46d8067c05e9e47a_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
+ cat << GH_AW_MCP_CONFIG_5e1949198bfaa591_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
{
"mcpServers": {
"github": {
@@ -831,7 +831,7 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_46d8067c05e9e47a_EOF
+ GH_AW_MCP_CONFIG_5e1949198bfaa591_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/daily-news.md b/.github/workflows/daily-news.md
index d747ae8129b..455d9bda4df 100644
--- a/.github/workflows/daily-news.md
+++ b/.github/workflows/daily-news.md
@@ -46,7 +46,7 @@ tools:
steps:
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- name: Setup working directories
id: check-cache
@@ -67,7 +67,7 @@ steps:
if [ -f "$CACHE_TIMESTAMP_FILE" ]; then
CACHE_AGE=$(($(date +%s) - $(cat "$CACHE_TIMESTAMP_FILE")))
# 24 hours = 86400 seconds
- if [ $CACHE_AGE -lt 86400 ]; then
+ if [ "$CACHE_AGE" -lt 86400 ]; then
echo "✅ Found valid cached data (age: ${CACHE_AGE}s, less than 24h)"
CACHE_VALID=true
else
diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml
index b41ff336218..50d4bc2b3b5 100644
--- a/.github/workflows/deep-report.lock.yml
+++ b/.github/workflows/deep-report.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"3cf8399ff3ee174857a25afc7cab60e0f4930f28d8b1cb6918a90196c1a9e729","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"c742a309cbe5043cac0e6bb3fc3972db603cc22a238492a548c21e9c1af308e4","strict":true,"agent_id":"claude"}
# gh-aw-manifest: {"version":1,"secrets":["ANTHROPIC_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN"],"actions":[{"repo":"actions/cache/restore","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/cache/save","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-go","sha":"4a3601121dd01d1626a1e23e37211e3254c1c06c","version":"v6.4.0"},{"repo":"actions/setup-node","sha":"53b83947a5a98c8d113130e565377fae1a50d02f","version":"v6.3.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"},{"repo":"docker/build-push-action","sha":"bcafcacb16a39f128d818304e6c9c0c18556b85f","version":"v7.1.0"},{"repo":"docker/setup-buildx-action","sha":"4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd","version":"v4"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18","digest":"sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18@sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18","digest":"sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18@sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18","digest":"sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18@sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e"},{"image":"ghcr.io/g
ithub/gh-aw-mcpg:v0.2.17","digest":"sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.2.17@sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8"},{"image":"ghcr.io/github/github-mcp-server:v0.32.0","digest":"sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28","pinned_image":"ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -174,9 +174,9 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_76205308334d26c1_EOF'
+ cat << 'GH_AW_PROMPT_55c24562e4f21dc1_EOF'
- GH_AW_PROMPT_76205308334d26c1_EOF
+ GH_AW_PROMPT_55c24562e4f21dc1_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
@@ -184,7 +184,7 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/repo_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_76205308334d26c1_EOF'
+ cat << 'GH_AW_PROMPT_55c24562e4f21dc1_EOF'
Tools: create_issue(max:3), create_discussion, missing_tool, missing_data, noop
@@ -216,16 +216,16 @@ jobs:
{{/if}}
- GH_AW_PROMPT_76205308334d26c1_EOF
+ GH_AW_PROMPT_55c24562e4f21dc1_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_76205308334d26c1_EOF'
+ cat << 'GH_AW_PROMPT_55c24562e4f21dc1_EOF'
{{#runtime-import .github/workflows/shared/jqschema.md}}
{{#runtime-import .github/workflows/shared/discussions-data-fetch.md}}
{{#runtime-import .github/workflows/shared/weekly-issues-data-fetch.md}}
{{#runtime-import .github/workflows/shared/reporting.md}}
{{#runtime-import .github/workflows/deep-report.md}}
- GH_AW_PROMPT_76205308334d26c1_EOF
+ GH_AW_PROMPT_55c24562e4f21dc1_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -404,7 +404,7 @@ jobs:
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -414,12 +414,12 @@ jobs:
run: "# Create output directories\nmkdir -p /tmp/gh-aw/discussions-data\nmkdir -p /tmp/gh-aw/cache-memory\n\n# Get today's date for cache identification\nTODAY=$(date '+%Y-%m-%d')\nCACHE_DIR=\"/tmp/gh-aw/cache-memory\"\n\n# Check if cached data exists from today\nif [ -f \"$CACHE_DIR/discussions-${TODAY}.json\" ] && [ -s \"$CACHE_DIR/discussions-${TODAY}.json\" ]; then\n echo \"✓ Found cached discussions data from ${TODAY}\"\n cp \"$CACHE_DIR/discussions-${TODAY}.json\" /tmp/gh-aw/discussions-data/discussions.json\n \n # Regenerate schema if missing\n if [ ! -f \"$CACHE_DIR/discussions-${TODAY}-schema.json\" ]; then\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/discussions-data/discussions.json > \"$CACHE_DIR/discussions-${TODAY}-schema.json\"\n fi\n cp \"$CACHE_DIR/discussions-${TODAY}-schema.json\" /tmp/gh-aw/discussions-data/discussions-schema.json\n \n echo \"Using cached data from ${TODAY}\"\n echo \"Total discussions in cache: $(jq 'length' /tmp/gh-aw/discussions-data/discussions.json)\"\nelse\n echo \"⬇ Downloading fresh discussions data...\"\n \n # Fetch OPEN discussions using GraphQL with pagination (up to GH_AW_DISCUSSIONS_COUNT, default 100)\n DISCUSSIONS_FILE=\"/tmp/gh-aw/discussions-data/discussions.json\"\n echo '[]' > \"$DISCUSSIONS_FILE\"\n \n CURSOR=\"\"\n HAS_NEXT_PAGE=true\n PAGE_COUNT=0\n \n while [ \"$HAS_NEXT_PAGE\" = \"true\" ]; do\n if [ -z \"$CURSOR\" ]; then\n CURSOR_ARG=\"\"\n else\n CURSOR_ARG=\", after: \\\"$CURSOR\\\"\"\n fi\n \n RESULT=$(gh api graphql -f query=\"\n query {\n repository(owner: \\\"$REPO_OWNER\\\", name: \\\"$REPO_NAME\\\") {\n discussions(first: 100, states: [OPEN]${CURSOR_ARG}) {\n pageInfo {\n hasNextPage\n endCursor\n }\n nodes {\n number\n title\n body\n createdAt\n updatedAt\n url\n category {\n name\n slug\n }\n author {\n login\n }\n labels(first: 10) {\n nodes {\n name\n }\n }\n }\n }\n }\n }\n \")\n \n # Extract discussions and normalize structure\n echo \"$RESULT\" | jq -r '\n .data.repository.discussions.nodes \n | 
map({\n number, \n title,\n body,\n createdAt, \n updatedAt,\n url,\n category: .category.name,\n categorySlug: .category.slug,\n author: (if .author then .author.login else \"unknown\" end),\n labels: [.labels.nodes[].name],\n isAgenticWorkflow: (if .body then (.body | test(\"^> AI generated by\"; \"m\")) else false end)\n })\n ' | jq -s 'add' > /tmp/gh-aw/temp_discussions.json\n \n # Merge with existing discussions\n jq -s 'add | unique_by(.number)' \"$DISCUSSIONS_FILE\" /tmp/gh-aw/temp_discussions.json > /tmp/gh-aw/merged.json\n mv /tmp/gh-aw/merged.json \"$DISCUSSIONS_FILE\"\n rm -f /tmp/gh-aw/temp_discussions.json\n \n # Check if there are more pages\n HAS_NEXT_PAGE=$(echo \"$RESULT\" | jq -r '.data.repository.discussions.pageInfo.hasNextPage')\n CURSOR=$(echo \"$RESULT\" | jq -r '.data.repository.discussions.pageInfo.endCursor')\n \n # Check if we've reached the requested count\n CURRENT_COUNT=$(jq 'length' \"$DISCUSSIONS_FILE\")\n MAX_COUNT=\"${GH_AW_DISCUSSIONS_COUNT:-100}\"\n if [ \"$CURRENT_COUNT\" -ge \"$MAX_COUNT\" ]; then\n echo \"Reached requested discussion count ($MAX_COUNT)\"\n # Trim to exact count if we have more\n jq --argjson max \"$MAX_COUNT\" '.[:$max]' \"$DISCUSSIONS_FILE\" > /tmp/gh-aw/trimmed.json\n mv /tmp/gh-aw/trimmed.json \"$DISCUSSIONS_FILE\"\n break\n fi\n \n # Safety check - break after 10 pages (1000 discussions max regardless of count)\n PAGE_COUNT=$((PAGE_COUNT + 1))\n if [ $PAGE_COUNT -ge 10 ]; then\n echo \"Reached pagination limit (10 pages)\"\n break\n fi\n done\n \n # Generate schema for reference\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/discussions-data/discussions.json > /tmp/gh-aw/discussions-data/discussions-schema.json\n\n # Store in cache with today's date\n cp /tmp/gh-aw/discussions-data/discussions.json \"$CACHE_DIR/discussions-${TODAY}.json\"\n cp /tmp/gh-aw/discussions-data/discussions-schema.json \"$CACHE_DIR/discussions-${TODAY}-schema.json\"\n\n echo \"✓ Discussions data saved to cache: 
discussions-${TODAY}.json\"\n echo \"Total discussions found: $(jq 'length' /tmp/gh-aw/discussions-data/discussions.json)\"\nfi\n\n# Always ensure data is available at expected locations for backward compatibility\necho \"Discussions data available at: /tmp/gh-aw/discussions-data/discussions.json\"\necho \"Schema available at: /tmp/gh-aw/discussions-data/discussions-schema.json\""
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
name: Fetch weekly issues
- run: "# Create output directories\nmkdir -p /tmp/gh-aw/weekly-issues-data\nmkdir -p /tmp/gh-aw/cache-memory\n\n# Get today's date for cache identification\nTODAY=$(date '+%Y-%m-%d')\nCACHE_DIR=\"/tmp/gh-aw/cache-memory\"\n\n# Check if cached data exists from today\nif [ -f \"$CACHE_DIR/weekly-issues-${TODAY}.json\" ] && [ -s \"$CACHE_DIR/weekly-issues-${TODAY}.json\" ]; then\n echo \"✓ Found cached weekly issues data from ${TODAY}\"\n cp \"$CACHE_DIR/weekly-issues-${TODAY}.json\" /tmp/gh-aw/weekly-issues-data/issues.json\n \n # Regenerate schema if missing\n if [ ! -f \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\" ]; then\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/weekly-issues-data/issues.json > \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\"\n fi\n cp \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\" /tmp/gh-aw/weekly-issues-data/issues-schema.json\n \n echo \"Using cached data from ${TODAY}\"\n echo \"Total issues in cache: $(jq 'length' /tmp/gh-aw/weekly-issues-data/issues.json)\"\nelse\n echo \"⬇ Downloading fresh weekly issues data...\"\n \n # Calculate date 7 days ago (cross-platform: GNU date first, BSD fallback)\n DATE_7_DAYS_AGO=$(date -d '7 days ago' '+%Y-%m-%d' 2>/dev/null || date -v-7d '+%Y-%m-%d')\n \n echo \"Fetching issues created or updated since ${DATE_7_DAYS_AGO}...\"\n \n # Fetch issues from the last 7 days using gh CLI\n # Using --search with updated filter to get recent activity\n gh issue list --repo $GITHUB_REPOSITORY \\\n --search \"updated:>=${DATE_7_DAYS_AGO}\" \\\n --state all \\\n --json number,title,author,createdAt,state,url,body,labels,updatedAt,closedAt,milestone,assignees,comments \\\n --limit 500 \\\n > /tmp/gh-aw/weekly-issues-data/issues.json\n\n # Generate schema for reference\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/weekly-issues-data/issues.json > /tmp/gh-aw/weekly-issues-data/issues-schema.json\n\n # Store in cache with today's date\n cp /tmp/gh-aw/weekly-issues-data/issues.json \"$CACHE_DIR/weekly-issues-${TODAY}.json\"\n cp 
/tmp/gh-aw/weekly-issues-data/issues-schema.json \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\"\n\n echo \"✓ Weekly issues data saved to cache: weekly-issues-${TODAY}.json\"\n echo \"Total issues found: $(jq 'length' /tmp/gh-aw/weekly-issues-data/issues.json)\"\nfi\n\n# Always ensure data is available at expected locations for backward compatibility\necho \"Weekly issues data available at: /tmp/gh-aw/weekly-issues-data/issues.json\"\necho \"Schema available at: /tmp/gh-aw/weekly-issues-data/issues-schema.json\""
+ run: "# Create output directories\nmkdir -p /tmp/gh-aw/weekly-issues-data\nmkdir -p /tmp/gh-aw/cache-memory\n\n# Get today's date for cache identification\nTODAY=$(date '+%Y-%m-%d')\nCACHE_DIR=\"/tmp/gh-aw/cache-memory\"\n\n# Check if cached data exists from today\nif [ -f \"$CACHE_DIR/weekly-issues-${TODAY}.json\" ] && [ -s \"$CACHE_DIR/weekly-issues-${TODAY}.json\" ]; then\n echo \"✓ Found cached weekly issues data from ${TODAY}\"\n cp \"$CACHE_DIR/weekly-issues-${TODAY}.json\" /tmp/gh-aw/weekly-issues-data/issues.json\n \n # Regenerate schema if missing\n if [ ! -f \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\" ]; then\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/weekly-issues-data/issues.json > \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\"\n fi\n cp \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\" /tmp/gh-aw/weekly-issues-data/issues-schema.json\n \n echo \"Using cached data from ${TODAY}\"\n echo \"Total issues in cache: $(jq 'length' /tmp/gh-aw/weekly-issues-data/issues.json)\"\nelse\n echo \"⬇ Downloading fresh weekly issues data...\"\n \n # Calculate date 7 days ago (cross-platform: GNU date first, BSD fallback)\n DATE_7_DAYS_AGO=$(date -d '7 days ago' '+%Y-%m-%d' 2>/dev/null || date -v-7d '+%Y-%m-%d')\n \n echo \"Fetching issues created or updated since ${DATE_7_DAYS_AGO}...\"\n \n # Fetch issues from the last 7 days using gh CLI\n # Using --search with updated filter to get recent activity\n gh issue list --repo \"$GITHUB_REPOSITORY\" \\\n --search \"updated:>=${DATE_7_DAYS_AGO}\" \\\n --state all \\\n --json number,title,author,createdAt,state,url,body,labels,updatedAt,closedAt,milestone,assignees,comments \\\n --limit 500 \\\n > /tmp/gh-aw/weekly-issues-data/issues.json\n\n # Generate schema for reference\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/weekly-issues-data/issues.json > /tmp/gh-aw/weekly-issues-data/issues-schema.json\n\n # Store in cache with today's date\n cp /tmp/gh-aw/weekly-issues-data/issues.json 
\"$CACHE_DIR/weekly-issues-${TODAY}.json\"\n cp /tmp/gh-aw/weekly-issues-data/issues-schema.json \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\"\n\n echo \"✓ Weekly issues data saved to cache: weekly-issues-${TODAY}.json\"\n echo \"Total issues found: $(jq 'length' /tmp/gh-aw/weekly-issues-data/issues.json)\"\nfi\n\n# Always ensure data is available at expected locations for backward compatibility\necho \"Weekly issues data available at: /tmp/gh-aw/weekly-issues-data/issues.json\"\necho \"Schema available at: /tmp/gh-aw/weekly-issues-data/issues-schema.json\""
# Cache memory file share configuration from frontmatter processed below
- name: Create cache-memory directory
@@ -524,9 +524,9 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts"
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_3b5d856abae8bdd2_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_fbcf8c8ab321e2b7_EOF'
{"create_discussion":{"category":"reports","close_older_discussions":true,"expires":168,"fallback_to_issue":true,"max":1},"create_issue":{"expires":48,"group":true,"labels":["automation","improvement","quick-win","cookie"],"max":3,"title_prefix":"[deep-report] "},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"push_repo_memory":{"memories":[{"dir":"/tmp/gh-aw/repo-memory/default","id":"default","max_file_count":100,"max_file_size":1048576,"max_patch_size":10240}]},"report_incomplete":{},"upload_artifact":{"max-size-bytes":104857600,"max-uploads":1,"retention-days":30}}
- GH_AW_SAFE_OUTPUTS_CONFIG_3b5d856abae8bdd2_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_fbcf8c8ab321e2b7_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -748,7 +748,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.17'
- cat << GH_AW_MCP_CONFIG_2b258a252d313d30_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
+ cat << GH_AW_MCP_CONFIG_6156dc2f04b7f147_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
{
"mcpServers": {
"agenticworkflows": {
@@ -806,7 +806,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_2b258a252d313d30_EOF
+ GH_AW_MCP_CONFIG_6156dc2f04b7f147_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/issue-arborist.lock.yml b/.github/workflows/issue-arborist.lock.yml
index ebc18045347..32b7472c5f4 100644
--- a/.github/workflows/issue-arborist.lock.yml
+++ b/.github/workflows/issue-arborist.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"1fe8bce9b06b23b94aa1b0a877324f8683331c20498cf6f237790bc339b21b34","strict":true,"agent_id":"codex"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"4137e974fe3e7c7adede90013b6e134fa420efad63c018810b67672dafdbb1a8","strict":true,"agent_id":"codex"}
# gh-aw-manifest: {"version":1,"secrets":["CODEX_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN","OPENAI_API_KEY"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-node","sha":"53b83947a5a98c8d113130e565377fae1a50d02f","version":"v6.3.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18","digest":"sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18@sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18","digest":"sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18@sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18","digest":"sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18@sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.17","digest":"sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.2.17@sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8"},{"image":"ghcr.io/github/github-mcp-server:v0.32.0","digest":"sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28","pinned_image":"ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9
f4f9be28"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -169,14 +169,14 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_1ad9e587ddd98d0f_EOF'
+ cat << 'GH_AW_PROMPT_4421671e3a7dd6fc_EOF'
- GH_AW_PROMPT_1ad9e587ddd98d0f_EOF
+ GH_AW_PROMPT_4421671e3a7dd6fc_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_1ad9e587ddd98d0f_EOF'
+ cat << 'GH_AW_PROMPT_4421671e3a7dd6fc_EOF'
Tools: create_issue(max:5), create_discussion, link_sub_issue(max:50), missing_tool, missing_data, noop
@@ -208,15 +208,15 @@ jobs:
{{/if}}
- GH_AW_PROMPT_1ad9e587ddd98d0f_EOF
+ GH_AW_PROMPT_4421671e3a7dd6fc_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_1ad9e587ddd98d0f_EOF'
+ cat << 'GH_AW_PROMPT_4421671e3a7dd6fc_EOF'
{{#runtime-import .github/workflows/shared/github-guard-policy.md}}
{{#runtime-import .github/workflows/shared/jqschema.md}}
{{#runtime-import .github/workflows/shared/reporting.md}}
{{#runtime-import .github/workflows/issue-arborist.md}}
- GH_AW_PROMPT_1ad9e587ddd98d0f_EOF
+ GH_AW_PROMPT_4421671e3a7dd6fc_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -356,7 +356,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
name: Fetch issues
- run: "# Create output directory\nmkdir -p /tmp/gh-aw/issues-data\n\necho \"⬇ Downloading the last 100 open issues (excluding sub-issues)...\"\n\n# Fetch the last 100 open issues that don't have a parent issue\n# Using search filter to exclude issues that are already sub-issues\ngh issue list --repo $GITHUB_REPOSITORY \\\n --search \"-parent-issue:*\" \\\n --state open \\\n --json number,title,author,createdAt,state,url,body,labels,updatedAt,closedAt,milestone,assignees \\\n --limit 100 \\\n > /tmp/gh-aw/issues-data/issues.json\n\n# Generate schema for reference using jqschema\n/tmp/gh-aw/jqschema.sh < /tmp/gh-aw/issues-data/issues.json > /tmp/gh-aw/issues-data/issues-schema.json\n\necho \"✓ Issues data saved to /tmp/gh-aw/issues-data/issues.json\"\necho \"✓ Schema saved to /tmp/gh-aw/issues-data/issues-schema.json\"\necho \"Total issues fetched: $(jq 'length' /tmp/gh-aw/issues-data/issues.json)\"\necho \"\"\necho \"Schema of the issues data:\"\ncat /tmp/gh-aw/issues-data/issues-schema.json | jq .\n"
+ run: "# Create output directory\nmkdir -p /tmp/gh-aw/issues-data\n\necho \"⬇ Downloading the last 100 open issues (excluding sub-issues)...\"\n\n# Fetch the last 100 open issues that don't have a parent issue\n# Using search filter to exclude issues that are already sub-issues\ngh issue list --repo \"$GITHUB_REPOSITORY\" \\\n --search \"-parent-issue:*\" \\\n --state open \\\n --json number,title,author,createdAt,state,url,body,labels,updatedAt,closedAt,milestone,assignees \\\n --limit 100 \\\n > /tmp/gh-aw/issues-data/issues.json\n\n# Generate schema for reference using jqschema\n/tmp/gh-aw/jqschema.sh < /tmp/gh-aw/issues-data/issues.json > /tmp/gh-aw/issues-data/issues-schema.json\n\necho \"✓ Issues data saved to /tmp/gh-aw/issues-data/issues.json\"\necho \"✓ Schema saved to /tmp/gh-aw/issues-data/issues-schema.json\"\necho \"Total issues fetched: $(jq 'length' /tmp/gh-aw/issues-data/issues.json)\"\necho \"\"\necho \"Schema of the issues data:\"\ncat /tmp/gh-aw/issues-data/issues-schema.json | jq .\n"
- name: Configure Git credentials
env:
@@ -413,9 +413,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_696bfa003354f3e5_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_707d44a2cde293e0_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":24,"fallback_to_issue":true,"max":1,"title_prefix":"[Issue Arborist] "},"create_issue":{"expires":48,"group":true,"max":5,"title_prefix":"[Parent] "},"create_report_incomplete_issue":{},"link_sub_issue":{"max":50},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{}}
- GH_AW_SAFE_OUTPUTS_CONFIG_696bfa003354f3e5_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_707d44a2cde293e0_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -653,7 +653,7 @@ jobs:
export GH_AW_ENGINE="codex"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.17'
- cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_51804360a4ff664d_EOF
+ cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_7ff4c5b914999b5c_EOF
[history]
persistence = "none"
@@ -680,10 +680,10 @@ jobs:
[mcp_servers.safeoutputs."guard-policies".write-sink]
accept = ["*"]
- GH_AW_MCP_CONFIG_51804360a4ff664d_EOF
+ GH_AW_MCP_CONFIG_7ff4c5b914999b5c_EOF
# Generate JSON config for MCP gateway
- cat << GH_AW_MCP_CONFIG_51804360a4ff664d_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
+ cat << GH_AW_MCP_CONFIG_7ff4c5b914999b5c_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
{
"mcpServers": {
"github": {
@@ -726,7 +726,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_51804360a4ff664d_EOF
+ GH_AW_MCP_CONFIG_7ff4c5b914999b5c_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/issue-arborist.md b/.github/workflows/issue-arborist.md
index 628fdc50703..11e314ac178 100644
--- a/.github/workflows/issue-arborist.md
+++ b/.github/workflows/issue-arborist.md
@@ -39,7 +39,7 @@ steps:
# Fetch the last 100 open issues that don't have a parent issue
# Using search filter to exclude issues that are already sub-issues
- gh issue list --repo $GITHUB_REPOSITORY \
+ gh issue list --repo "$GITHUB_REPOSITORY" \
--search "-parent-issue:*" \
--state open \
--json number,title,author,createdAt,state,url,body,labels,updatedAt,closedAt,milestone,assignees \
diff --git a/.github/workflows/prompt-clustering-analysis.lock.yml b/.github/workflows/prompt-clustering-analysis.lock.yml
index 1c965ac6585..f4fec47e8fe 100644
--- a/.github/workflows/prompt-clustering-analysis.lock.yml
+++ b/.github/workflows/prompt-clustering-analysis.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"10fc7e39a1d898ba61aef0add60638c64e72bc1c765c3d38c30a04ae9d27e893","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"2a6eeae07a443523fb107a3836d3caa77fee6c3580620d3e9b9aa4f7ba940d36","strict":true,"agent_id":"claude"}
# gh-aw-manifest: {"version":1,"secrets":["ANTHROPIC_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN"],"actions":[{"repo":"actions/cache","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/cache/restore","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/cache/save","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-go","sha":"4a3601121dd01d1626a1e23e37211e3254c1c06c","version":"v6.4.0"},{"repo":"actions/setup-node","sha":"53b83947a5a98c8d113130e565377fae1a50d02f","version":"v6.3.0"},{"repo":"actions/setup-python","sha":"a309ff8b426b58ec0e2a45f0f869d46889d02405","version":"v6.2.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7"},{"repo":"docker/build-push-action","sha":"bcafcacb16a39f128d818304e6c9c0c18556b85f","version":"v7.1.0"},{"repo":"docker/setup-buildx-action","sha":"4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd","version":"v4"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18","digest":"sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18@sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18","digest":"sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18@sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18","digest":"sha256:eb102afcfbae26ffcec016adebb74d
3be7b0a5bf376ba306599cdf3effbe288e","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18@sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.17","digest":"sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.2.17@sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8"},{"image":"ghcr.io/github/github-mcp-server:v0.32.0","digest":"sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28","pinned_image":"ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -180,16 +180,16 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_0dbf5730336bd7cd_EOF'
+ cat << 'GH_AW_PROMPT_af0c15d7ea92bd9a_EOF'
- GH_AW_PROMPT_0dbf5730336bd7cd_EOF
+ GH_AW_PROMPT_af0c15d7ea92bd9a_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/agentic_workflows_guide.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_0dbf5730336bd7cd_EOF'
+ cat << 'GH_AW_PROMPT_af0c15d7ea92bd9a_EOF'
Tools: create_discussion, missing_tool, missing_data, noop
@@ -221,9 +221,9 @@ jobs:
{{/if}}
- GH_AW_PROMPT_0dbf5730336bd7cd_EOF
+ GH_AW_PROMPT_af0c15d7ea92bd9a_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_0dbf5730336bd7cd_EOF'
+ cat << 'GH_AW_PROMPT_af0c15d7ea92bd9a_EOF'
{{#runtime-import .github/workflows/shared/jqschema.md}}
{{#runtime-import .github/workflows/shared/reporting.md}}
@@ -231,7 +231,7 @@ jobs:
{{#runtime-import .github/workflows/shared/python-nlp.md}}
{{#runtime-import .github/workflows/shared/trending-charts-simple.md}}
{{#runtime-import .github/workflows/prompt-clustering-analysis.md}}
- GH_AW_PROMPT_0dbf5730336bd7cd_EOF
+ GH_AW_PROMPT_af0c15d7ea92bd9a_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -400,7 +400,7 @@ jobs:
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -538,9 +538,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_4ddd28f2ef7586a8_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_b5e8912f40a8b563_EOF'
{"create_discussion":{"category":"audits","close_older_discussions":true,"expires":24,"fallback_to_issue":true,"max":1,"title_prefix":"[prompt-clustering] "},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{}}
- GH_AW_SAFE_OUTPUTS_CONFIG_4ddd28f2ef7586a8_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_b5e8912f40a8b563_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -728,7 +728,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.17'
- cat << GH_AW_MCP_CONFIG_8716ad80f36e446b_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
+ cat << GH_AW_MCP_CONFIG_1f3617a8076818e7_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
{
"mcpServers": {
"agenticworkflows": {
@@ -786,7 +786,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_8716ad80f36e446b_EOF
+ GH_AW_MCP_CONFIG_1f3617a8076818e7_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/release.lock.yml b/.github/workflows/release.lock.yml
index 2c500fd6021..0fb1b109251 100644
--- a/.github/workflows/release.lock.yml
+++ b/.github/workflows/release.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"fa263cd8ad1e05a8d89eea074246b0cff27c44fa0f6e60c93e16d46cd2eaea24","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"f8edda9f6042fb4b505052066f869f726e43ff4b430ba7bd34727953a9b96615","strict":true,"agent_id":"copilot"}
# gh-aw-manifest: {"version":1,"secrets":["COPILOT_GITHUB_TOKEN","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-go","sha":"4a3601121dd01d1626a1e23e37211e3254c1c06c","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7"},{"repo":"anchore/sbom-action","sha":"e22c389904149dbc22b58101806040fa8d37a610","version":"v0.24.0"},{"repo":"docker/build-push-action","sha":"bcafcacb16a39f128d818304e6c9c0c18556b85f","version":"v7"},{"repo":"docker/login-action","sha":"4907a6ddec9925e35a0a9e82d7399ccc52663121","version":"v4.1.0"},{"repo":"docker/metadata-action","sha":"030e881283bb7a6894de51c315a6bfe6a94e05cf","version":"v6"},{"repo":"docker/setup-buildx-action","sha":"4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd","version":"v4"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18","digest":"sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18@sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18","digest":"sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18@sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18","digest":"sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18@sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e"},{"image":"ghcr.io/github
/gh-aw-mcpg:v0.2.17","digest":"sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.2.17@sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8"},{"image":"ghcr.io/github/github-mcp-server:v0.32.0","digest":"sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28","pinned_image":"ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -183,14 +183,14 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_02ed860310fb17fa_EOF'
+ cat << 'GH_AW_PROMPT_74254362467623ae_EOF'
- GH_AW_PROMPT_02ed860310fb17fa_EOF
+ GH_AW_PROMPT_74254362467623ae_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_02ed860310fb17fa_EOF'
+ cat << 'GH_AW_PROMPT_74254362467623ae_EOF'
Tools: update_release, missing_tool, missing_data, noop
@@ -222,13 +222,13 @@ jobs:
{{/if}}
- GH_AW_PROMPT_02ed860310fb17fa_EOF
+ GH_AW_PROMPT_74254362467623ae_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_02ed860310fb17fa_EOF'
+ cat << 'GH_AW_PROMPT_74254362467623ae_EOF'
{{#runtime-import .github/workflows/shared/community-attribution.md}}
{{#runtime-import .github/workflows/release.md}}
- GH_AW_PROMPT_02ed860310fb17fa_EOF
+ GH_AW_PROMPT_74254362467623ae_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -420,9 +420,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_e9ebefbfb5b115de_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_5c075c282345bc10_EOF'
{"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{},"update_release":{"max":1}}
- GH_AW_SAFE_OUTPUTS_CONFIG_e9ebefbfb5b115de_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_5c075c282345bc10_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -609,7 +609,7 @@ jobs:
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.17'
mkdir -p /home/runner/.copilot
- cat << GH_AW_MCP_CONFIG_8015a6cb45aefd2e_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
+ cat << GH_AW_MCP_CONFIG_df2cef0ffaf50707_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
{
"mcpServers": {
"github": {
@@ -650,7 +650,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_8015a6cb45aefd2e_EOF
+ GH_AW_MCP_CONFIG_df2cef0ffaf50707_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -1367,16 +1367,18 @@ jobs:
retention-days: 1
- name: "Notify - run sync actions and merge PR"
run: |
- echo "## Manual Sync Actions Required" >> "$GITHUB_STEP_SUMMARY"
- echo "" >> "$GITHUB_STEP_SUMMARY"
- echo "The following manual steps must be completed in **github/gh-aw-actions** before this release continues:" >> "$GITHUB_STEP_SUMMARY"
- echo "" >> "$GITHUB_STEP_SUMMARY"
- echo "1. Trigger the **sync-actions** workflow in github/gh-aw-actions:" >> "$GITHUB_STEP_SUMMARY"
- echo " https://github.com/github/gh-aw-actions/actions/workflows/sync-actions.yml" >> "$GITHUB_STEP_SUMMARY"
- echo "2. Merge the PR created by the sync-actions workflow in **github/gh-aw-actions**" >> "$GITHUB_STEP_SUMMARY"
- echo "3. Verify that tag **\`${RELEASE_TAG}\`** exists in github/gh-aw-actions" >> "$GITHUB_STEP_SUMMARY"
- echo "" >> "$GITHUB_STEP_SUMMARY"
- echo "Once the above steps are complete, approve the **gh-aw-actions-release** environment gate to continue the release." >> "$GITHUB_STEP_SUMMARY"
+ {
+ echo "## Manual Sync Actions Required"
+ echo ""
+ echo "The following manual steps must be completed in **github/gh-aw-actions** before this release continues:"
+ echo ""
+ echo "1. Trigger the **sync-actions** workflow in github/gh-aw-actions:"
+ echo " https://github.com/github/gh-aw-actions/actions/workflows/sync-actions.yml"
+ echo "2. Merge the PR created by the sync-actions workflow in **github/gh-aw-actions**"
+ echo "3. Verify that tag **\`${RELEASE_TAG}\`** exists in github/gh-aw-actions"
+ echo ""
+ echo "Once the above steps are complete, approve the **gh-aw-actions-release** environment gate to continue the release."
+ } >> "$GITHUB_STEP_SUMMARY"
echo "Sync actions instructions written for release: $RELEASE_TAG"
echo "Ensure the sync-actions job has been run and the PR merged in github/gh-aw-actions before approving."
diff --git a/.github/workflows/release.md b/.github/workflows/release.md
index d7f2d0a2fea..275df3e53db 100644
--- a/.github/workflows/release.md
+++ b/.github/workflows/release.md
@@ -200,16 +200,18 @@ jobs:
env:
RELEASE_TAG: ${{ needs.config.outputs.release_tag }}
run: |
- echo "## Manual Sync Actions Required" >> "$GITHUB_STEP_SUMMARY"
- echo "" >> "$GITHUB_STEP_SUMMARY"
- echo "The following manual steps must be completed in **github/gh-aw-actions** before this release continues:" >> "$GITHUB_STEP_SUMMARY"
- echo "" >> "$GITHUB_STEP_SUMMARY"
- echo "1. Trigger the **sync-actions** workflow in github/gh-aw-actions:" >> "$GITHUB_STEP_SUMMARY"
- echo " https://github.com/github/gh-aw-actions/actions/workflows/sync-actions.yml" >> "$GITHUB_STEP_SUMMARY"
- echo "2. Merge the PR created by the sync-actions workflow in **github/gh-aw-actions**" >> "$GITHUB_STEP_SUMMARY"
- echo "3. Verify that tag **\`${RELEASE_TAG}\`** exists in github/gh-aw-actions" >> "$GITHUB_STEP_SUMMARY"
- echo "" >> "$GITHUB_STEP_SUMMARY"
- echo "Once the above steps are complete, approve the **gh-aw-actions-release** environment gate to continue the release." >> "$GITHUB_STEP_SUMMARY"
+ {
+ echo "## Manual Sync Actions Required"
+ echo ""
+ echo "The following manual steps must be completed in **github/gh-aw-actions** before this release continues:"
+ echo ""
+ echo "1. Trigger the **sync-actions** workflow in github/gh-aw-actions:"
+ echo " https://github.com/github/gh-aw-actions/actions/workflows/sync-actions.yml"
+ echo "2. Merge the PR created by the sync-actions workflow in **github/gh-aw-actions**"
+ echo "3. Verify that tag **\`${RELEASE_TAG}\`** exists in github/gh-aw-actions"
+ echo ""
+ echo "Once the above steps are complete, approve the **gh-aw-actions-release** environment gate to continue the release."
+ } >> "$GITHUB_STEP_SUMMARY"
echo "Sync actions instructions written for release: $RELEASE_TAG"
echo "Ensure the sync-actions job has been run and the PR merged in github/gh-aw-actions before approving."
diff --git a/.github/workflows/shared/apm.md b/.github/workflows/shared/apm.md
index 36782afa401..30f570a67ab 100644
--- a/.github/workflows/shared/apm.md
+++ b/.github/workflows/shared/apm.md
@@ -72,7 +72,7 @@ steps:
path: /tmp/gh-aw/apm-bundle
- name: Find APM bundle path
id: apm_bundle
- run: echo "path=$(ls /tmp/gh-aw/apm-bundle/*.tar.gz | head -1)" >> "$GITHUB_OUTPUT"
+ run: echo "path=$(find /tmp/gh-aw/apm-bundle -name '*.tar.gz' | head -1)" >> "$GITHUB_OUTPUT"
- name: Restore APM packages
uses: microsoft/apm-action@v1.4.1
with:
diff --git a/.github/workflows/shared/copilot-pr-data-fetch.md b/.github/workflows/shared/copilot-pr-data-fetch.md
index c7f2173f69f..e992b445afb 100644
--- a/.github/workflows/shared/copilot-pr-data-fetch.md
+++ b/.github/workflows/shared/copilot-pr-data-fetch.md
@@ -15,7 +15,7 @@ tools:
steps:
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- name: Fetch Copilot PR data
env:
diff --git a/.github/workflows/shared/copilot-session-data-fetch.md b/.github/workflows/shared/copilot-session-data-fetch.md
index 0f45052b76f..22aaeddf862 100644
--- a/.github/workflows/shared/copilot-session-data-fetch.md
+++ b/.github/workflows/shared/copilot-session-data-fetch.md
@@ -31,7 +31,7 @@ tools:
steps:
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- name: Fetch Copilot session data
env:
diff --git a/.github/workflows/shared/discussions-data-fetch.md b/.github/workflows/shared/discussions-data-fetch.md
index a18fbd39633..fc6a5a6a54f 100644
--- a/.github/workflows/shared/discussions-data-fetch.md
+++ b/.github/workflows/shared/discussions-data-fetch.md
@@ -14,7 +14,7 @@ tools:
steps:
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- name: Fetch discussions
env:
diff --git a/.github/workflows/shared/issues-data-fetch.md b/.github/workflows/shared/issues-data-fetch.md
index 89c2b5bbbd7..20ecea21ce6 100644
--- a/.github/workflows/shared/issues-data-fetch.md
+++ b/.github/workflows/shared/issues-data-fetch.md
@@ -15,7 +15,7 @@ tools:
steps:
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- name: Fetch issues
env:
@@ -49,7 +49,7 @@ steps:
# Fetch all issues (open and closed) using gh CLI
# Using --limit 1000 to get the last 1000 issues, unfiltered
echo "Fetching the last 1000 issues..."
- if ! gh issue list --repo $GITHUB_REPOSITORY \
+ if ! gh issue list --repo "$GITHUB_REPOSITORY" \
--state all \
--json number,title,author,createdAt,state,url,body,labels,updatedAt,closedAt,milestone,assignees,comments \
--limit 1000 \
diff --git a/.github/workflows/shared/mcp/gh-aw.md b/.github/workflows/shared/mcp/gh-aw.md
index 48dd2949fdd..d3eefbce857 100644
--- a/.github/workflows/shared/mcp/gh-aw.md
+++ b/.github/workflows/shared/mcp/gh-aw.md
@@ -23,11 +23,11 @@ steps:
fi
gh aw --version
# Copy the gh-aw binary to ${RUNNER_TEMP}/gh-aw for MCP server containerization
- mkdir -p ${RUNNER_TEMP}/gh-aw
+ mkdir -p "${RUNNER_TEMP}/gh-aw"
GH_AW_BIN=$(which gh-aw 2>/dev/null || find ~/.local/share/gh/extensions/gh-aw -name 'gh-aw' -type f 2>/dev/null | head -1)
if [ -n "$GH_AW_BIN" ] && [ -f "$GH_AW_BIN" ]; then
- cp "$GH_AW_BIN" ${RUNNER_TEMP}/gh-aw/gh-aw
- chmod +x ${RUNNER_TEMP}/gh-aw/gh-aw
+ cp "$GH_AW_BIN" "${RUNNER_TEMP}/gh-aw/gh-aw"
+ chmod +x "${RUNNER_TEMP}/gh-aw/gh-aw"
echo "Copied gh-aw binary to ${RUNNER_TEMP}/gh-aw/gh-aw"
else
echo "::error::Failed to find gh-aw binary for MCP server"
diff --git a/.github/workflows/shared/weekly-issues-data-fetch.md b/.github/workflows/shared/weekly-issues-data-fetch.md
index 01ff59908c6..891c2565a08 100644
--- a/.github/workflows/shared/weekly-issues-data-fetch.md
+++ b/.github/workflows/shared/weekly-issues-data-fetch.md
@@ -15,7 +15,7 @@ tools:
steps:
- name: Install gh CLI
run: |
- bash ${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh
+ bash "${RUNNER_TEMP}/gh-aw/actions/install_gh_cli.sh"
- name: Fetch weekly issues
env:
@@ -53,7 +53,7 @@ steps:
# Fetch issues from the last 7 days using gh CLI
# Using --search with updated filter to get recent activity
- gh issue list --repo $GITHUB_REPOSITORY \
+ gh issue list --repo "$GITHUB_REPOSITORY" \
--search "updated:>=${DATE_7_DAYS_AGO}" \
--state all \
--json number,title,author,createdAt,state,url,body,labels,updatedAt,closedAt,milestone,assignees,comments \
diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml
index 785368ee5fe..d701966edf2 100644
--- a/.github/workflows/smoke-claude.lock.yml
+++ b/.github/workflows/smoke-claude.lock.yml
@@ -855,7 +855,7 @@ jobs:
path: /tmp/gh-aw/apm-bundle
- id: apm_bundle
name: Find APM bundle path
- run: echo "path=$(ls /tmp/gh-aw/apm-bundle/*.tar.gz | head -1)" >> "$GITHUB_OUTPUT"
+ run: echo "path=$(find /tmp/gh-aw/apm-bundle -name '*.tar.gz' | head -1)" >> "$GITHUB_OUTPUT"
- name: Restore APM packages
uses: microsoft/apm-action@a190b0b1a91031057144dc136acf9757a59c9e4d # v1.4.1
with:
@@ -2905,7 +2905,7 @@ jobs:
echo "GH_HOST=${GH_HOST}" >> "$GITHUB_ENV"
- name: Configure Safe Outputs Custom Scripts
run: |
- cat > ${RUNNER_TEMP}/gh-aw/actions/safe_output_script_post_slack_message.cjs << 'GH_AW_SAFE_OUTPUT_SCRIPT_POST_SLACK_MESSAGE_dae07f2089a4b2eb_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/actions/safe_output_script_post_slack_message.cjs" << 'GH_AW_SAFE_OUTPUT_SCRIPT_POST_SLACK_MESSAGE_dae07f2089a4b2eb_EOF'
// @ts-check
///
// Auto-generated safe-output script handler: post-slack-message
diff --git a/.github/workflows/static-analysis-report.lock.yml b/.github/workflows/static-analysis-report.lock.yml
index 987cc594574..98950a8a22f 100644
--- a/.github/workflows/static-analysis-report.lock.yml
+++ b/.github/workflows/static-analysis-report.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"2598e01890cdcf9c0142e3408167ea4e513d3a5b3068eefaa57584752b25c1a0","strict":true,"agent_id":"claude"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"c801fa7efd9bf29b71c3d32e6727414357a82d1b16f41aa359e753e9ae441bdf","strict":true,"agent_id":"claude"}
 # gh-aw-manifest: {"version":1,"secrets":["ANTHROPIC_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN"],"actions":[{"repo":"actions/cache/restore","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/cache/save","sha":"668228422ae6a00e4ad889ee87cd7109ec5666a7","version":"v5.0.4"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-go","sha":"4a3601121dd01d1626a1e23e37211e3254c1c06c","version":"v6.4.0"},{"repo":"actions/setup-node","sha":"53b83947a5a98c8d113130e565377fae1a50d02f","version":"v6.3.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"},{"repo":"docker/build-push-action","sha":"bcafcacb16a39f128d818304e6c9c0c18556b85f","version":"v7.1.0"},{"repo":"docker/setup-buildx-action","sha":"4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd","version":"v4"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18","digest":"sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.18@sha256:c77e8c26bab6c39e8568d8e2f8c17015944849a8cbcdfb4bd9725d8893725ca2"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18","digest":"sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.18@sha256:d16a40a3ca6e989896d0cef9f31b9412bb1fcc8755bafcafb95012ae1078539b"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18","digest":"sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.18@sha256:eb102afcfbae26ffcec016adebb74d3be7b0a5bf376ba306599cdf3effbe288e"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.17","digest":"sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.2.17@sha256:a6dec6ec535a11c565d982afa2f98589805ed0598862b9ea9d3c751fc71afae8"},{"image":"ghcr.io/github/github-mcp-server:v0.32.0","digest":"sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28","pinned_image":"ghcr.io/github/github-mcp-server:v0.32.0@sha256:2763823c63bcca718ce53850a1d7fcf2f501ec84028394f1b63ce7e9f4f9be28"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -171,16 +171,16 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_60ae843a8e889fc9_EOF'
+ cat << 'GH_AW_PROMPT_e1c86301db15eb5d_EOF'
- GH_AW_PROMPT_60ae843a8e889fc9_EOF
+ GH_AW_PROMPT_e1c86301db15eb5d_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/agentic_workflows_guide.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_60ae843a8e889fc9_EOF'
+ cat << 'GH_AW_PROMPT_e1c86301db15eb5d_EOF'
Tools: create_issue(max:4), missing_tool, missing_data, noop
@@ -212,13 +212,13 @@ jobs:
{{/if}}
- GH_AW_PROMPT_60ae843a8e889fc9_EOF
+ GH_AW_PROMPT_e1c86301db15eb5d_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_60ae843a8e889fc9_EOF'
+ cat << 'GH_AW_PROMPT_e1c86301db15eb5d_EOF'
{{#runtime-import .github/workflows/shared/reporting.md}}
{{#runtime-import .github/workflows/static-analysis-report.md}}
- GH_AW_PROMPT_60ae843a8e889fc9_EOF
+ GH_AW_PROMPT_e1c86301db15eb5d_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -388,13 +388,13 @@ jobs:
run: |
set -e
make build
- $GITHUB_WORKSPACE/gh-aw --version
+ "$GITHUB_WORKSPACE/gh-aw" --version
- name: Pull static analysis Docker images
run: "set -e\necho \"Pulling Docker images for static analysis tools...\"\n\n# Pull zizmor Docker image\necho \"Pulling zizmor image...\"\ndocker pull ghcr.io/zizmorcore/zizmor:latest\n\n# Pull poutine Docker image\necho \"Pulling poutine image...\"\ndocker pull ghcr.io/boostsecurityio/poutine:latest\n\n# Pull runner-guard Docker image\necho \"Pulling runner-guard image...\"\ndocker pull ghcr.io/vigilant-llc/runner-guard:latest\n\necho \"All static analysis Docker images pulled successfully\"\n"
- name: Verify static analysis tools
run: "set -e\necho \"Verifying static analysis tools are available...\"\n\n# Verify zizmor\necho \"Testing zizmor...\"\ndocker run --rm ghcr.io/zizmorcore/zizmor:latest --version || echo \"Warning: zizmor version check failed\"\n\n# Verify poutine\necho \"Testing poutine...\"\ndocker run --rm ghcr.io/boostsecurityio/poutine:latest --version || echo \"Warning: poutine version check failed\"\n\n# Verify runner-guard\necho \"Testing runner-guard...\"\ndocker run --rm ghcr.io/vigilant-llc/runner-guard:latest --version || echo \"Warning: runner-guard version check failed\"\n\necho \"Static analysis tools verification complete\"\n"
- name: Run compile with security tools
- run: "set -e\necho \"Running gh aw compile with security tools to download Docker images...\"\n\n# Run compile with all security scanner flags to download Docker images\n# Store the output in a file for inspection\n$GITHUB_WORKSPACE/gh-aw compile --zizmor --poutine --actionlint --runner-guard 2>&1 | tee /tmp/gh-aw/compile-output.txt\n\necho \"Compile with security tools completed\"\necho \"Output saved to /tmp/gh-aw/compile-output.txt\""
+ run: "set -e\necho \"Running gh aw compile with security tools to download Docker images...\"\n\n# Run compile with all security scanner flags to download Docker images\n# Store the output in a file for inspection\n\"$GITHUB_WORKSPACE/gh-aw\" compile --zizmor --poutine --actionlint --runner-guard 2>&1 | tee /tmp/gh-aw/compile-output.txt\n\necho \"Compile with security tools completed\"\necho \"Output saved to /tmp/gh-aw/compile-output.txt\""
# Cache memory file share configuration from frontmatter processed below
- name: Create cache-memory directory
@@ -488,9 +488,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_31386a0e0fcf5391_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_ca3bcd5b88ecf2bc_EOF'
{"create_issue":{"close_older_issues":true,"expires":168,"labels":["security","automation"],"max":4,"title_prefix":"[static-analysis] "},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{}}
- GH_AW_SAFE_OUTPUTS_CONFIG_31386a0e0fcf5391_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_ca3bcd5b88ecf2bc_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -685,7 +685,7 @@ jobs:
export GH_AW_ENGINE="claude"
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.17'
- cat << GH_AW_MCP_CONFIG_fc452aa80c2a0e5a_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
+ cat << GH_AW_MCP_CONFIG_e220b45a4dce17e0_EOF | bash "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh"
{
"mcpServers": {
"agenticworkflows": {
@@ -743,7 +743,7 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_fc452aa80c2a0e5a_EOF
+ GH_AW_MCP_CONFIG_e220b45a4dce17e0_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
diff --git a/.github/workflows/static-analysis-report.md b/.github/workflows/static-analysis-report.md
index 33d21bb8981..400e7d36818 100644
--- a/.github/workflows/static-analysis-report.md
+++ b/.github/workflows/static-analysis-report.md
@@ -35,7 +35,7 @@ steps:
run: |
set -e
make build
- $GITHUB_WORKSPACE/gh-aw --version
+ "$GITHUB_WORKSPACE/gh-aw" --version
- name: Pull static analysis Docker images
run: |
set -e
@@ -79,7 +79,7 @@ steps:
# Run compile with all security scanner flags to download Docker images
# Store the output in a file for inspection
- $GITHUB_WORKSPACE/gh-aw compile --zizmor --poutine --actionlint --runner-guard 2>&1 | tee /tmp/gh-aw/compile-output.txt
+ "$GITHUB_WORKSPACE/gh-aw" compile --zizmor --poutine --actionlint --runner-guard 2>&1 | tee /tmp/gh-aw/compile-output.txt
echo "Compile with security tools completed"
echo "Output saved to /tmp/gh-aw/compile-output.txt"
diff --git a/pkg/workflow/compiler_safe_outputs_job.go b/pkg/workflow/compiler_safe_outputs_job.go
index 317b838e62a..b6c2fe7c193 100644
--- a/pkg/workflow/compiler_safe_outputs_job.go
+++ b/pkg/workflow/compiler_safe_outputs_job.go
@@ -787,7 +787,7 @@ func buildCustomScriptFilesStep(scripts map[string]*SafeScriptConfig, frontmatte
return nil, fmt.Errorf("safe-output script %q: %w", scriptName, err)
}
- steps = append(steps, fmt.Sprintf(" cat > %s << '%s'\n", filePath, delimiter))
+ steps = append(steps, fmt.Sprintf(" cat > \"%s\" << '%s'\n", filePath, delimiter))
for line := range strings.SplitSeq(scriptContent, "\n") {
steps = append(steps, " "+line+"\n")
}